diff --git a/pillar/logrotate/init.sls b/pillar/logrotate/init.sls
new file mode 100644
index 000000000..1b27ffca9
--- /dev/null
+++ b/pillar/logrotate/init.sls
@@ -0,0 +1,11 @@
+logrotate:
+  conf: |
+    daily
+    rotate 14
+    missingok
+    copytruncate
+    compress
+    create
+    extension .log
+    dateext
+    dateyesterday
\ No newline at end of file
diff --git a/pillar/top.sls b/pillar/top.sls
index 73d66ef2a..488286c6a 100644
--- a/pillar/top.sls
+++ b/pillar/top.sls
@@ -1,6 +1,7 @@
 base:
   '*':
     - patch.needs_restarting
+    - logrotate
   '*_eval or *_helix or *_heavynode or *_sensor or *_standalone or *_import':
     - match: compound
diff --git a/salt/common/cron/common-rotate b/salt/common/cron/common-rotate
new file mode 100644
index 000000000..55282f23f
--- /dev/null
+++ b/salt/common/cron/common-rotate
@@ -0,0 +1,2 @@
+#!/bin/bash
+logrotate -f /opt/so/conf/log-rotate.conf >/dev/null 2>&1
diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
new file mode 100644
index 000000000..d383981cd
--- /dev/null
+++ b/salt/common/files/log-rotate.conf
@@ -0,0 +1,23 @@
+{%- set logrotate_conf = salt['pillar.get']('logrotate:conf') %}
+
+/opt/so/log/aptcacher-ng/*.log
+/opt/so/log/idstools/*.log
+/opt/so/log/nginx/*.log
+/opt/so/log/soc/*.log
+/opt/so/log/kratos/*.log
+/opt/so/log/kibana/*.log
+/opt/so/log/influxdb/*.log
+/opt/so/log/elastalert/*.log
+/opt/so/log/soctopus/*.log
+/opt/so/log/curator/*.log
+/opt/so/log/fleet/*.log
+/opt/so/log/suricata/*.log
+/opt/so/log/mysql/*.log
+/opt/so/log/playbook/*.log
+/opt/so/log/logstash/*.log
+/opt/so/log/filebeat/*.log
+/opt/so/log/telegraf/*.log
+/opt/so/log/redis/*.log
+{
+    {{ logrotate_conf | indent(width=4) }}
+}
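
Note: a quick way to sanity-check the rendered template on a minion is logrotate's debug mode, which parses the config and reports what it would rotate without touching any files (a sketch; the path is the file.managed target from the state below):

    # Dry run: validate the generated config and show what would be rotated
    logrotate -d /opt/so/conf/log-rotate.conf
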
diff --git a/salt/common/init.sls b/salt/common/init.sls
index 4591499c9..769484ef3 100644
--- a/salt/common/init.sls
+++ b/salt/common/init.sls
@@ -198,6 +198,28 @@ sensorrotateconf:
 
 {% endif %}
 
+commonlogrotatescript:
+  file.managed:
+    - name: /usr/local/bin/common-rotate
+    - source: salt://common/cron/common-rotate
+    - mode: 755
+
+commonlogrotateconf:
+  file.managed:
+    - name: /opt/so/conf/log-rotate.conf
+    - source: salt://common/files/log-rotate.conf
+    - template: jinja
+    - mode: 644
+
+/usr/local/bin/common-rotate:
+  cron.present:
+    - user: root
+    - minute: '1'
+    - hour: '0'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+
 {% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
 # Add config backup
 /usr/sbin/so-config-backup > /dev/null 2>&1:
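
Note: with the schedule above, the cron.present state should translate to a root crontab entry along these lines (illustrative only; Salt brackets the entry with its own "managed by Salt" comment markers):

    # minute hour day-of-month month day-of-week command
    1 0 * * * /usr/local/bin/common-rotate
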
diff --git a/salt/common/tools/sbin/so-sensor-clean b/salt/common/tools/sbin/so-sensor-clean
index 9f295ced5..a7aa64d99 100755
--- a/salt/common/tools/sbin/so-sensor-clean
+++ b/salt/common/tools/sbin/so-sensor-clean
@@ -23,97 +23,104 @@ CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
 LOG="/opt/so/log/sensor_clean.log"
 TODAY=$(date -u "+%Y-%m-%d")
 
-clean () {
-    ## find the oldest Zeek logs directory
-    OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
-    if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]
-    then
-        echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >> $LOG
-        #exit 0
-    else
-        echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >> $LOG
-        rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
-    fi
+clean() {
+    ## find the oldest Zeek logs directory
+    OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
+    if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]; then
+        echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >>$LOG
+        #exit 0
+    else
+        echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >>$LOG
+        rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
+    fi
+
+    ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
+    ## find oldest files in extracted directory and exclude today
+    #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
+    #if [ -z "$OLDEST_EXTRACT" -o "$OLDEST_EXTRACT" == ".." -o "$OLDEST_EXTRACT" == "." ]
+    #then
+    #   echo "$(date) - No old extracted files available to clean up in /nsm/zeek/extracted/complete" >> $LOG
+    #else
+    #   OLDEST_EXTRACT_DATE=`echo $OLDEST_EXTRACT | awk '{print $1}' | cut -d+ -f1`
+    #   OLDEST_EXTRACT_FILE=`echo $OLDEST_EXTRACT | awk '{print $2}'`
+    #   echo "$(date) - Removing extracted files for $OLDEST_EXTRACT_DATE" >> $LOG
+    #   find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' | grep $OLDEST_EXTRACT_DATE | awk '{print $2}' |while read FILE
+    #   do
+    #     echo "$(date) - Removing extracted file: $FILE" >> $LOG
+    #     rm -f "$FILE"
+    #   done
+    #fi
-    ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
-    ## find oldest files in extracted directory and exclude today
-    #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
-    #if [ -z "$OLDEST_EXTRACT" -o "$OLDEST_EXTRACT" == ".." -o "$OLDEST_EXTRACT" == "." ]
-    #then
-    #   echo "$(date) - No old extracted files available to clean up in /nsm/zeek/extracted/complete" >> $LOG
-    #else
-    #   OLDEST_EXTRACT_DATE=`echo $OLDEST_EXTRACT | awk '{print $1}' | cut -d+ -f1`
-    #   OLDEST_EXTRACT_FILE=`echo $OLDEST_EXTRACT | awk '{print $2}'`
-    #   echo "$(date) - Removing extracted files for $OLDEST_EXTRACT_DATE" >> $LOG
-    #   find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' | grep $OLDEST_EXTRACT_DATE | awk '{print $2}' |while read FILE
-    #   do
-    #     echo "$(date) - Removing extracted file: $FILE" >> $LOG
-    #     rm -f "$FILE"
-    #   done
-    #fi
-
-    ## Clean up Zeek extracted files processed by Strelka
-    STRELKA_FILES='/nsm/strelka/processed'
-    OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-    if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]
-    then
-        echo "$(date) - No old files available to clean up in $STRELKA_FILES" >> $LOG
-    else
-        OLDEST_STRELKA_DATE=`echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1`
-        OLDEST_STRELKA_FILE=`echo $OLDEST_STRELKA | awk '{print $2}'`
-        echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >> $LOG
-        find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' |while read FILE
-        do
-            echo "$(date) - Removing file: $FILE" >> $LOG
-            rm -f "$FILE"
-        done
-    fi
+    ## Clean up Zeek extracted files processed by Strelka
+    STRELKA_FILES='/nsm/strelka/processed'
+    OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]; then
+        echo "$(date) - No old files available to clean up in $STRELKA_FILES" >>$LOG
+    else
+        OLDEST_STRELKA_DATE=$(echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_STRELKA_FILE=$(echo $OLDEST_STRELKA | awk '{print $2}')
+        echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >>$LOG
+        find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
-    ## Clean up Suricata log files
-    SURICATA_LOGS='/nsm/suricata'
-    OLDEST_SURICATA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
-    if [ -z "$OLDEST_SURICATA" -o "$OLDEST_SURICATA" == ".." -o "$OLDEST_SURICATA" == "." ]
-    then
-        echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >> $LOG
-    else
-        OLDEST_SURICATA_DATE=`echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1`
-        OLDEST_SURICATA_FILE=`echo $OLDEST_SURICATA | awk '{print $2}'`
-        echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >> $LOG
-        find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' |while read FILE
-        do
-            echo "$(date) - Removing file: $FILE" >> $LOG
-            rm -f "$FILE"
-        done
-    fi
+    ## Clean up Suricata log files
+    SURICATA_LOGS='/nsm/suricata'
+    OLDEST_SURICATA=$(find $SURICATA_LOGS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [[ -z "$OLDEST_SURICATA" ]] || [[ "$OLDEST_SURICATA" == ".." ]] || [[ "$OLDEST_SURICATA" == "." ]]; then
+        echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >>$LOG
+    else
+        OLDEST_SURICATA_DATE=$(echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_SURICATA_FILE=$(echo $OLDEST_SURICATA | awk '{print $2}')
+        echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >>$LOG
+        find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
-    ## Clean up extracted pcaps from Steno
-    PCAPS='/nsm/pcapout'
-    OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-    if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]
-    then
-        echo "$(date) - No old files available to clean up in $PCAPS" >> $LOG
-    else
-        OLDEST_PCAP_DATE=`echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1`
-        OLDEST_PCAP_FILE=`echo $OLDEST_PCAP | awk '{print $2}'`
-        echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >> $LOG
-        find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' |while read FILE
-        do
-            echo "$(date) - Removing file: $FILE" >> $LOG
-            rm -f "$FILE"
-        done
-    fi
+    # Clean Wazuh archives
+    # Slightly different code since we have 2 files to remove (.json and .log)
+    WAZUH_ARCHIVE='/nsm/wazuh/logs/archives'
+    OLDEST_WAZUH=$(find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $1}' | head -n 1)
+    # Make sure we don't delete the current files
+    find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $2}' | head -n 1 >/tmp/files$$
+    if [[ $(wc -l </tmp/files$$) -gt 0 ]]; then
+    echo "$(date) - Removing Wazuh archive files for $OLDEST_WAZUH" >>$LOG
+    while read -r line; do
+        echo "$(date) - Removing file: $line" >>$LOG
+        rm "$line"
+    done </tmp/files$$ >>$LOG
+    fi
+    rm /tmp/files$$
+
+    ## Clean up extracted pcaps from Steno
+    PCAPS='/nsm/pcapout'
+    OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]; then
+        echo "$(date) - No old files available to clean up in $PCAPS" >>$LOG
+    else
+        OLDEST_PCAP_DATE=$(echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_PCAP_FILE=$(echo $OLDEST_PCAP | awk '{print $2}')
+        echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >>$LOG
+        find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
 }
 
 # Check to see if we are already running
 IS_RUNNING=$(ps aux | grep "sensor_clean" | grep -v grep | wc -l)
-[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >> $LOG && exit 0
+[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
 
 if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
-    while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ];
-    do
-        clean
-        CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
-    done
+    while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do
+        clean
+        CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
+    done
 fi
-
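
Note on the Wazuh hunk: the extraction damage here ate two redirections; the `if [[ $(wc -l </tmp/files$$) -gt 0 ]]` guard, its log line, and `done </tmp/files$$ >>$LOG` are reconstructed above, and the echo message referencing $OLDEST_WAZUH is an inferred fill (it is the only use of that variable). The surrounding clean() loop only fires once $SENSOR_DIR usage crosses $CRIT_DISK_USAGE, and the usage check is a plain df pipeline; to reproduce it by hand (a sketch, with /nsm standing in for $SENSOR_DIR):

    # Prints current usage of the sensor partition as a bare integer, e.g. 87
    df -P /nsm | tail -1 | awk '{print $5}' | tr -d %
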
diff --git a/salt/influxdb/init.sls b/salt/influxdb/init.sls
index 6f95cfec2..669c9e9eb 100644
--- a/salt/influxdb/init.sls
+++ b/salt/influxdb/init.sls
@@ -16,6 +16,14 @@ influxconfdir:
     - name: /opt/so/conf/influxdb/etc
     - makedirs: True
 
+influxlogdir:
+  file.directory:
+    - name: /opt/so/log/influxdb
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 influxdbdir:
   file.directory:
     - name: /nsm/influxdb
@@ -36,6 +44,7 @@ so-influxdb:
     - environment:
       - INFLUXDB_HTTP_LOG_ENABLED=false
     - binds:
+      - /opt/so/log/influxdb/:/log:rw
       - /opt/so/conf/influxdb/etc/influxdb.conf:/etc/influxdb/influxdb.conf:ro
       - /nsm/influxdb:/var/lib/influxdb:rw
       - /etc/pki/influxdb.crt:/etc/ssl/influxdb.crt:ro
diff --git a/salt/playbook/init.sls b/salt/playbook/init.sls
index 6e2397347..c78743eb5 100644
--- a/salt/playbook/init.sls
+++ b/salt/playbook/init.sls
@@ -58,6 +58,14 @@ query_updatepluginurls:
     - connection_user: root
     - connection_pass: {{ MYSQLPASS }}
 
+playbooklogdir:
+  file.directory:
+    - name: /opt/so/log/playbook
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 {% if PLAYBOOKPASS == None %}
 
 playbook_password_none:
@@ -73,6 +81,8 @@ so-playbook:
     - image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-playbook:{{ VERSION }}
     - hostname: playbook
     - name: so-playbook
+    - binds:
+      - /opt/so/log/playbook:/playbook/log:rw
     - environment:
       - REDMINE_DB_MYSQL={{ MANAGERIP }}
       - REDMINE_DB_DATABASE=playbook
@@ -83,13 +93,6 @@ so-playbook:
 
 {% endif %}
 
-playbooklogdir:
-  file.directory:
-    - name: /opt/so/log/playbook
-    - user: 939
-    - group: 939
-    - makedirs: True
-
 so-playbooksynccron:
   cron.present:
     - name: /usr/sbin/so-playbook-sync > /opt/so/log/playbook/sync.log 2>&1
diff --git a/setup/so-functions b/setup/so-functions
index 6911016f3..9df9e80cd 100755
--- a/setup/so-functions
+++ b/setup/so-functions
@@ -1271,9 +1271,9 @@ manager_global() {
     "soctopus:"\
     "  playbook:"\
     "    rulesets:"\
-    "      - windows" >> "$global_pillar"\
+    "      - windows"\
     "redis_settings:"\
-    "  redis_maxmemory: 812"\
+    "  redis_maxmemory: 812" >> "$global_pillar"
 
   printf '%s\n' '----' >> "$setup_log" 2>&1
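
Note: after a highstate, the new log bind mounts can be spot-checked from the host (container names as defined in the states above; the -f Go template is standard Docker syntax):

    # Confirm the host log directories are mounted into the containers
    docker inspect -f '{{ .HostConfig.Binds }}' so-influxdb so-playbook
    ls -ld /opt/so/log/influxdb /opt/so/log/playbook
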