From e6cb75ce7e9e00e974e5661b3dad9f66f6c5bc56 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Mon, 5 Oct 2020 13:57:36 -0400
Subject: [PATCH 01/10] [feat] Add common logrotate cron+config

---
 salt/common/cron/common-rotate    |  2 ++
 salt/common/files/log-rotate.conf | 25 +++++++++++++++++++++++++
 salt/common/init.sls              | 21 +++++++++++++++++++++
 3 files changed, 48 insertions(+)
 create mode 100644 salt/common/cron/common-rotate
 create mode 100644 salt/common/files/log-rotate.conf

diff --git a/salt/common/cron/common-rotate b/salt/common/cron/common-rotate
new file mode 100644
index 000000000..55282f23f
--- /dev/null
+++ b/salt/common/cron/common-rotate
@@ -0,0 +1,2 @@
+#!/bin/bash
+logrotate -f /opt/so/conf/log-rotate.conf >/dev/null 2>&1
diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
new file mode 100644
index 000000000..b1be8b868
--- /dev/null
+++ b/salt/common/files/log-rotate.conf
@@ -0,0 +1,25 @@
+/opt/so/log/aptcacher-ng/*.log
+/opt/so/log/idstools/*.log
+/opt/so/log/nginx/*.log
+/opt/so/log/soc/*.log
+/opt/so/log/kratos/*.log
+/opt/so/log/kibana/*.log
+/opt/so/log/elastalert/*.log
+/opt/so/log/soctopus/*.log
+/opt/so/log/mysql/*.log
+/opt/so/log/playbook/*.log
+/opt/so/log/logstash/*.log
+/opt/so/log/filebeat/*.log
+/opt/so/log/telegraf/*.log
+/opt/so/log/redis/*.log
+{
+    daily
+    rotate 2
+    missingok
+    copytruncate
+    nocompress
+    create
+    extension .log
+    dateext
+    dateyesterday
+}
diff --git a/salt/common/init.sls b/salt/common/init.sls
index 298db451b..151df3060 100644
--- a/salt/common/init.sls
+++ b/salt/common/init.sls
@@ -192,6 +192,27 @@ sensorrotateconf:
 
 {% endif %}
 
+commonlogrotatescript:
+  file.managed:
+    - name: /usr/local/bin/common-rotate
+    - source: salt://common/cron/common-rotate
+    - mode: 755
+
+commonlogrotateconf:
+  file.managed:
+    - name: /opt/so/conf/log-rotate.conf
+    - source: salt://common/files/log-rotate.conf
+    - mode: 644
+
+/usr/local/bin/common-rotate:
+cron.present:
+  - user: root
+  - minute: '1'
+  - hour: '0'
+  - daymonth: '*'
+  - month: '*'
+  - dayweek: '*'
+
 {% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
 # Add config backup
 /usr/sbin/so-config-backup > /dev/null 2>&1:

From 5dfd11a01896e7079d1c8ea01f93d3e5314fede3 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Mon, 5 Oct 2020 13:58:49 -0400
Subject: [PATCH 02/10] [feat] Add wazuh archive cleanup + fix indentation

---
 salt/common/tools/sbin/so-sensor-clean | 173 +++++++++++++------------
 1 file changed, 90 insertions(+), 83 deletions(-)

diff --git a/salt/common/tools/sbin/so-sensor-clean b/salt/common/tools/sbin/so-sensor-clean
index 9f295ced5..dd24bab07 100755
--- a/salt/common/tools/sbin/so-sensor-clean
+++ b/salt/common/tools/sbin/so-sensor-clean
@@ -23,97 +23,104 @@ CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
 LOG="/opt/so/log/sensor_clean.log"
 TODAY=$(date -u "+%Y-%m-%d")
 
-clean () {
-  ## find the oldest Zeek logs directory
-  OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
-  if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]
-  then
-    echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >> $LOG
-    #exit 0
-  else
-    echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >> $LOG
-    rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
-  fi
+clean() {
+    ## find the oldest Zeek logs directory
+    OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
+    if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]; then
+        echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >>$LOG
+        #exit 0
+    else
+        echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >>$LOG
+        rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
+    fi
+    ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
+    ## find oldest files in extracted directory and exclude today
+    #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
+    #if [ -z "$OLDEST_EXTRACT" -o "$OLDEST_EXTRACT" == ".." -o "$OLDEST_EXTRACT" == "." ]
+    #then
+    #  echo "$(date) - No old extracted files available to clean up in /nsm/zeek/extracted/complete" >> $LOG
+    #else
+    #  OLDEST_EXTRACT_DATE=`echo $OLDEST_EXTRACT | awk '{print $1}' | cut -d+ -f1`
+    #  OLDEST_EXTRACT_FILE=`echo $OLDEST_EXTRACT | awk '{print $2}'`
+    #  echo "$(date) - Removing extracted files for $OLDEST_EXTRACT_DATE" >> $LOG
+    #  find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' | grep $OLDEST_EXTRACT_DATE | awk '{print $2}' |while read FILE
+    #  do
+    #    echo "$(date) - Removing extracted file: $FILE" >> $LOG
+    #    rm -f "$FILE"
+    #  done
+    #fi
 
-  ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
-  ## find oldest files in extracted directory and exclude today
-  #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
-  #if [ -z "$OLDEST_EXTRACT" -o "$OLDEST_EXTRACT" == ".." -o "$OLDEST_EXTRACT" == "." ]
-  #then
-  #  echo "$(date) - No old extracted files available to clean up in /nsm/zeek/extracted/complete" >> $LOG
-  #else
-  #  OLDEST_EXTRACT_DATE=`echo $OLDEST_EXTRACT | awk '{print $1}' | cut -d+ -f1`
-  #  OLDEST_EXTRACT_FILE=`echo $OLDEST_EXTRACT | awk '{print $2}'`
-  #  echo "$(date) - Removing extracted files for $OLDEST_EXTRACT_DATE" >> $LOG
-  #  find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' | grep $OLDEST_EXTRACT_DATE | awk '{print $2}' |while read FILE
-  #  do
-  #    echo "$(date) - Removing extracted file: $FILE" >> $LOG
-  #    rm -f "$FILE"
-  #  done
-  #fi
-
-  ## Clean up Zeek extracted files processed by Strelka
-  STRELKA_FILES='/nsm/strelka/processed'
-  OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-  if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $STRELKA_FILES" >> $LOG
-  else
-    OLDEST_STRELKA_DATE=`echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_STRELKA_FILE=`echo $OLDEST_STRELKA | awk '{print $2}'`
-    echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >> $LOG
-    find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
-      rm -f "$FILE"
-    done
-  fi
+    ## Clean up Zeek extracted files processed by Strelka
+    STRELKA_FILES='/nsm/strelka/processed'
+    OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]; then
+        echo "$(date) - No old files available to clean up in $STRELKA_FILES" >>$LOG
+    else
+        OLDEST_STRELKA_DATE=$(echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_STRELKA_FILE=$(echo $OLDEST_STRELKA | awk '{print $2}')
+        echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >>$LOG
+        find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
 
-  ## Clean up Suricata log files
-  SURICATA_LOGS='/nsm/suricata'
-  OLDEST_SURICATA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
-  if [ -z "$OLDEST_SURICATA" -o "$OLDEST_SURICATA" == ".." -o "$OLDEST_SURICATA" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >> $LOG
-  else
-    OLDEST_SURICATA_DATE=`echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_SURICATA_FILE=`echo $OLDEST_SURICATA | awk '{print $2}'`
-    echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >> $LOG
-    find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
-      rm -f "$FILE"
-    done
-  fi
+    ## Clean up Suricata log files
+    SURICATA_LOGS='/nsm/suricata'
+    OLDEST_SURICATA=$(find $SURICATA_LOGS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [[ -z "$OLDEST_SURICATA" ]] || [[ "$OLDEST_SURICATA" == ".." ]] || [[ "$OLDEST_SURICATA" == "." ]]; then
+        echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >>$LOG
+    else
+        OLDEST_SURICATA_DATE=$(echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_SURICATA_FILE=$(echo $OLDEST_SURICATA | awk '{print $2}')
+        echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >>$LOG
+        find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
 
-  ## Clean up extracted pcaps from Steno
-  PCAPS='/nsm/pcapout'
-  OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-  if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $PCAPS" >> $LOG
-  else
-    OLDEST_PCAP_DATE=`echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_PCAP_FILE=`echo $OLDEST_PCAP | awk '{print $2}'`
-    echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >> $LOG
-    find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
-      rm -f "$FILE"
-    done
-  fi
+    # Clean Wazuh archives
+    # Slightly different code since we have 2 files to remove (.json and .log)
+    WAZUH_ARCHIVE='/nsm/wazuh/logs/archives'
+    OLDEST_WAZUH=$(find $WAZUH_ARCHIVE -type f ! -name "archives.json" ! -name "archives.log" -printf "%T+\t%p\n" | sort -n | awk '{print $1}' | head -n 1)
+    # Make sure we don't delete the current files
+    find $WAZUH_ARCHIVE -type f ! -name "archives.json" ! -name "archives.log" -printf "%T+\t%p\n" | sort -n | awk '{print $2}' | head -n 2 >/tmp/files$$
+    if [[ $(wc -l >$LOG
+    while read -r line; do
+        echo "$(date) - Removing file: $line" >>$LOG
+        rm "$line"
+    done >$LOG
+    fi
+    rm /tmp/files$$
+
+    ## Clean up extracted pcaps from Steno
+    PCAPS='/nsm/pcapout'
+    OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+    if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]; then
+        echo "$(date) - No old files available to clean up in $PCAPS" >>$LOG
+    else
+        OLDEST_PCAP_DATE=$(echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1)
+        OLDEST_PCAP_FILE=$(echo $OLDEST_PCAP | awk '{print $2}')
+        echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >>$LOG
+        find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' | while read FILE; do
+            echo "$(date) - Removing file: $FILE" >>$LOG
+            rm -f "$FILE"
+        done
+    fi
 }
 
 # Check to see if we are already running
 IS_RUNNING=$(ps aux | grep "sensor_clean" | grep -v grep | wc -l)
-[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >> $LOG && exit 0
+[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
 
 if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
-  while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ];
-  do
-    clean
-    CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
-  done
+    while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do
+        clean
+        CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
+    done
 fi
-
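Every cleanup pass added in this patch follows the same oldest-first pattern: list files with their mtime, sort, take the oldest date, then remove everything stamped with that date. A minimal standalone sketch of that pattern follows; the TARGET path and the echo-only dry run are assumptions for illustration, not part of the patch.

    #!/bin/bash
    # Oldest-first cleanup sketch (dry run): prints what so-sensor-clean would remove.
    TARGET='/nsm/example'   # assumed path for illustration
    OLDEST=$(find "$TARGET" -type f -printf '%T+ %p\n' | sort -n | head -n 1)
    OLDEST_DATE=$(echo "$OLDEST" | awk '{print $1}' | cut -d+ -f1)
    find "$TARGET" -type f -printf '%T+ %p\n' | grep "$OLDEST_DATE" | awk '{print $2}' | while read -r FILE; do
        echo "would remove: $FILE"   # the real script calls rm -f "$FILE" here
    done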
From 94f15c63cebaf57e33e55f7ad4e10e414ad465d7 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 13:21:37 -0400
Subject: [PATCH 03/10] [fix] Correct indent in common init.sls

---
 salt/common/init.sls | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/salt/common/init.sls b/salt/common/init.sls
index 151df3060..253c57fcb 100644
--- a/salt/common/init.sls
+++ b/salt/common/init.sls
@@ -205,13 +205,13 @@ commonlogrotateconf:
     - mode: 644
 
 /usr/local/bin/common-rotate:
-cron.present:
-  - user: root
-  - minute: '1'
-  - hour: '0'
-  - daymonth: '*'
-  - month: '*'
-  - dayweek: '*'
+  cron.present:
+    - user: root
+    - minute: '1'
+    - hour: '0'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
 
 {% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
 # Add config backup

From 9737b0167628280e5a0971d70e6fcd0459416929 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 13:22:44 -0400
Subject: [PATCH 04/10] [feat] Move logrotate configuration settings to pillar

---
 pillar/logrotate/init.sls         | 11 +++++++++++
 pillar/top.sls                    |  1 +
 salt/common/files/log-rotate.conf | 12 +++---------
 3 files changed, 15 insertions(+), 9 deletions(-)
 create mode 100644 pillar/logrotate/init.sls

diff --git a/pillar/logrotate/init.sls b/pillar/logrotate/init.sls
new file mode 100644
index 000000000..85b91640f
--- /dev/null
+++ b/pillar/logrotate/init.sls
@@ -0,0 +1,11 @@
+logrotate:
+  conf: |
+    daily
+    rotate 14
+    missingok
+    copytruncate
+    nocompress
+    create
+    extension .log
+    dateext
+    dateyesterday
diff --git a/pillar/top.sls b/pillar/top.sls
index 73d66ef2a..488286c6a 100644
--- a/pillar/top.sls
+++ b/pillar/top.sls
@@ -1,6 +1,7 @@
 base:
   '*':
     - patch.needs_restarting
+    - logrotate
 
   '*_eval or *_helix or *_heavynode or *_sensor or *_standalone or *_import':
     - match: compound
diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
index b1be8b868..5cd444757 100644
--- a/salt/common/files/log-rotate.conf
+++ b/salt/common/files/log-rotate.conf
@@ -1,3 +1,5 @@
+{%- set logrotate_conf = salt['pillar.get']['logrotate:conf'] %}
+
 /opt/so/log/aptcacher-ng/*.log
 /opt/so/log/idstools/*.log
 /opt/so/log/nginx/*.log
@@ -13,13 +15,5 @@
 /opt/so/log/telegraf/*.log
 /opt/so/log/redis/*.log
 {
-    daily
-    rotate 2
-    missingok
-    copytruncate
-    nocompress
-    create
-    extension .log
-    dateext
-    dateyesterday
+    {{ logrotate_conf }}
 }

From bd4292711e188630ba5e53bcc1822be781e3be92 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 13:23:26 -0400
Subject: [PATCH 05/10] [fix] Redirect missing lines to global pillar

---
 setup/so-functions | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup/so-functions b/setup/so-functions
index 945176d57..0f977cd00 100755
--- a/setup/so-functions
+++ b/setup/so-functions
@@ -1272,9 +1272,9 @@ manager_global() {
     "soctopus:"\
     "  playbook:"\
     "    rulesets:"\
-    "    - windows" >> "$global_pillar"
+    "    - windows"\
     "redis_settings:"\
-    "  redis_maxmemory: 812"\
+    "  redis_maxmemory: 812" >> "$global_pillar"
 
   printf '%s\n' '----' >> "$setup_log" 2>&1
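Patch 05 relies on the fact that printf '%s\n' writes each of its arguments on its own line, and that a single >> redirect at the end of the argument list applies to the whole command, so every line lands in the target file. A tiny sketch of the same pattern, with an output path made up for illustration:

    printf '%s\n' \
      "redis_settings:" \
      "  redis_maxmemory: 812" >> /tmp/example_pillar.sls   # both lines are appended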
From 27351fa5205dfcbdb2f87d13e2d19738f7a2e28b Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 14:49:01 -0400
Subject: [PATCH 06/10] [fix] Correct jinja syntax + indent all lines

---
 salt/common/files/log-rotate.conf | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
index 5cd444757..56fef6846 100644
--- a/salt/common/files/log-rotate.conf
+++ b/salt/common/files/log-rotate.conf
@@ -1,4 +1,4 @@
-{%- set logrotate_conf = salt['pillar.get']['logrotate:conf'] %}
+{%- set logrotate_conf = salt['pillar.get']('logrotate:conf') %}
 
 /opt/so/log/aptcacher-ng/*.log
 /opt/so/log/idstools/*.log
@@ -15,5 +15,5 @@
 /opt/so/log/telegraf/*.log
 /opt/so/log/redis/*.log
 {
-    {{ logrotate_conf }}
+    {{ logrotate_conf | indent(width=4) }}
 }

From b64a91f13c6b85171bed465dede1e68733411081 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 14:49:23 -0400
Subject: [PATCH 07/10] [refactor] Remove nocompress option

---
 pillar/logrotate/init.sls | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/pillar/logrotate/init.sls b/pillar/logrotate/init.sls
index 85b91640f..c00c1787e 100644
--- a/pillar/logrotate/init.sls
+++ b/pillar/logrotate/init.sls
@@ -4,8 +4,7 @@ logrotate:
     rotate 14
     missingok
     copytruncate
-    nocompress
     create
     extension .log
     dateext
-    dateyesterday
+    dateyesterday
\ No newline at end of file

From d84f85335e736a3dbc5be51f9e0db5a8e7e2a9d8 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Tue, 6 Oct 2020 17:18:39 -0400
Subject: [PATCH 08/10] [fix] Add jinja option, missing log dirs, compress option

---
 pillar/logrotate/init.sls         | 1 +
 salt/common/files/log-rotate.conf | 3 +++
 salt/common/init.sls              | 1 +
 3 files changed, 5 insertions(+)

diff --git a/pillar/logrotate/init.sls b/pillar/logrotate/init.sls
index c00c1787e..1b27ffca9 100644
--- a/pillar/logrotate/init.sls
+++ b/pillar/logrotate/init.sls
@@ -4,6 +4,7 @@ logrotate:
     rotate 14
     missingok
     copytruncate
+    compress
     create
     extension .log
     dateext
diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
index 56fef6846..b2f972b7f 100644
--- a/salt/common/files/log-rotate.conf
+++ b/salt/common/files/log-rotate.conf
@@ -8,6 +8,9 @@
 /opt/so/log/kibana/*.log
 /opt/so/log/elastalert/*.log
 /opt/so/log/soctopus/*.log
+/opt/so/log/curator/*.log
+/opt/so/log/fleet/*.log
+/opt/so/log/suricata/*.log
 /opt/so/log/mysql/*.log
 /opt/so/log/playbook/*.log
 /opt/so/log/logstash/*.log
diff --git a/salt/common/init.sls b/salt/common/init.sls
index 253c57fcb..becfef512 100644
--- a/salt/common/init.sls
+++ b/salt/common/init.sls
@@ -202,6 +202,7 @@ commonlogrotateconf:
   file.managed:
     - name: /opt/so/conf/log-rotate.conf
     - source: salt://common/files/log-rotate.conf
+    - template: jinja
    - mode: 644
 
 /usr/local/bin/common-rotate:
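With the logrotate pillar in place and `- template: jinja` set on commonlogrotateconf, Salt renders log-rotate.conf by pasting the logrotate:conf block (indented by the indent filter) between the braces. A quick way to sanity-check the rendered file on a node, once Salt has written it, is logrotate's debug mode, which parses the config and reports what it would rotate without touching any logs:

    logrotate -d /opt/so/conf/log-rotate.conf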
From e1d8f578c2ca2173073b77f9296742c6dc8d741c Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Thu, 8 Oct 2020 12:35:14 -0400
Subject: [PATCH 09/10] [feat] Add log dirs for playbook + influxdb

---
 salt/common/files/log-rotate.conf |  1 +
 salt/influxdb/init.sls            |  9 +++++++++
 salt/playbook/init.sls            | 17 ++++++++++-------
 3 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/salt/common/files/log-rotate.conf b/salt/common/files/log-rotate.conf
index b2f972b7f..d383981cd 100644
--- a/salt/common/files/log-rotate.conf
+++ b/salt/common/files/log-rotate.conf
@@ -6,6 +6,7 @@
 /opt/so/log/soc/*.log
 /opt/so/log/kratos/*.log
 /opt/so/log/kibana/*.log
+/opt/so/log/influxdb/*.log
 /opt/so/log/elastalert/*.log
 /opt/so/log/soctopus/*.log
 /opt/so/log/curator/*.log
diff --git a/salt/influxdb/init.sls b/salt/influxdb/init.sls
index 6f95cfec2..669c9e9eb 100644
--- a/salt/influxdb/init.sls
+++ b/salt/influxdb/init.sls
@@ -16,6 +16,14 @@ influxconfdir:
     - name: /opt/so/conf/influxdb/etc
     - makedirs: True
 
+influxlogdir:
+  file.directory:
+    - name: /opt/so/log/influxdb
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 influxdbdir:
   file.directory:
     - name: /nsm/influxdb
@@ -36,6 +44,7 @@ so-influxdb:
     - environment:
       - INFLUXDB_HTTP_LOG_ENABLED=false
     - binds:
+      - /opt/so/log/influxdb/:/log:rw
       - /opt/so/conf/influxdb/etc/influxdb.conf:/etc/influxdb/influxdb.conf:ro
       - /nsm/influxdb:/var/lib/influxdb:rw
       - /etc/pki/influxdb.crt:/etc/ssl/influxdb.crt:ro
diff --git a/salt/playbook/init.sls b/salt/playbook/init.sls
index 6e2397347..c78743eb5 100644
--- a/salt/playbook/init.sls
+++ b/salt/playbook/init.sls
@@ -58,6 +58,14 @@ query_updatepluginurls:
     - connection_user: root
     - connection_pass: {{ MYSQLPASS }}
 
+playbooklogdir:
+  file.directory:
+    - name: /opt/so/log/playbook
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 {% if PLAYBOOKPASS == None %}
 
 playbook_password_none:
@@ -73,6 +81,8 @@ so-playbook:
     - image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-playbook:{{ VERSION }}
     - hostname: playbook
     - name: so-playbook
+    - binds:
+      - /opt/so/log/playbook:/playbook/log:rw
     - environment:
       - REDMINE_DB_MYSQL={{ MANAGERIP }}
       - REDMINE_DB_DATABASE=playbook
@@ -83,13 +93,6 @@ so-playbook:
 
 {% endif %}
 
-playbooklogdir:
-  file.directory:
-    - name: /opt/so/log/playbook
-    - user: 939
-    - group: 939
-    - makedirs: True
-
 so-playbooksynccron:
   cron.present:
     - name: /usr/sbin/so-playbook-sync > /opt/so/log/playbook/sync.log 2>&1

From 2ad3f9da11562d436bbf3f64f3ce2c1b8c9e95b9 Mon Sep 17 00:00:00 2001
From: William Wernert
Date: Thu, 8 Oct 2020 12:41:51 -0400
Subject: [PATCH 10/10] [fix] Wazuh not saving .log files anymore, only check .json files

---
 salt/common/tools/sbin/so-sensor-clean | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/salt/common/tools/sbin/so-sensor-clean b/salt/common/tools/sbin/so-sensor-clean
index dd24bab07..a7aa64d99 100755
--- a/salt/common/tools/sbin/so-sensor-clean
+++ b/salt/common/tools/sbin/so-sensor-clean
@@ -84,9 +84,9 @@ clean() {
     # Clean Wazuh archives
     # Slightly different code since we have 2 files to remove (.json and .log)
     WAZUH_ARCHIVE='/nsm/wazuh/logs/archives'
-    OLDEST_WAZUH=$(find $WAZUH_ARCHIVE -type f ! -name "archives.json" ! -name "archives.log" -printf "%T+\t%p\n" | sort -n | awk '{print $1}' | head -n 1)
+    OLDEST_WAZUH=$(find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $1}' | head -n 1)
     # Make sure we don't delete the current files
-    find $WAZUH_ARCHIVE -type f ! -name "archives.json" ! -name "archives.log" -printf "%T+\t%p\n" | sort -n | awk '{print $2}' | head -n 2 >/tmp/files$$
+    find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $2}' | head -n 1 >/tmp/files$$
     if [[ $(wc -l >$LOG
     while read -r line; do