Merge pull request #1487 from Security-Onion-Solutions/feature/rotate-logs

Feature/rotate logs
Authored by William Wernert on 2020-10-08 12:48:01 -04:00; committed via GitHub.
9 changed files with 170 additions and 92 deletions

pillar/logrotate/init.sls (new file)

@@ -0,0 +1,11 @@
+logrotate:
+  conf: |
+    daily
+    rotate 14
+    missingok
+    copytruncate
+    compress
+    create
+    extension .log
+    dateext
+    dateyesterday
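These are stock logrotate directives: rotate daily, keep 14 generations, compress rotated logs, and truncate the live file in place (copytruncate) so long-running container processes keep writing to the same file handle. With dateext plus dateyesterday, rotated files are stamped with the previous day's date; assuming logrotate's default dateext pattern (-%Y%m%d) and the extension directive above, rotating soc.log on 2020-10-08 would yield roughly:

    soc-20201007.log.gz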


@@ -1,6 +1,7 @@
 base:
   '*':
     - patch.needs_restarting
+    - logrotate
   '*_eval or *_helix or *_heavynode or *_sensor or *_standalone or *_import':
     - match: compound
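Adding logrotate under the '*' match makes the logrotate:conf pillar value above available to every minion, so all node types render the same rotation policy.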


@@ -0,0 +1,2 @@
+#!/bin/bash
+logrotate -f /opt/so/conf/log-rotate.conf >/dev/null 2>&1
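Because -f forces rotation regardless of the daily criteria, this wrapper pairs with the cron entry added below to guarantee one rotation pass per day. When testing config changes, logrotate's debug mode is a safe dry run; a minimal sketch:

    # Parse the config and report what would rotate, without touching any files
    logrotate -d /opt/so/conf/log-rotate.conf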


@@ -0,0 +1,23 @@
+{%- set logrotate_conf = salt['pillar.get']('logrotate:conf') %}
+/opt/so/log/aptcacher-ng/*.log
+/opt/so/log/idstools/*.log
+/opt/so/log/nginx/*.log
+/opt/so/log/soc/*.log
+/opt/so/log/kratos/*.log
+/opt/so/log/kibana/*.log
+/opt/so/log/influxdb/*.log
+/opt/so/log/elastalert/*.log
+/opt/so/log/soctopus/*.log
+/opt/so/log/curator/*.log
+/opt/so/log/fleet/*.log
+/opt/so/log/suricata/*.log
+/opt/so/log/mysql/*.log
+/opt/so/log/playbook/*.log
+/opt/so/log/logstash/*.log
+/opt/so/log/filebeat/*.log
+/opt/so/log/telegraf/*.log
+/opt/so/log/redis/*.log
+{
+{{ logrotate_conf | indent(width=4) }}
+}
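For reference, with the pillar defaults above the rendered /opt/so/conf/log-rotate.conf comes out roughly as follows (path list abbreviated here): a single shared directive block applies to every glob.

    /opt/so/log/aptcacher-ng/*.log
    /opt/so/log/idstools/*.log
    ...
    /opt/so/log/redis/*.log
    {
        daily
        rotate 14
        missingok
        copytruncate
        compress
        create
        extension .log
        dateext
        dateyesterday
    }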


@@ -198,6 +198,28 @@ sensorrotateconf:
 {% endif %}
+
+commonlogrotatescript:
+  file.managed:
+    - name: /usr/local/bin/common-rotate
+    - source: salt://common/cron/common-rotate
+    - mode: 755
+
+commonlogrotateconf:
+  file.managed:
+    - name: /opt/so/conf/log-rotate.conf
+    - source: salt://common/files/log-rotate.conf
+    - template: jinja
+    - mode: 644
+
+/usr/local/bin/common-rotate:
+  cron.present:
+    - user: root
+    - minute: '1'
+    - hour: '0'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
 {% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
 # Add config backup
 /usr/sbin/so-config-backup > /dev/null 2>&1:
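Assuming Salt's default cron.present behavior, this renders a single entry in root's crontab that runs the wrapper one minute past midnight, just after the date boundary that dateyesterday keys off of; roughly:

    1 0 * * * /usr/local/bin/common-rotate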


@@ -23,19 +23,17 @@ CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
 LOG="/opt/so/log/sensor_clean.log"
 TODAY=$(date -u "+%Y-%m-%d")
-clean () {
+clean() {
   ## find the oldest Zeek logs directory
   OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
-  if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]
-  then
-    echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >> $LOG
+  if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]; then
+    echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >>$LOG
     #exit 0
   else
-    echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >> $LOG
+    echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >>$LOG
     rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
   fi
   ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
   ## find oldest files in extracted directory and exclude today
   #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
@@ -55,51 +53,62 @@ clean () {
   ## Clean up Zeek extracted files processed by Strelka
   STRELKA_FILES='/nsm/strelka/processed'
-  OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-  if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $STRELKA_FILES" >> $LOG
+  OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+  if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]; then
+    echo "$(date) - No old files available to clean up in $STRELKA_FILES" >>$LOG
   else
-    OLDEST_STRELKA_DATE=`echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_STRELKA_FILE=`echo $OLDEST_STRELKA | awk '{print $2}'`
-    echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >> $LOG
-    find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
+    OLDEST_STRELKA_DATE=$(echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1)
+    OLDEST_STRELKA_FILE=$(echo $OLDEST_STRELKA | awk '{print $2}')
+    echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >>$LOG
+    find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' | while read FILE; do
+      echo "$(date) - Removing file: $FILE" >>$LOG
       rm -f "$FILE"
     done
   fi

   ## Clean up Suricata log files
   SURICATA_LOGS='/nsm/suricata'
-  OLDEST_SURICATA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
-  if [ -z "$OLDEST_SURICATA" -o "$OLDEST_SURICATA" == ".." -o "$OLDEST_SURICATA" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >> $LOG
+  OLDEST_SURICATA=$(find $SURICATA_LOGS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+  if [[ -z "$OLDEST_SURICATA" ]] || [[ "$OLDEST_SURICATA" == ".." ]] || [[ "$OLDEST_SURICATA" == "." ]]; then
+    echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >>$LOG
   else
-    OLDEST_SURICATA_DATE=`echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_SURICATA_FILE=`echo $OLDEST_SURICATA | awk '{print $2}'`
-    echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >> $LOG
-    find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
+    OLDEST_SURICATA_DATE=$(echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1)
+    OLDEST_SURICATA_FILE=$(echo $OLDEST_SURICATA | awk '{print $2}')
+    echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >>$LOG
+    find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' | while read FILE; do
+      echo "$(date) - Removing file: $FILE" >>$LOG
       rm -f "$FILE"
     done
   fi
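Note the bug fix in the Suricata block: the old code computed OLDEST_SURICATA by scanning $STRELKA_FILES, while the new code correctly scans $SURICATA_LOGS.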
+  # Clean Wazuh archives
+  # Slightly different code since we have 2 files to remove (.json and .log)
+  WAZUH_ARCHIVE='/nsm/wazuh/logs/archives'
+  OLDEST_WAZUH=$(find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $1}' | head -n 1)
+  # Make sure we don't delete the current files
+  find $WAZUH_ARCHIVE -type f ! -name "archives.json" -printf "%T+\t%p\n" | sort -n | awk '{print $2}' | head -n 1 >/tmp/files$$
+  if [[ $(wc -l </tmp/files$$) -ge 1 ]]; then
+    echo "$(date) - Removing logs for $OLDEST_WAZUH" >>$LOG
+    while read -r line; do
+      echo "$(date) - Removing file: $line" >>$LOG
+      rm "$line"
+    done </tmp/files$$
+  else
+    echo "$(date) - No old files available to clean up in $WAZUH_ARCHIVE" >>$LOG
+  fi
+  rm /tmp/files$$
   ## Clean up extracted pcaps from Steno
   PCAPS='/nsm/pcapout'
-  OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
-  if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]
-  then
-    echo "$(date) - No old files available to clean up in $PCAPS" >> $LOG
+  OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
+  if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]; then
+    echo "$(date) - No old files available to clean up in $PCAPS" >>$LOG
   else
-    OLDEST_PCAP_DATE=`echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1`
-    OLDEST_PCAP_FILE=`echo $OLDEST_PCAP | awk '{print $2}'`
-    echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >> $LOG
-    find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' |while read FILE
-    do
-      echo "$(date) - Removing file: $FILE" >> $LOG
+    OLDEST_PCAP_DATE=$(echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1)
+    OLDEST_PCAP_FILE=$(echo $OLDEST_PCAP | awk '{print $2}')
+    echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >>$LOG
+    find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' | while read FILE; do
+      echo "$(date) - Removing file: $FILE" >>$LOG
       rm -f "$FILE"
     done
   fi
@@ -107,13 +116,11 @@ clean () {
 # Check to see if we are already running
 IS_RUNNING=$(ps aux | grep "sensor_clean" | grep -v grep | wc -l)
-[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >> $LOG && exit 0
+[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
 if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
-  while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ];
-  do
+  while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do
     clean
     CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
   done
 fi
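The driver loop keeps calling clean() until usage falls to the threshold; the percentage comes from POSIX df output. A standalone sketch of that extraction, assuming /nsm as the monitored filesystem and 90 as the critical threshold:

    # Column 5 of `df -P` is "Use%"; tr strips the % so bash can compare numerically
    CUR_USAGE=$(df -P /nsm | tail -1 | awk '{print $5}' | tr -d %)
    if [ "$CUR_USAGE" -gt 90 ]; then
      echo "disk usage at ${CUR_USAGE}%, cleaning..."
    fi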


@@ -16,6 +16,14 @@ influxconfdir:
     - name: /opt/so/conf/influxdb/etc
     - makedirs: True

+influxlogdir:
+  file.directory:
+    - name: /opt/so/log/influxdb
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 influxdbdir:
   file.directory:
     - name: /nsm/influxdb
@@ -36,6 +44,7 @@ so-influxdb:
     - environment:
       - INFLUXDB_HTTP_LOG_ENABLED=false
     - binds:
+      - /opt/so/log/influxdb/:/log:rw
       - /opt/so/conf/influxdb/etc/influxdb.conf:/etc/influxdb/influxdb.conf:ro
       - /nsm/influxdb:/var/lib/influxdb:rw
       - /etc/pki/influxdb.crt:/etc/ssl/influxdb.crt:ro
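Each bind is a host:container:mode mapping, so the new entry surfaces InfluxDB's log output at /opt/so/log/influxdb on the host, where the log-rotate.conf glob above can reach it. An illustrative (not literal) docker equivalent:

    docker run ... \
      -v /opt/so/log/influxdb/:/log:rw \
      -v /opt/so/conf/influxdb/etc/influxdb.conf:/etc/influxdb/influxdb.conf:ro \
      -v /nsm/influxdb:/var/lib/influxdb:rw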


@@ -58,6 +58,14 @@ query_updatepluginurls:
     - connection_user: root
     - connection_pass: {{ MYSQLPASS }}

+playbooklogdir:
+  file.directory:
+    - name: /opt/so/log/playbook
+    - dir_mode: 775
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 {% if PLAYBOOKPASS == None %}
 playbook_password_none:
@@ -73,6 +81,8 @@ so-playbook:
     - image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-playbook:{{ VERSION }}
     - hostname: playbook
     - name: so-playbook
+    - binds:
+      - /opt/so/log/playbook:/playbook/log:rw
     - environment:
       - REDMINE_DB_MYSQL={{ MANAGERIP }}
       - REDMINE_DB_DATABASE=playbook
@@ -83,13 +93,6 @@ so-playbook:
 {% endif %}

-playbooklogdir:
-  file.directory:
-    - name: /opt/so/log/playbook
-    - user: 939
-    - group: 939
-    - makedirs: True
-
 so-playbooksynccron:
   cron.present:
     - name: /usr/sbin/so-playbook-sync > /opt/so/log/playbook/sync.log 2>&1
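Net effect in this file: the playbooklogdir state moves above the so-playbook container definition and gains an explicit dir_mode: 775, so the host directory exists with the right ownership before Docker bind-mounts it into the container as /playbook/log.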


@@ -1271,9 +1271,9 @@ manager_global() {
"soctopus:"\ "soctopus:"\
" playbook:"\ " playbook:"\
" rulesets:"\ " rulesets:"\
" - windows" >> "$global_pillar"\ " - windows"\
"redis_settings:"\ "redis_settings:"\
" redis_maxmemory: 812"\ " redis_maxmemory: 812" >> "$global_pillar"
printf '%s\n' '----' >> "$setup_log" 2>&1 printf '%s\n' '----' >> "$setup_log" 2>&1
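The change here is one of command structure: the old version redirected into "$global_pillar" in the middle of the backslash-continued argument list and left a trailing continuation on the redis_maxmemory line, which ran the command into whatever followed; the new version terminates the list cleanly, with the redirection as the final word, so the whole block (redis_settings included) is appended in one well-formed command.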