Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-19 15:33:06 +01:00)
Merge branch 'dev' into feature/setup

# Conflicts:
#	salt/thehive/scripts/cortex_init
#	salt/thehive/scripts/hive_init
#	setup/so-functions
#	setup/so-whiptail
@@ -1,3 +1,5 @@
+{% set role = grains.id.split('_') | last %}
+
# Add socore Group
socoregroup:
  group.present:
@@ -131,3 +133,15 @@ utilsyncscripts:
    - file_mode: 755
    - template: jinja
    - source: salt://common/tools/sbin
+
+{% if role in ['eval', 'standalone', 'sensor', 'heavynode'] %}
+# Add sensor cleanup
+/usr/sbin/so-sensor-clean:
+  cron.present:
+    - user: root
+    - minute: '*'
+    - hour: '*'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+{% endif %}
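For reference, the cron.present state above should render an every-minute entry in root's crontab, equivalent to the following sketch (assuming Salt's standard crontab handling):

* * * * * /usr/sbin/so-sensor-clean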
@@ -18,14 +18,14 @@
  }
},grain='id', merge=salt['pillar.get']('docker')) %}

-{% if role in ['eval', 'mastersearch', 'master', 'standalone'] %}
-{{ append_containers('master', 'grafana', 0) }}
-{{ append_containers('static', 'fleet_master', 0) }}
-{{ append_containers('master', 'wazuh', 0) }}
-{{ append_containers('master', 'thehive', 0) }}
-{{ append_containers('master', 'playbook', 0) }}
-{{ append_containers('master', 'freq', 0) }}
-{{ append_containers('master', 'domainstats', 0) }}
+{% if role in ['eval', 'managersearch', 'manager', 'standalone'] %}
+{{ append_containers('manager', 'grafana', 0) }}
+{{ append_containers('static', 'fleet_manager', 0) }}
+{{ append_containers('manager', 'wazuh', 0) }}
+{{ append_containers('manager', 'thehive', 0) }}
+{{ append_containers('manager', 'playbook', 0) }}
+{{ append_containers('manager', 'freq', 0) }}
+{{ append_containers('manager', 'domainstats', 0) }}
{% endif %}

{% if role in ['eval', 'heavynode', 'sensor', 'standalone'] %}
@@ -37,7 +37,7 @@
{% endif %}

{% if role == 'searchnode' %}
-{{ append_containers('master', 'wazuh', 0) }}
+{{ append_containers('manager', 'wazuh', 0) }}
{% endif %}

{% if role == 'sensor' %}
@@ -11,7 +11,7 @@ bro_logs_enabled() {

}

-whiptail_master_adv_service_brologs() {
+whiptail_manager_adv_service_brologs() {

  BLOGS=$(whiptail --title "Security Onion Setup" --checklist "Please Select Logs to Send:" 24 78 12 \
  "conn" "Connection Logging" ON \
@@ -54,5 +54,5 @@ whiptail_master_adv_service_brologs() {
  "x509" "x.509 Logs" ON 3>&1 1>&2 2>&3 )
}

-whiptail_master_adv_service_brologs
+whiptail_manager_adv_service_brologs
bro_logs_enabled
@@ -21,13 +21,13 @@ got_root(){
  fi
}

-master_check() {
-  # Check to see if this is a master
-  MASTERCHECK=$(cat /etc/salt/grains | grep role | awk '{print $2}')
-  if [ $MASTERCHECK == 'so-eval' ] || [ $MASTERCHECK == 'so-master' ] || [ $MASTERCHECK == 'so-mastersearch' ] || [ $MASTERCHECK == 'so-standalone' ] || [ $MASTERCHECK == 'so-helix' ]; then
-    echo "This is a master. We can proceed"
+manager_check() {
+  # Check to see if this is a manager
+  MANAGERCHECK=$(cat /etc/salt/grains | grep role | awk '{print $2}')
+  if [ $MANAGERCHECK == 'so-eval' ] || [ $MANAGERCHECK == 'so-manager' ] || [ $MANAGERCHECK == 'so-managersearch' ] || [ $MANAGERCHECK == 'so-standalone' ] || [ $MANAGERCHECK == 'so-helix' ]; then
+    echo "This is a manager. We can proceed"
  else
-    echo "Please run soup on the master. The master controls all updates."
+    echo "Please run soup on the manager. The manager controls all updates."
    exit 1
  fi
}
@@ -56,13 +56,13 @@ version_check() {
  fi
}
got_root
-master_check
+manager_check
version_check

# Use the hostname
HOSTNAME=$(hostname)
# List all the containers
-if [ $MASTERCHECK != 'so-helix' ]; then
+if [ $MANAGERCHECK != 'so-helix' ]; then
  TRUSTED_CONTAINERS=( \
  "so-acng:$VERSION" \
  "so-thehive-cortex:$VERSION" \
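For context, manager_check reads the minion role from /etc/salt/grains; assuming a grains file containing a line such as "role: so-manager", the grep/awk pipeline yields the value compared above:

$ grep role /etc/salt/grains | awk '{print $2}'
so-manager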
@@ -198,7 +198,7 @@ EOF
read alertoption

if [ $alertoption = "1" ] ; then
-  echo "Please enter the email address you want to send the alerts to. Note: Ensure the Master Server is configured for SMTP."
+  echo "Please enter the email address you want to send the alerts to. Note: Ensure the Manager Server is configured for SMTP."
  read emailaddress
  cat << EOF >> "$rulename.yaml"
# (Required)
@@ -14,7 +14,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-{%- set MASTERIP = salt['pillar.get']('static:masterip', '') -%}
+{%- set MANAGERIP = salt['pillar.get']('static:managerip', '') -%}
. /usr/sbin/so-common

SKIP=0
@@ -50,7 +50,7 @@ done
if [ $SKIP -ne 1 ]; then
  # List indices
  echo
-  curl {{ MASTERIP }}:9200/_cat/indices?v
+  curl {{ MANAGERIP }}:9200/_cat/indices?v
  echo
  # Inform user we are about to delete all data
  echo
@@ -89,10 +89,10 @@ fi
# Delete data
echo "Deleting data..."

-INDXS=$(curl -s -XGET {{ MASTERIP }}:9200/_cat/indices?v | egrep 'logstash|elastalert|so-' | awk '{ print $3 }')
+INDXS=$(curl -s -XGET {{ MANAGERIP }}:9200/_cat/indices?v | egrep 'logstash|elastalert|so-' | awk '{ print $3 }')
for INDX in ${INDXS}
do
-  curl -XDELETE "{{ MASTERIP }}:9200/${INDX}" > /dev/null 2>&1
+  curl -XDELETE "{{ MANAGERIP }}:9200/${INDX}" > /dev/null 2>&1
done

#Start Logstash/Filebeat
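For reference, _cat/indices?v prints one row per index with the index name in the third whitespace-separated column, which is what the awk '{ print $3 }' above extracts (illustrative output, index names assumed):

health status index               uuid ...
green  open   logstash-2020.03.01 ...
green  open   so-ids-2020.03.01   ...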
@@ -1,5 +1,5 @@
#!/bin/bash
-MASTER=MASTER
+MANAGER=MANAGER
VERSION="HH1.1.4"
TRUSTED_CONTAINERS=( \
"so-nginx:$VERSION" \
@@ -37,7 +37,7 @@ do
  echo "Downloading $i"
  docker pull --disable-content-trust=false docker.io/soshybridhunter/$i
  # Tag it with the new registry destination
-  docker tag soshybridhunter/$i $MASTER:5000/soshybridhunter/$i
-  docker push $MASTER:5000/soshybridhunter/$i
+  docker tag soshybridhunter/$i $MANAGER:5000/soshybridhunter/$i
+  docker push $MANAGER:5000/soshybridhunter/$i
  docker rmi soshybridhunter/$i
done
@@ -15,7 +15,7 @@
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('master:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
+IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('manager:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
ESPORT=9200
THEHIVEESPORT=9400
@@ -1,4 +1,4 @@
-{% set MASTERIP = salt['pillar.get']('master:mainip', '') %}
+{% set MANAGERIP = salt['pillar.get']('manager:mainip', '') %}
#!/bin/bash
# Copyright 2014,2015,2016,2017,2018,2019 Security Onion Solutions, LLC
#
@@ -16,7 +16,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

default_salt_dir=/opt/so/saltstack/default
-ELASTICSEARCH_HOST="{{ MASTERIP }}"
+ELASTICSEARCH_HOST="{{ MANAGERIP }}"
ELASTICSEARCH_PORT=9200
#ELASTICSEARCH_AUTH=""
@@ -15,28 +15,29 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

-{% set MASTER = salt['grains.get']('master') %}
+{% set MANAGER = salt['grains.get']('master') %}
{% set VERSION = salt['pillar.get']('static:soversion') %}
-{%- set MASTERIP = salt['pillar.get']('static:masterip') -%}
+{%- set MANAGERIP = salt['pillar.get']('static:managerip') -%}

function usage {
  cat << EOF
Usage: $0 <pcap-file-1> [pcap-file-2] [pcap-file-N]

-Imports one or more PCAP files for analysis. If available, curator will be automatically stopped.
+Imports one or more PCAP files onto a sensor node. The PCAP traffic will be analyzed and
+made available for review in the Security Onion toolset.
EOF
}

function pcapinfo() {
  PCAP=$1
  ARGS=$2
-  docker run --rm -v $PCAP:/input.pcap --entrypoint capinfos {{ MASTER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap $ARGS
+  docker run --rm -v $PCAP:/input.pcap --entrypoint capinfos {{ MANAGER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap $ARGS
}

function pcapfix() {
  PCAP=$1
  PCAP_OUT=$2
-  docker run --rm -v $PCAP:/input.pcap -v $PCAP_OUT:$PCAP_OUT --entrypoint pcapfix {{ MASTER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap -o $PCAP_OUT > /dev/null 2>&1
+  docker run --rm -v $PCAP:/input.pcap -v $PCAP_OUT:$PCAP_OUT --entrypoint pcapfix {{ MANAGER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap -o $PCAP_OUT > /dev/null 2>&1
}

function suricata() {
@@ -57,7 +58,7 @@ function suricata() {
    -v ${NSM_PATH}/:/nsm/:rw \
    -v $PCAP:/input.pcap:ro \
    -v /opt/so/conf/suricata/bpf:/etc/suricata/bpf:ro \
-    {{ MASTER }}:5000/soshybridhunter/so-suricata:{{ VERSION }} \
+    {{ MANAGER }}:5000/soshybridhunter/so-suricata:{{ VERSION }} \
    --runmode single -k none -r /input.pcap > $LOG_PATH/console.log 2>&1
}
@@ -85,7 +86,7 @@ function zeek() {
    -v /opt/so/conf/zeek/bpf:/opt/zeek/etc/bpf:ro \
    --entrypoint /opt/zeek/bin/zeek \
    -w /nsm/zeek/logs \
-    {{ MASTER }}:5000/soshybridhunter/so-zeek:{{ VERSION }} \
+    {{ MANAGER }}:5000/soshybridhunter/so-zeek:{{ VERSION }} \
    -C -r /input.pcap local > $NSM_PATH/logs/console.log 2>&1
}
@@ -110,14 +111,6 @@ for i in "$@"; do
  fi
done

-if ! [ -d /opt/so/conf/curator ]; then
-  echo "Curator is not installed on this node and cannot be stopped automatically."
-else
-  echo -n "Stopping curator..."
-  so-curator-stop > /dev/null 2>&1
-  echo "Done"
-fi
-
# track if we have any valid or invalid pcaps
INVALID_PCAPS="no"
VALID_PCAPS="no"
@@ -206,17 +199,20 @@
  echo "Please note! One or more pcaps was invalid! You can scroll up to see which ones were invalid."
fi

+START_OLDEST_SLASH=$(echo $START_OLDEST | sed -e 's/-/%2F/g')
+END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')
+
if [ "$VALID_PCAPS" = "yes" ]; then
  cat << EOF

Import complete!

You can use the following hyperlink to view data in the time range of your import. You can triple-click to quickly highlight the entire hyperlink and you can then copy it into your browser:
-https://{{ MASTERIP }}/kibana/app/kibana#/dashboard/a8411b30-6d03-11ea-b301-3d6c35840645?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:'${START_OLDEST}T00:00:00.000Z',mode:absolute,to:'${END_NEWEST}T00:00:00.000Z'))
+https://{{ MANAGERIP }}/kibana/app/kibana#/dashboard/a8411b30-6d03-11ea-b301-3d6c35840645?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:'${START_OLDEST}T00:00:00.000Z',mode:absolute,to:'${END_NEWEST}T00:00:00.000Z'))

or you can manually set your Time Range to be:
From: $START_OLDEST To: $END_NEWEST

-Please note that it may take 30 seconds or more for events to appear in Kibana.
+Please note that it may take 30 seconds or more for events to appear in Onion Hunt.
EOF
fi
@@ -1,9 +1,9 @@
#!/bin/bash
#
-# {%- set FLEET_MASTER = salt['pillar.get']('static:fleet_master', False) -%}
+# {%- set FLEET_MANAGER = salt['pillar.get']('static:fleet_manager', False) -%}
# {%- set FLEET_NODE = salt['pillar.get']('static:fleet_node', False) -%}
# {%- set FLEET_IP = salt['pillar.get']('static:fleet_ip', '') %}
-# {%- set MASTER = salt['pillar.get']('master:url_base', '') %}
+# {%- set MANAGER = salt['pillar.get']('manager:url_base', '') %}
#
# Copyright 2014,2015,2016,2017,2018,2019,2020 Security Onion Solutions, LLC
#
@@ -20,7 +20,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

-KIBANA_HOST={{ MASTER }}
+KIBANA_HOST={{ MANAGER }}
KSO_PORT=5601
OUTFILE="saved_objects.ndjson"
curl -s -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -XPOST $KIBANA_HOST:$KSO_PORT/api/saved_objects/_export -d '{ "type": [ "index-pattern", "config", "visualization", "dashboard", "search" ], "excludeExportDetails": false }' > $OUTFILE
@@ -29,7 +29,7 @@ curl -s -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -XPOST $KIBANA_H
sed -i "s/$KIBANA_HOST/PLACEHOLDER/g" $OUTFILE

# Clean up for Fleet, if applicable
-# {% if FLEET_NODE or FLEET_MASTER %}
+# {% if FLEET_NODE or FLEET_MANAGER %}
# Fleet IP
-sed -i "s/{{ MASTER }}/FLEETPLACEHOLDER/g" $OUTFILE
+sed -i "s/{{ MANAGER }}/FLEETPLACEHOLDER/g" $OUTFILE
# {% endif %}
New file: salt/common/tools/sbin/so-sensor-clean (121 lines)
@@ -0,0 +1,121 @@
#!/bin/bash

# Delete Zeek Logs based on defined CRIT_DISK_USAGE value

# Copyright 2014,2015,2016,2017,2018,2019 Security Onion Solutions, LLC

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

SENSOR_DIR='/nsm'
CRIT_DISK_USAGE=90
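# Current usage is derived as follows: "df -P" prints one POSIX-format line
# per filesystem, the fifth column is the Use% value, and tr removes the
# trailing "%" so the result can be compared as an integer below.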
CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
LOG="/opt/so/log/sensor_clean.log"
TODAY=$(date -u "+%Y-%m-%d")

clean () {
  ## find the oldest Zeek logs directory
  OLDEST_DIR=$(ls /nsm/zeek/logs/ | grep -v "current" | grep -v "stats" | grep -v "packetloss" | grep -v "zeek_clean" | sort | head -n 1)
  if [ -z "$OLDEST_DIR" -o "$OLDEST_DIR" == ".." -o "$OLDEST_DIR" == "." ]
  then
    echo "$(date) - No old Zeek logs available to clean up in /nsm/zeek/logs/" >> $LOG
    #exit 0
  else
    echo "$(date) - Removing directory: /nsm/zeek/logs/$OLDEST_DIR" >> $LOG
    rm -rf /nsm/zeek/logs/"$OLDEST_DIR"
  fi

  ## Remarking for now, as we are moving extracted files to /nsm/strelka/processed
  ## find oldest files in extracted directory and exclude today
  #OLDEST_EXTRACT=$(find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' 2>/dev/null | sort | grep -v $TODAY | head -n 1)
  #if [ -z "$OLDEST_EXTRACT" -o "$OLDEST_EXTRACT" == ".." -o "$OLDEST_EXTRACT" == "." ]
  #then
  #  echo "$(date) - No old extracted files available to clean up in /nsm/zeek/extracted/complete" >> $LOG
  #else
  #  OLDEST_EXTRACT_DATE=`echo $OLDEST_EXTRACT | awk '{print $1}' | cut -d+ -f1`
  #  OLDEST_EXTRACT_FILE=`echo $OLDEST_EXTRACT | awk '{print $2}'`
  #  echo "$(date) - Removing extracted files for $OLDEST_EXTRACT_DATE" >> $LOG
  #  find /nsm/zeek/extracted/complete -type f -printf '%T+ %p\n' | grep $OLDEST_EXTRACT_DATE | awk '{print $2}' | while read FILE
  #  do
  #    echo "$(date) - Removing extracted file: $FILE" >> $LOG
  #    rm -f "$FILE"
  #  done
  #fi

  ## Clean up Zeek extracted files processed by Strelka
  STRELKA_FILES='/nsm/strelka/processed'
  OLDEST_STRELKA=$(find $STRELKA_FILES -type f -printf '%T+ %p\n' | sort -n | head -n 1)
  if [ -z "$OLDEST_STRELKA" -o "$OLDEST_STRELKA" == ".." -o "$OLDEST_STRELKA" == "." ]
  then
    echo "$(date) - No old files available to clean up in $STRELKA_FILES" >> $LOG
  else
    OLDEST_STRELKA_DATE=`echo $OLDEST_STRELKA | awk '{print $1}' | cut -d+ -f1`
    OLDEST_STRELKA_FILE=`echo $OLDEST_STRELKA | awk '{print $2}'`
    echo "$(date) - Removing extracted files for $OLDEST_STRELKA_DATE" >> $LOG
    find $STRELKA_FILES -type f -printf '%T+ %p\n' | grep $OLDEST_STRELKA_DATE | awk '{print $2}' | while read FILE
    do
      echo "$(date) - Removing file: $FILE" >> $LOG
      rm -f "$FILE"
    done
  fi

  ## Clean up Suricata log files
  SURICATA_LOGS='/nsm/suricata'
  OLDEST_SURICATA=$(find $SURICATA_LOGS -type f -printf '%T+ %p\n' | sort -n | head -n 1)
if [ -z "$OLDEST_SURICATA" -o "$OLDEST_SURICATA" == ".." -o "$OLDEST_SURICATA" == "." ]
|
||||
then
|
||||
echo "$(date) - No old files available to clean up in $SURICATA_LOGS" >> $LOG
|
||||
else
|
||||
OLDEST_SURICATA_DATE=`echo $OLDEST_SURICATA | awk '{print $1}' | cut -d+ -f1`
|
||||
OLDEST_SURICATA_FILE=`echo $OLDEST_SURICATA | awk '{print $2}'`
|
||||
echo "$(date) - Removing logs for $OLDEST_SURICATA_DATE" >> $LOG
|
||||
find $SURICATA_LOGS -type f -printf '%T+ %p\n' | grep $OLDEST_SURICATA_DATE | awk '{print $2}' |while read FILE
|
||||
do
|
||||
echo "$(date) - Removing file: $FILE" >> $LOG
|
||||
rm -f "$FILE"
|
||||
done
|
||||
fi
|
||||
|
||||
## Clean up extracted pcaps from Steno
|
||||
PCAPS='/nsm/pcapout'
|
||||
OLDEST_PCAP=$(find $PCAPS -type f -printf '%T+ %p\n' | sort -n | head -n 1 )
|
||||
if [ -z "$OLDEST_PCAP" -o "$OLDEST_PCAP" == ".." -o "$OLDEST_PCAP" == "." ]
|
||||
then
|
||||
echo "$(date) - No old files available to clean up in $PCAPS" >> $LOG
|
||||
else
|
||||
OLDEST_PCAP_DATE=`echo $OLDEST_PCAP | awk '{print $1}' | cut -d+ -f1`
|
||||
OLDEST_PCAP_FILE=`echo $OLDEST_PCAP | awk '{print $2}'`
|
||||
echo "$(date) - Removing extracted files for $OLDEST_PCAP_DATE" >> $LOG
|
||||
find $PCAPS -type f -printf '%T+ %p\n' | grep $OLDEST_PCAP_DATE | awk '{print $2}' |while read FILE
|
||||
do
|
||||
echo "$(date) - Removing file: $FILE" >> $LOG
|
||||
rm -f "$FILE"
|
||||
done
|
||||
fi
|
||||
}
|
||||
|
||||
# Check to see if we are already running
|
||||
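# (ps reports both this script and its command substitution subshell under the
# same name, so a count greater than 2 indicates another instance is active)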
IS_RUNNING=$(ps aux | grep "so-sensor-clean" | grep -v grep | wc -l)
[ "$IS_RUNNING" -gt 2 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >> $LOG && exit 0

if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
  while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]
  do
    clean
    CUR_USAGE=$(df -P $SENSOR_DIR | tail -1 | awk '{print $5}' | tr -d %)
  done
else
  echo "$(date) - Current usage value of $CUR_USAGE not greater than CRIT_DISK_USAGE value of $CRIT_DISK_USAGE..." >> $LOG
fi
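As a quick check of the new script (a sketch; paths are those defined above), it can be invoked by hand and its activity reviewed in the log it appends to:

sudo /usr/sbin/so-sensor-clean
tail /opt/so/log/sensor_clean.log
df -P /nsm | tail -1 | awk '{print $5}'    # same usage check the script performs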