diff --git a/salt/common/tools/sbin/so-import-pcap b/salt/common/tools/sbin/so-import-pcap new file mode 100644 index 000000000..3ca1f0277 --- /dev/null +++ b/salt/common/tools/sbin/so-import-pcap @@ -0,0 +1,221 @@ +#!/bin/bash +# +# Copyright 2014,2015,2016,2017,2018,2019,2020 Security Onion Solutions, LLC +# +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>. + +{% set MASTER = salt['grains.get']('master') %} +{% set VERSION = salt['pillar.get']('static:soversion') %} +{%- set MASTERIP = salt['pillar.get']('static:masterip') -%} + +function usage { + cat << EOF +Usage: +Please supply at least one pcap file. + +For example, to import a single pcap named import.pcap: +so-import-pcap import.pcap + +To import multiple pcaps: +so-import-pcap import1.pcap import2.pcap + +** IMPORTANT ** +Security Onion installations contain processes that automatically discard old data. Therefore, imports of old network traffic might immediately be erased, unless those processes are first disabled. 
+ +EOF +} + +function pcapinfo() { + PCAP=$1 + ARGS=$2 + docker run --rm -v $PCAP:/input.pcap --entrypoint capinfos {{ MASTER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap $ARGS +} + +function pcapfix() { + PCAP=$1 + PCAP_OUT=$2 + docker run --rm -v $PCAP:/input.pcap -v $PCAP_OUT:$PCAP_OUT --entrypoint pcapfix {{ MASTER }}:5000/soshybridhunter/so-pcaptools:{{ VERSION }} /input.pcap -o $PCAP_OUT > /dev/null 2>&1 +} + +function suricata() { + PCAP=$1 + HASH=$2 + + NSM_PATH=/nsm/import/${HASH}/suricata + mkdir -p $NSM_PATH + chown suricata:socore $NSM_PATH + LOG_PATH=/opt/so/log/suricata/import/${HASH} + mkdir -p $LOG_PATH + chown suricata:suricata $LOG_PATH + docker run --rm \ + -v /opt/so/conf/suricata/suricata.yaml:/etc/suricata/suricata.yaml:ro \ + -v /opt/so/conf/suricata/threshold.conf:/etc/suricata/threshold.conf:ro \ + -v /opt/so/conf/suricata/rules:/etc/suricata/rules:ro \ + -v ${LOG_PATH}:/var/log/suricata/:rw \ + -v ${NSM_PATH}/:/nsm/:rw \ + -v $PCAP:/input.pcap:ro \ + -v /opt/so/conf/suricata/bpf:/etc/suricata/bpf:ro \ + {{ MASTER }}:5000/soshybridhunter/so-suricata:{{ VERSION }} \ + --runmode single -k none -r /input.pcap > $LOG_PATH/console.log 2>&1 +} + +function zeek() { + PCAP=$1 + HASH=$2 + + NSM_PATH=/nsm/import/${HASH}/zeek + mkdir -p $NSM_PATH/logs + mkdir -p $NSM_PATH/spool + mkdir -p $NSM_PATH/extracted + chown -R zeek:socore $NSM_PATH + docker run --rm \ + -v $NSM_PATH:/nsm:rw \ + -v $PCAP:/input.pcap:ro \ + -v /opt/so/conf/zeek/local.zeek:/opt/zeek/share/zeek/site/local.zeek:ro \ + -v /opt/so/conf/zeek/node.cfg:/opt/zeek/etc/node.cfg:ro \ + -v /opt/so/conf/zeek/zeekctl.cfg:/opt/zeek/etc/zeekctl.cfg:ro \ + -v /opt/so/conf/zeek/policy/securityonion:/opt/zeek/share/zeek/policy/securityonion:ro \ + -v /opt/so/conf/zeek/policy/custom:/opt/zeek/share/zeek/policy/custom:ro \ + -v /opt/so/conf/zeek/policy/cve-2020-0601:/opt/zeek/share/zeek/policy/cve-2020-0601:ro \ + -v 
/opt/so/conf/zeek/policy/intel:/opt/zeek/share/zeek/policy/intel:rw \ + -v /opt/so/conf/zeek/bpf:/opt/zeek/etc/bpf:ro \ + --entrypoint /opt/zeek/bin/zeek \ + -w /nsm/zeek/logs \ + {{ MASTER }}:5000/soshybridhunter/so-zeek:{{ VERSION }} \ + -C -r /input.pcap local > $NSM_PATH/logs/console.log 2>&1 +} + +# if no parameters supplied, display usage +if [ $# -eq 0 ]; then + usage + exit +fi + +# ensure this is a manager node +if [ ! -d /opt/so/conf/soc ]; then + echo "This procedure must be run on a manager node." + exit +fi + +# verify that all parameters are files +for i in "$@"; do + if ! [ -f "$i" ]; then + usage + echo "\"$i\" is not a valid file!" + exit + fi +done + +# track if we have any valid or invalid pcaps +INVALID_PCAPS="no" +VALID_PCAPS="no" + +# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end +START_OLDEST="2050-12-31" +END_NEWEST="1971-01-01" + +# paths must be quoted in case they include spaces +for PCAP in "$@"; do + PCAP=$(/usr/bin/realpath "$PCAP") + echo "Processing Import: ${PCAP}" + echo "- verifying file" + if ! 
pcapinfo "${PCAP}" > /dev/null 2>&1; then + # try to fix pcap and then process the fixed pcap directly + PCAP_FIXED=`mktemp /tmp/so-import-pcap-XXXXXXXXXX.pcap` + echo "- attempting to recover corrupted PCAP file" + pcapfix "${PCAP}" "${PCAP_FIXED}" + PCAP="${PCAP_FIXED}" + TEMP_PCAPS+=(${PCAP_FIXED}) + fi + + # generate a unique hash to assist with dedupe checks + HASH=$(md5sum "${PCAP}" | awk '{ print $1 }') + HASH_DIR=/nsm/import/${HASH} + echo "- assigning unique identifier to import: $HASH" + + if [ -d $HASH_DIR ]; then + echo "- this PCAP has already been imported; skipping" + INVALID_PCAPS="yes" + elif pcapinfo "${PCAP}" |egrep -q "Last packet time: 1970-01-01|Last packet time: n/a"; then + echo "- this PCAP file is invalid; skipping" + INVALID_PCAPS="yes" + else + VALID_PCAPS="yes" + + PCAP_DIR=$HASH_DIR/pcap + mkdir -p $PCAP_DIR + + # generate IDS alerts and write them to standard pipeline + echo "- analyzing traffic with Suricata" + suricata "${PCAP}" $HASH + + # generate Zeek logs and write them to a unique subdirectory in /nsm/import/${HASH}/zeek/ + # since each run writes to a unique subdirectory, there is no need for a lock file + echo "- analyzing traffic with Zeek" + zeek "${PCAP}" $HASH + + START=$(pcapinfo "${PCAP}" -a |grep "First packet time:" | awk '{print $4}') + END=$(pcapinfo "${PCAP}" -e |grep "Last packet time:" | awk '{print $4}') + echo "- saving PCAP data spanning dates $START through $END" + + # compare $START to $START_OLDEST + START_COMPARE=$(date -d $START +%s) + START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s) + if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then + START_OLDEST=$START + fi + + # compare $ENDNEXT to $END_NEWEST + ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"` + ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s) + END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s) + if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then + END_NEWEST=$ENDNEXT + fi + + cp -f "${PCAP}" "${PCAP_DIR}"/data.pcap + + fi # end of valid pcap + + echo + +done # end of 
for-loop processing pcap files + +# remove temp files +echo "Cleaning up:" +for TEMP_PCAP in ${TEMP_PCAPS[@]}; do + echo "- removing temporary pcap $TEMP_PCAP" + rm -f $TEMP_PCAP +done + +# output final messages +if [ "$INVALID_PCAPS" = "yes" ]; then + echo + echo "Please note! One or more pcaps was invalid! You can scroll up to see which ones were invalid." +fi + +if [ "$VALID_PCAPS" = "yes" ]; then +cat << EOF + +Import complete! + +You can use the following hyperlink to view data in the time range of your import. You can triple-click to quickly highlight the entire hyperlink and you can then copy it into your browser: +https://{{ MASTERIP }}/kibana/app/kibana#/dashboard/a8411b30-6d03-11ea-b301-3d6c35840645?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:'${START_OLDEST}T00:00:00.000Z',mode:absolute,to:'${END_NEWEST}T00:00:00.000Z')) + +or you can manually set your Time Range to be: +From: $START_OLDEST To: $END_NEWEST + +Please note that it may take 30 seconds or more for events to appear in Kibana. 
+EOF +fi \ No newline at end of file diff --git a/salt/filebeat/etc/filebeat.yml b/salt/filebeat/etc/filebeat.yml index da116cf2c..ac4e0fe6f 100644 --- a/salt/filebeat/etc/filebeat.yml +++ b/salt/filebeat/etc/filebeat.yml @@ -118,12 +118,31 @@ filebeat.inputs: clean_removed: false close_removed: false + - type: log + paths: + - /nsm/import/*/zeek/logs/{{ LOGNAME }}.log + fields: + module: zeek + dataset: {{ LOGNAME }} + category: network + imported: true + processors: + - dissect: + tokenizer: "/nsm/import/%{import_id}/zeek/logs/%{import_source}" + field: "source" + target_prefix: "" + - drop_fields: + fields: ["source", "prospector", "input", "offset", "beat"] + + fields_under_root: true + clean_removed: false + close_removed: false {%- endfor %} {%- endif %} - type: log paths: - - /suricata/eve*.json + - /nsm/suricata/eve*.json fields: module: suricata dataset: common @@ -137,8 +156,27 @@ filebeat.inputs: clean_removed: false close_removed: false - {%- if STRELKAENABLED == 1 %} + - type: log + paths: + - /nsm/import/*/suricata/eve*.json + fields: + module: suricata + dataset: common + category: network + imported: true + processors: + - dissect: + tokenizer: "/nsm/import/%{import_id}/suricata/%{import_source}" + field: "source" + target_prefix: "" + - drop_fields: + fields: ["source", "prospector", "input", "offset", "beat"] + fields_under_root: true + clean_removed: false + close_removed: false + + {%- if STRELKAENABLED == 1 %} - type: log paths: - /nsm/strelka/log/strelka.log @@ -229,7 +267,7 @@ output.elasticsearch: - index: "so-strelka-%{+yyyy.MM.dd}" when.contains: module: "strelka" - + setup.template.enabled: false {%- else %} diff --git a/salt/filebeat/init.sls b/salt/filebeat/init.sls index 897bb3937..8fba7e258 100644 --- a/salt/filebeat/init.sls +++ b/salt/filebeat/init.sls @@ -55,9 +55,7 @@ so-filebeat: - binds: - /opt/so/log/filebeat:/usr/share/filebeat/logs:rw - /opt/so/conf/filebeat/etc/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro - - 
/nsm/zeek:/nsm/zeek:ro - - /nsm/strelka/log:/nsm/strelka/log:ro - - /nsm/suricata:/suricata:ro + - /nsm:/nsm:ro - /opt/so/wazuh/logs/alerts:/wazuh/alerts:ro - /opt/so/wazuh/logs/archives:/wazuh/archives:ro - /nsm/osquery/fleet/:/nsm/osquery/fleet:ro