#!/bin/bash
#
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

{%- set MANAGER = salt['grains.get']('master') %}
{%- set VERSION = salt['pillar.get']('global:soversion') %}
{%- set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
{%- set MANAGERIP = salt['pillar.get']('global:managerip') %}
{%- set URLBASE = salt['pillar.get']('global:url_base') %}

. /usr/sbin/so-common

function usage {
  cat << EOF
Usage: $0 [options] <pcap-file-1> [pcap-file-2] [pcap-file-N]

Imports one or more PCAP files onto a sensor node. The PCAP traffic will be analyzed and made available for review in the Security Onion toolset.

Options:
  --json   Outputs summary in JSON format. Implies --quiet.
  --quiet  Silences progress information to stdout.
EOF
}
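
# Example invocations (file names are illustrative):
#   so-import-pcap /tmp/capture1.pcap /tmp/capture2.pcap
#   so-import-pcap --json /tmp/capture1.pcap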

quiet=0
json=0
INPUT_FILES=
while [[ $# -gt 0 ]]; do
  param=$1
  shift
  case "$param" in
    --json)
      json=1
      quiet=1
      ;;
    --quiet)
      quiet=1
      ;;
    -*)
      echo "Encountered unexpected parameter: $param"
      usage
      exit 1
      ;;
    *)
      if [[ "$INPUT_FILES" != "" ]]; then
        INPUT_FILES="$INPUT_FILES $param"
      else
        INPUT_FILES="$param"
      fi
      ;;
  esac
done

function status {
  msg=$1
  [[ $quiet -eq 1 ]] && return
  echo "$msg"
}

# Run capinfos from the so-pcaptools container against a single pcap file.
function pcapinfo() {
  PCAP=$1
  ARGS=$2
  docker run --rm -v "$PCAP:/input.pcap" --entrypoint capinfos {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} /input.pcap -ae $ARGS
}
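
# The checks below key off capinfos output lines such as "Earliest packet time:"
# and "Latest packet time:". For reference, illustrative output (exact values
# will vary per capture):
#   Earliest packet time: 2021-06-01 14:02:37.000000
#   Latest packet time:   2021-06-01 14:09:12.000000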

# Run pcapfix from the so-pcaptools container, writing the repaired pcap to $PCAP_OUT.
function pcapfix() {
  PCAP=$1
  PCAP_OUT=$2
  docker run --rm -v "$PCAP:/input.pcap" -v "$PCAP_OUT:$PCAP_OUT" --entrypoint pcapfix {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} /input.pcap -o "$PCAP_OUT" > /dev/null 2>&1
}
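
# pcapfix attempts to rebuild damaged pcap headers; the equivalent manual run
# (a sketch, assuming pcapfix is available on the host) would be:
#   pcapfix /path/to/broken.pcap -o /tmp/fixed.pcap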

# Replay a pcap through Suricata in single-threaded mode, writing logs beneath
# /opt/so/log/suricata/import/<hash> and NSM output beneath /nsm/import/<hash>/suricata.
function suricata() {
  PCAP=$1
  HASH=$2

  NSM_PATH=/nsm/import/${HASH}/suricata
  mkdir -p $NSM_PATH
  chown suricata:socore $NSM_PATH
  LOG_PATH=/opt/so/log/suricata/import/${HASH}
  mkdir -p $LOG_PATH
  chown suricata:suricata $LOG_PATH
  docker run --rm \
    -v /opt/so/conf/suricata/suricata.yaml:/etc/suricata/suricata.yaml:ro \
    -v /opt/so/conf/suricata/threshold.conf:/etc/suricata/threshold.conf:ro \
    -v /opt/so/conf/suricata/rules:/etc/suricata/rules:ro \
    -v ${LOG_PATH}:/var/log/suricata/:rw \
    -v ${NSM_PATH}/:/nsm/:rw \
    -v "$PCAP:/input.pcap:ro" \
    -v /dev/null:/nsm/suripcap:rw \
    -v /opt/so/conf/suricata/bpf:/etc/suricata/bpf:ro \
    {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-suricata:{{ VERSION }} \
    --runmode single -k none -r /input.pcap > $LOG_PATH/console.log 2>&1
}
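
# To spot-check the generated alerts after an import, something like the
# following works (a sketch; assumes the default eve.json output name):
#   jq 'select(.event_type == "alert")' /opt/so/log/suricata/import/<hash>/eve.json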

# Replay a pcap through Zeek using the standard Security Onion policy set,
# writing logs beneath /nsm/import/<hash>/zeek/logs.
function zeek() {
  PCAP=$1
  HASH=$2

  NSM_PATH=/nsm/import/${HASH}/zeek
  mkdir -p $NSM_PATH/logs
  mkdir -p $NSM_PATH/extracted
  mkdir -p $NSM_PATH/spool
  chown -R zeek:socore $NSM_PATH
  docker run --rm \
    -v $NSM_PATH/logs:/nsm/zeek/logs:rw \
    -v $NSM_PATH/spool:/nsm/zeek/spool:rw \
    -v $NSM_PATH/extracted:/nsm/zeek/extracted:rw \
    -v "$PCAP:/input.pcap:ro" \
    -v /opt/so/conf/zeek/local.zeek:/opt/zeek/share/zeek/site/local.zeek:ro \
    -v /opt/so/conf/zeek/node.cfg:/opt/zeek/etc/node.cfg:ro \
    -v /opt/so/conf/zeek/zeekctl.cfg:/opt/zeek/etc/zeekctl.cfg:ro \
    -v /opt/so/conf/zeek/policy/securityonion:/opt/zeek/share/zeek/policy/securityonion:ro \
    -v /opt/so/conf/zeek/policy/custom:/opt/zeek/share/zeek/policy/custom:ro \
    -v /opt/so/conf/zeek/policy/cve-2020-0601:/opt/zeek/share/zeek/policy/cve-2020-0601:ro \
    -v /opt/so/conf/zeek/policy/intel:/opt/zeek/share/zeek/policy/intel:rw \
    -v /opt/so/conf/zeek/bpf:/opt/zeek/etc/bpf:ro \
    --entrypoint /opt/zeek/bin/zeek \
    -w /nsm/zeek/logs \
    {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-zeek:{{ VERSION }} \
    -C -r /input.pcap local > $NSM_PATH/logs/console.log 2>&1
}
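
# Note: -C tells Zeek to ignore invalid TCP checksums (common in captured or
# truncated traffic), -r replays the pcap, and "local" loads the site policy.
# Logs such as conn.log, dns.log, and http.log land in $NSM_PATH/logs.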

# if no parameters were supplied, display usage
if [ "$INPUT_FILES" == "" ]; then
  usage
  exit 1
fi

# ensure this is a sensor node
if [ ! -d /opt/so/conf/suricata ]; then
  echo "This command must be run on a sensor node."
  exit 3
fi
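
# Exit codes: 1 = usage error, 2 = input file not found, 3 = not a sensor node;
# otherwise the script exits 0 if at least one pcap was imported (or previously
# imported) and 1 if none were.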

# verify that all parameters are files
for i in $INPUT_FILES; do
  if ! [ -f "$i" ]; then
    echo "\"$i\" is not a valid file!"
    exit 2
  fi
done

# track the oldest start time and newest end time across all pcaps so that we
# can generate the Security Onion Console search hyperlink at the end
START_OLDEST="2050-12-31"
END_NEWEST="1971-01-01"

INVALID_PCAPS_COUNT=0
VALID_PCAPS_COUNT=0
SKIPPED_PCAPS_COUNT=0

# paths must be quoted below in case they include spaces
for PCAP in $INPUT_FILES; do
  PCAP=$(/usr/bin/realpath "$PCAP")
  status "Processing Import: ${PCAP}"
  status "- verifying file"
  if ! pcapinfo "${PCAP}" > /dev/null 2>&1; then
    # try to fix the pcap and then process the fixed pcap directly
    PCAP_FIXED=$(mktemp /tmp/so-import-pcap-XXXXXXXXXX.pcap)
    status "- attempting to recover corrupted PCAP file"
    pcapfix "${PCAP}" "${PCAP_FIXED}"
    # make the fixed file world-readable since the Suricata docker container runs as a non-root user
    chmod a+r "${PCAP_FIXED}"
    PCAP="${PCAP_FIXED}"
    TEMP_PCAPS+=("${PCAP_FIXED}")
  fi

  # generate a unique hash to assist with dedupe checks
  HASH=$(md5sum "${PCAP}" | awk '{ print $1 }')
  HASH_DIR=/nsm/import/${HASH}
  # destination directory for this pcap's data; used by the copy at the end of the loop
  PCAP_DIR=$HASH_DIR/pcap
  status "- assigning unique identifier to import: $HASH"

  pcap_data=$(pcapinfo "${PCAP}")
  if ! echo "$pcap_data" | grep -q "Earliest packet time:" || echo "$pcap_data" | grep -Eq "Latest packet time: 1970-01-01|Latest packet time: n/a"; then
    status "- this PCAP file is invalid; skipping"
    INVALID_PCAPS_COUNT=$((INVALID_PCAPS_COUNT + 1))
  else
    if [ -d $HASH_DIR ]; then
      status "- this PCAP has already been imported; skipping"
      SKIPPED_PCAPS_COUNT=$((SKIPPED_PCAPS_COUNT + 1))
    else
      VALID_PCAPS_COUNT=$((VALID_PCAPS_COUNT + 1))

      mkdir -p $PCAP_DIR

      # generate IDS alerts and write them to the standard pipeline
      status "- analyzing traffic with Suricata"
      suricata "${PCAP}" $HASH
{% if salt['pillar.get']('global:mdengine') == 'ZEEK' %}
      # generate Zeek logs and write them to a unique subdirectory under /nsm/import/<hash>/zeek/;
      # since each run writes to a unique subdirectory, there is no need for a lock file
      status "- analyzing traffic with Zeek"
      zeek "${PCAP}" $HASH
{% endif %}
    fi

    if [[ "$HASH_FILTERS" == "" ]]; then
      HASH_FILTERS="import.id:${HASH}"
      HASHES="${HASH}"
    else
      HASH_FILTERS="$HASH_FILTERS%20OR%20import.id:${HASH}"
      HASHES="${HASHES} ${HASH}"
    fi

    START=$(pcapinfo "${PCAP}" -a | grep "Earliest packet time:" | awk '{print $4}')
    END=$(pcapinfo "${PCAP}" -e | grep "Latest packet time:" | awk '{print $4}')
    status "- found PCAP data spanning dates $START through $END"

    # compare $START to $START_OLDEST
    START_COMPARE=$(date -d $START +%s)
    START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
    if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
      START_OLDEST=$START
    fi

    # compare $ENDNEXT (the day after $END) to $END_NEWEST
    ENDNEXT=$(date +%Y-%m-%d --date="$END 1 day")
    ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
    END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
    if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
      END_NEWEST=$ENDNEXT
    fi
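
    # worked example (illustrative dates): with START=2021-06-01,
    #   TZ=UTC date -d 2021-06-01 +%s   -> 1622505600
    # these comparisons keep the smallest start epoch and largest end epoch seen
    # across all pcaps; END is padded by one day so the search range covers the
    # final day of traffic in full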

    cp -f "${PCAP}" "${PCAP_DIR}"/data.pcap
    chmod 644 "${PCAP_DIR}"/data.pcap

  fi # end of valid pcap

  status

done # end of for-loop processing pcap files

# remove temp files
for TEMP_PCAP in "${TEMP_PCAPS[@]}"; do
  status "- removing temporary pcap $TEMP_PCAP"
  rm -f "$TEMP_PCAP"
done

# output final messages
if [[ $INVALID_PCAPS_COUNT -gt 0 ]]; then
  status
  status "WARNING: One or more PCAPs were invalid. Scroll up to see which ones were invalid."
fi

# convert dates to the URL-encoded YYYY/MM/DD form used in the hyperlink below
START_OLDEST_SLASH=$(echo $START_OLDEST | sed -e 's/-/%2F/g')
END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')
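
# e.g. 2021-06-01 becomes 2021%2F06%2F01 (URL-encoded 2021/06/01), matching the
# date format used in the hyperlink's time-range parameter below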

if [[ $VALID_PCAPS_COUNT -gt 0 ]] || [[ $SKIPPED_PCAPS_COUNT -gt 0 ]]; then
  URL="https://{{ URLBASE }}/#/dashboards?q=$HASH_FILTERS%20%7C%20groupby%20event.module*%20%7C%20groupby%20-sankey%20event.module*%20event.dataset%20%7C%20groupby%20event.dataset%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port%20%7C%20groupby%20network.protocol%20%7C%20groupby%20rule.name%20rule.category%20event.severity_label%20%7C%20groupby%20dns.query.name%20%7C%20groupby%20file.mime_type%20%7C%20groupby%20http.virtual_host%20http.uri%20%7C%20groupby%20notice.note%20notice.message%20notice.sub_message%20%7C%20groupby%20ssl.server_name%20%7C%20groupby%20source.as.organization.name%20source.geo.country_name%20%7C%20groupby%20destination.as.organization.name%20destination.geo.country_name&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC"

  status "Import complete!"
  status
  status "Use the following hyperlink to view the imported data. Triple-click to quickly highlight the entire hyperlink and then copy it into a browser:"
  status "$URL"
  status
  status "or, manually set the Time Range to be (in UTC):"
  status "From: $START_OLDEST To: $END_NEWEST"
  status
  status "Note: It can take 30 seconds or more for events to appear in Security Onion Console."
  RESULT=0
else
  START_OLDEST=
  END_NEWEST=
  URL=
  RESULT=1
fi

if [[ $json -eq 1 ]]; then
  jq -n \
    --arg success_count "$VALID_PCAPS_COUNT" \
    --arg fail_count "$INVALID_PCAPS_COUNT" \
    --arg skipped_count "$SKIPPED_PCAPS_COUNT" \
    --arg begin_date "$START_OLDEST" \
    --arg end_date "$END_NEWEST" \
    --arg url "$URL" \
    --arg hashes "$HASHES" \
    '{
      success_count: $success_count,
      fail_count: $fail_count,
      skipped_count: $skipped_count,
      begin_date: $begin_date,
      end_date: $end_date,
      url: $url,
      hash: ($hashes / " ")
    }'
fi
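
# Illustrative --json output (values are hypothetical; jq's --arg passes the
# counts and dates as strings, and ($hashes / " ") splits on spaces into an array):
#   {
#     "success_count": "2",
#     "fail_count": "0",
#     "skipped_count": "1",
#     "begin_date": "2021-06-01",
#     "end_date": "2021-06-04",
#     "url": "https://...",
#     "hash": ["<md5-1>", "<md5-2>", "<md5-3>"]
#   }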

exit $RESULT