Merge branch 'dev' into kilo

Jason Ertel
2021-09-16 07:44:35 -04:00
10 changed files with 214 additions and 15 deletions


@@ -0,0 +1,172 @@
#!/bin/bash
#
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{%- set MANAGER = salt['grains.get']('master') %}
{%- set VERSION = salt['pillar.get']('global:soversion') %}
{%- set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
{%- set MANAGERIP = salt['pillar.get']('global:managerip') -%}
{%- set URLBASE = salt['pillar.get']('global:url_base') %}
{% set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
{% set ES_PW = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}
INDEX_DATE=$(date +'%Y.%m.%d')
RUNID=$(tr -dc 'a-z0-9' < /dev/urandom | fold -w 8 | head -n 1)
. /usr/sbin/so-common
function usage {
cat << EOF
Usage: $0 <evtx-file-1> [evtx-file-2] [evtx-file-*]
Imports one or more evtx files into Security Onion. The evtx files will be analyzed and made available for review in the Security Onion toolset.
EOF
}
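# Illustrative usage (assuming this script is installed as so-import-evtx):
#   so-import-evtx /cases/1234/Security.evtx /cases/1234/System.evtx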
function evtx2es() {
EVTX=$1
HASH=$2
docker run --rm \
-v "$EVTX:/tmp/$RUNID.evtx" \
--entrypoint evtx2es \
{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} \
--host {{ MANAGERIP }} --scheme https \
--index so-beats-$INDEX_DATE --pipeline import.wel \
--login {{ ES_USER }} --pwd {{ ES_PW }} \
"/tmp/$RUNID.evtx" 1>/dev/null 2>/dev/null
docker run --rm \
-v "$EVTX:/tmp/import.evtx" \
-v "/nsm/import/evtx-end_newest:/tmp/newest" \
-v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
--entrypoint '/evtx_calc_timestamps.sh' \
{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }}
}
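# Note: the first container indexes the evtx into Elasticsearch via the
# import.wel ingest pipeline; the second runs evtx_calc_timestamps.sh to
# compute the file's oldest and newest event timestamps, writing them to the
# bind-mounted /nsm/import/evtx-start_oldest and /nsm/import/evtx-end_newest
# files, which the main loop below reads back to widen the overall time range.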
# if no parameters supplied, display usage
if [ $# -eq 0 ]; then
usage
exit 1
fi
# ensure this is a Manager node
require_manager
# verify that all parameters are files
for i in "$@"; do
if ! [ -f "$i" ]; then
usage
echo "\"$i\" is not a valid file!"
exit 2
fi
done
# track if we have any valid or invalid evtx
INVALID_EVTXS="no"
VALID_EVTXS="no"
# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end
START_OLDEST="2050-12-31"
END_NEWEST="1971-01-01"
touch /nsm/import/evtx-start_oldest
touch /nsm/import/evtx-end_newest
echo $START_OLDEST > /nsm/import/evtx-start_oldest
echo $END_NEWEST > /nsm/import/evtx-end_newest
# paths must be quoted in case they include spaces
for EVTX in "$@"; do
EVTX=$(/usr/bin/realpath "$EVTX")
echo "Processing Import: ${EVTX}"
# generate a unique hash to assist with dedupe checks
HASH=$(md5sum "${EVTX}" | awk '{ print $1 }')
HASH_DIR=/nsm/import/${HASH}
echo "- assigning unique identifier to import: $HASH"
if [ -d "$HASH_DIR" ]; then
echo "- this EVTX has already been imported; skipping"
INVALID_EVTXS="yes"
else
VALID_EVTXS="yes"
EVTX_DIR=$HASH_DIR/evtx
mkdir -p "$EVTX_DIR"
# import evtx and write them to import ingest pipeline
echo "- importing logs to Elasticsearch..."
evtx2es "${EVTX}" $HASH
# compare $START to $START_OLDEST
START=$(cat /nsm/import/evtx-start_oldest)
START_COMPARE=$(date -d "$START" +%s)
START_OLDEST_COMPARE=$(date -d "$START_OLDEST" +%s)
if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
START_OLDEST=$START
fi
# compare $ENDNEXT to $END_NEWEST
END=$(cat /nsm/import/evtx-end_newest)
ENDNEXT=$(date +%Y-%m-%d --date="$END 1 day")
ENDNEXT_COMPARE=$(date -d "$ENDNEXT" +%s)
END_NEWEST_COMPARE=$(date -d "$END_NEWEST" +%s)
if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
END_NEWEST=$ENDNEXT
fi
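# Illustrative example: with START="2021-09-01", date -d "$START" +%s yields
# 1630454400 (UTC), and the smaller epoch wins as START_OLDEST; END gets one
# day added first so the final range fully covers the last day of events.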
cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
chmod 644 "${EVTX_DIR}"/data.evtx
fi # end of valid evtx
echo
done # end of for-loop processing evtx files
# remove temp files
echo "Cleaning up:"
for TEMP_EVTX in "${TEMP_EVTXS[@]}"; do
echo "- removing temporary evtx $TEMP_EVTX"
rm -f "$TEMP_EVTX"
done
# output final messages
if [ "$INVALID_EVTXS" = "yes" ]; then
echo
echo "Please note! One or more evtx was invalid! You can scroll up to see which ones were invalid."
fi
START_OLDEST_FORMATTED=$(date +%Y-%m-%d --date="$START_OLDEST")
START_OLDEST_SLASH=$(echo "$START_OLDEST_FORMATTED" | sed -e 's/-/%2F/g')
END_NEWEST_SLASH=$(echo "$END_NEWEST" | sed -e 's/-/%2F/g')
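# Illustrative example: "2021-09-01" becomes "2021%2F09%2F01" (%2F is the
# URL-encoded "/") so the dates can be embedded in the Hunt hyperlink below.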
if [ "$VALID_EVTXS" = "yes" ]; then
cat << EOF
Import complete!
You can use the following hyperlink to view data in the time range of your import. Triple-click to quickly select the entire hyperlink, then copy it into your browser:
https://{{ URLBASE }}/#/hunt?q=import.id:${RUNID}%20%7C%20groupby%20event.module%20event.dataset&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC
or you can manually set your Time Range to be (in UTC):
From: $START_OLDEST_FORMATTED To: $END_NEWEST
Please note that it may take 30 seconds or more for events to appear in Hunt.
EOF
fi


@@ -20,6 +20,9 @@
appliance_check() {
{%- if salt['grains.get']('sosmodel', '') %}
APPLIANCE=1
{%- if grains['sosmodel'] in ['SO2AMI01', 'SO2GCI01', 'SO2AZI01'] %}
exit 0
{%- endif %}
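# (SO2AMI01, SO2GCI01, and SO2AZI01 appear to be the AWS, GCP, and Azure cloud
# images; hardware checks such as the dmidecode call below don't apply to
# them, hence the early exit.)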
DUDEYOUGOTADELL=$(dmidecode | grep Dell)
if [[ -n $DUDEYOUGOTADELL ]]; then
APPTYPE=dell


@@ -1,13 +1,10 @@
#!/bin/bash
got_root() {
# Make sure you are root
if [ "$(id -u)" -ne 0 ]; then
echo "This script must be run using sudo!"
exit 1
fi
. /usr/sbin/so-common
}
argstr=""
for arg in "$@"; do
argstr="${argstr} \"${arg}\""
done
got_root
docker exec so-idstools /bin/bash -c "cd /opt/so/idstools/etc && idstools-rulecat --force ${argstr}"


@@ -385,6 +385,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.3.0 || "$INSTALLEDVERSION" == 2.3.1 || "$INSTALLEDVERSION" == 2.3.2 || "$INSTALLEDVERSION" == 2.3.10 ]] && up_2.3.0_to_2.3.20
[[ "$INSTALLEDVERSION" == 2.3.20 || "$INSTALLEDVERSION" == 2.3.21 ]] && up_2.3.2X_to_2.3.30
[[ "$INSTALLEDVERSION" == 2.3.30 || "$INSTALLEDVERSION" == 2.3.40 ]] && up_2.3.3X_to_2.3.50
[[ "$INSTALLEDVERSION" == 2.3.50 || "$INSTALLEDVERSION" == 2.3.51 || "$INSTALLEDVERSION" == 2.3.52 || "$INSTALLEDVERSION" == 2.3.60 || "$INSTALLEDVERSION" == 2.3.61 || "$INSTALLEDVERSION" == 2.3.70]] && up_2.3.5X_to_2.3.80
true
}
@@ -622,6 +623,20 @@ EOF
INSTALLEDVERSION=2.3.50
}
up_2.3.5X_to_2.3.80() {
# Remove watermark settings from global.sls
sed -i '/ cluster_routing_allocation_disk/d' /opt/so/saltstack/local/pillar/global.sls
# Add new indices to the global
sed -i '/ index_settings:/a \\ so-elasticsearch: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
sed -i '/ index_settings:/a \\ so-logstash: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
sed -i '/ index_settings:/a \\ so-kibana: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
sed -i '/ index_settings:/a \\ so-redis: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
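# After these four inserts, global.sls should contain (roughly; exact
# indentation depends on the escaped whitespace above, and the last sed
# command's block lands first since each appends directly after the match):
#   index_settings:
#     so-redis:
#       shards: 1
#       warm: 7
#       close: 30
#       delete: 365
#     so-kibana:
#       ...
#     so-logstash:
#       ...
#     so-elasticsearch:
#       ...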
INSTALLEDVERSION=2.3.80
}
verify_upgradespace() {
CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//')
if [ "$CURRENTSPACE" -lt "10" ]; then