Merge remote-tracking branch 'remotes/origin/dev' into issue/1257

salt/common/tools/sbin/so-import-evtx (new file, 172 lines)
@@ -0,0 +1,172 @@
#!/bin/bash
#
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

{%- set MANAGER = salt['grains.get']('master') %}
{%- set VERSION = salt['pillar.get']('global:soversion') %}
{%- set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
{%- set MANAGERIP = salt['pillar.get']('global:managerip') -%}
{%- set URLBASE = salt['pillar.get']('global:url_base') %}
{% set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
{% set ES_PW = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}

INDEX_DATE=$(date +'%Y.%m.%d')
RUNID=$(cat /dev/urandom | tr -dc 'a-z0-9' | fold -w 8 | head -n 1)

. /usr/sbin/so-common

function usage {
  cat << EOF
Usage: $0 <evtx-file-1> [evtx-file-2] [evtx-file-*]

Imports one or more evtx files into Security Onion. The evtx files will be analyzed and made available for review in the Security Onion toolset.
EOF
}

function evtx2es() {
  EVTX=$1
  HASH=$2

  docker run --rm \
    -v "$EVTX:/tmp/$RUNID.evtx" \
    --entrypoint evtx2es \
    {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} \
    --host {{ MANAGERIP }} --scheme https \
    --index so-beats-$INDEX_DATE --pipeline import.wel \
    --login {{ES_USER}} --pwd {{ES_PW}} \
    "/tmp/$RUNID.evtx" 1>/dev/null 2>/dev/null

  docker run --rm \
    -v "$EVTX:/tmp/import.evtx" \
    -v "/nsm/import/evtx-end_newest:/tmp/newest" \
    -v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
    --entrypoint '/evtx_calc_timestamps.sh' \
    {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }}
}

# if no parameters supplied, display usage
if [ $# -eq 0 ]; then
  usage
  exit 1
fi

# ensure this is a Manager node
require_manager

# verify that all parameters are files
for i in "$@"; do
  if ! [ -f "$i" ]; then
    usage
    echo "\"$i\" is not a valid file!"
    exit 2
  fi
done

# track if we have any valid or invalid evtx
INVALID_EVTXS="no"
VALID_EVTXS="no"

# track oldest start and newest end so that we can generate the Kibana search hyperlink at the end
START_OLDEST="2050-12-31"
END_NEWEST="1971-01-01"

touch /nsm/import/evtx-start_oldest
touch /nsm/import/evtx-end_newest

echo $START_OLDEST > /nsm/import/evtx-start_oldest
echo $END_NEWEST > /nsm/import/evtx-end_newest

# paths must be quoted in case they include spaces
for EVTX in "$@"; do
  EVTX=$(/usr/bin/realpath "$EVTX")
  echo "Processing Import: ${EVTX}"

  # generate a unique hash to assist with dedupe checks
  HASH=$(md5sum "${EVTX}" | awk '{ print $1 }')
  HASH_DIR=/nsm/import/${HASH}
  echo "- assigning unique identifier to import: $HASH"

  if [ -d $HASH_DIR ]; then
    echo "- this EVTX has already been imported; skipping"
    INVALID_EVTXS="yes"
  else
    VALID_EVTXS="yes"

    EVTX_DIR=$HASH_DIR/evtx
    mkdir -p $EVTX_DIR

    # import evtx and write them to import ingest pipeline
    echo "- importing logs to Elasticsearch..."
    evtx2es "${EVTX}" $HASH

    # compare $START to $START_OLDEST
    START=$(cat /nsm/import/evtx-start_oldest)
    START_COMPARE=$(date -d $START +%s)
    START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
    if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
      START_OLDEST=$START
    fi

    # compare $ENDNEXT to $END_NEWEST
    END=$(cat /nsm/import/evtx-end_newest)
    ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
    ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
    END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
    if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
      END_NEWEST=$ENDNEXT
    fi

    cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
    chmod 644 "${EVTX_DIR}"/data.evtx

  fi # end of valid evtx

  echo

done # end of for-loop processing evtx files

# remove temp files
echo "Cleaning up:"
for TEMP_EVTX in ${TEMP_EVTXS[@]}; do
  echo "- removing temporary evtx $TEMP_EVTX"
  rm -f $TEMP_EVTX
done

# output final messages
if [ "$INVALID_EVTXS" = "yes" ]; then
  echo
  echo "Please note! One or more evtx was invalid! You can scroll up to see which ones were invalid."
fi

START_OLDEST_FORMATTED=`date +%Y-%m-%d --date="$START_OLDEST"`
START_OLDEST_SLASH=$(echo $START_OLDEST_FORMATTED | sed -e 's/-/%2F/g')
END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g')

if [ "$VALID_EVTXS" = "yes" ]; then
  cat << EOF

Import complete!

You can use the following hyperlink to view data in the time range of your import. You can triple-click to quickly highlight the entire hyperlink and you can then copy it into your browser:
https://{{ URLBASE }}/#/hunt?q=import.id:${RUNID}%20%7C%20groupby%20event.module%20event.dataset&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC

or you can manually set your Time Range to be (in UTC):
From: $START_OLDEST_FORMATTED To: $END_NEWEST

Please note that it may take 30 seconds or more for events to appear in Hunt.
EOF
fi
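
Note: a typical run of the new script might look like the following; the file paths are hypothetical, and it must run as root on the manager, since it calls require_manager and invokes docker:

# Import two Windows event log files (example paths only).
sudo so-import-evtx /cases/42/Security.evtx /cases/42/Sysmon.evtx

# Expected result: per-file progress lines, then an "Import complete!"
# banner containing a Hunt hyperlink filtered to import.id:<RUNID>
# and the computed time range of the imported events.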

@@ -1,5 +1,4 @@
 #!/bin/bash
-
 # Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
 #
 # This program is free software: you can redistribute it and/or modify
@@ -20,13 +19,8 @@ echo "Starting to check for yara rule updates at $(date)..."
 
 output_dir="/opt/so/saltstack/default/salt/strelka/rules"
 mkdir -p $output_dir
 
 repos="$output_dir/repos.txt"
-ignorefile="$output_dir/ignore.txt"
 
-deletecounter=0
 newcounter=0
-updatecounter=0
 
 {% if ISAIRGAP is sameas true %}
 
@@ -35,58 +29,21 @@ echo "Airgap mode enabled."
 clone_dir="/nsm/repo/rules/strelka"
 repo_name="signature-base"
 mkdir -p /opt/so/saltstack/default/salt/strelka/rules/signature-base
+# Ensure a copy of the license is available for the rules
 [ -f $clone_dir/LICENSE ] && cp $clone_dir/$repo_name/LICENSE $output_dir/$repo_name
 
 # Copy over rules
 for i in $(find $clone_dir/yara -name "*.yar*"); do
 rule_name=$(echo $i | awk -F '/' '{print $NF}')
-repo_sum=$(sha256sum $i | awk '{print $1}')
+echo "Adding rule: $rule_name..."
 
-# Check rules against those in ignore list -- don't copy if ignored.
-if ! grep -iq $rule_name $ignorefile; then
-existing_rules=$(find $output_dir/$repo_name/ -name $rule_name | wc -l)
-
-# For existing rules, check to see if they need to be updated, by comparing checksums
-if [ $existing_rules -gt 0 ];then
-local_sum=$(sha256sum $output_dir/$repo_name/$rule_name | awk '{print $1}')
-if [ "$repo_sum" != "$local_sum" ]; then
-echo "Checksums do not match!"
-echo "Updating $rule_name..."
-cp $i $output_dir/$repo_name;
-((updatecounter++))
-fi
-else
-# If rule doesn't exist already, we'll add it
-echo "Adding new rule: $rule_name..."
 cp $i $output_dir/$repo_name
 ((newcounter++))
-fi
-fi;
-done
-
-# Check to see if we have any old rules that need to be removed
-for i in $(find $output_dir/$repo_name -name "*.yar*" | awk -F '/' '{print $NF}'); do
-is_repo_rule=$(find $clone_dir -name "$i" | wc -l)
-if [ $is_repo_rule -eq 0 ]; then
-echo "Could not find $i in source $repo_name repo...removing from $output_dir/$repo_name..."
-rm $output_dir/$repo_name/$i
-((deletecounter++))
-fi
 done
 
 echo "Done!"
 
 if [ "$newcounter" -gt 0 ];then
-echo "$newcounter new rules added."
+echo "$newcounter rules added."
 fi
-
-if [ "$updatecounter" -gt 0 ];then
-echo "$updatecounter rules updated."
-fi
-
-if [ "$deletecounter" -gt 0 ];then
-echo "$deletecounter rules removed because they were deprecated or don't exist in the source repo."
-fi
 
 {% else %}
@@ -99,50 +56,21 @@ if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then
 if ! $(echo "$repo" | grep -qE '^#'); then
 # Remove old repo if existing bc of previous error condition or unexpected disruption
 repo_name=`echo $repo | awk -F '/' '{print $NF}'`
-[ -d $repo_name ] && rm -rf $repo_name
+[ -d $output_dir/$repo_name ] && rm -rf $output_dir/$repo_name
 
 # Clone repo and make appropriate directories for rules
 
 git clone $repo $clone_dir/$repo_name
 echo "Analyzing rules from $clone_dir/$repo_name..."
 mkdir -p $output_dir/$repo_name
+# Ensure a copy of the license is available for the rules
 [ -f $clone_dir/$repo_name/LICENSE ] && cp $clone_dir/$repo_name/LICENSE $output_dir/$repo_name
 
 # Copy over rules
 for i in $(find $clone_dir/$repo_name -name "*.yar*"); do
 rule_name=$(echo $i | awk -F '/' '{print $NF}')
-repo_sum=$(sha256sum $i | awk '{print $1}')
+echo "Adding rule: $rule_name..."
 
-# Check rules against those in ignore list -- don't copy if ignored.
-if ! grep -iq $rule_name $ignorefile; then
-existing_rules=$(find $output_dir/$repo_name/ -name $rule_name | wc -l)
-
-# For existing rules, check to see if they need to be updated, by comparing checksums
-if [ $existing_rules -gt 0 ];then
-local_sum=$(sha256sum $output_dir/$repo_name/$rule_name | awk '{print $1}')
-if [ "$repo_sum" != "$local_sum" ]; then
-echo "Checksums do not match!"
-echo "Updating $rule_name..."
-cp $i $output_dir/$repo_name;
-((updatecounter++))
-fi
-else
-# If rule doesn't exist already, we'll add it
-echo "Adding new rule: $rule_name..."
 cp $i $output_dir/$repo_name
 ((newcounter++))
-fi
-fi;
-done
-
-# Check to see if we have any old rules that need to be removed
-for i in $(find $output_dir/$repo_name -name "*.yar*" | awk -F '/' '{print $NF}'); do
-is_repo_rule=$(find $clone_dir/$repo_name -name "$i" | wc -l)
-if [ $is_repo_rule -eq 0 ]; then
-echo "Could not find $i in source $repo_name repo...removing from $output_dir/$repo_name..."
-rm $output_dir/$repo_name/$i
-((deletecounter++))
-fi
 done
 rm -rf $clone_dir/$repo_name
 fi
@@ -151,15 +79,7 @@ if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then
 echo "Done!"
 
 if [ "$newcounter" -gt 0 ];then
-echo "$newcounter new rules added."
+echo "$newcounter rules added."
 fi
-
-if [ "$updatecounter" -gt 0 ];then
-echo "$updatecounter rules updated."
-fi
-
-if [ "$deletecounter" -gt 0 ];then
-echo "$deletecounter rules removed because they were deprecated or don't exist in the source repo."
-fi
 
 else
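
Note: the non-airgap branch above reads its repo list from $repos and skips '#'-prefixed lines via the grep -qE '^#' test shown in the hunk. A hypothetical repos.txt, with the signature-base URL as an assumed example entry:

# One git URL per line; commented lines are ignored by the script.
cat > /opt/so/saltstack/default/salt/strelka/rules/repos.txt <<'EOF'
https://github.com/Neo23x0/signature-base
# https://github.com/example/extra-rules
EOF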

@@ -385,6 +385,7 @@ preupgrade_changes() {
 [[ "$INSTALLEDVERSION" == 2.3.0 || "$INSTALLEDVERSION" == 2.3.1 || "$INSTALLEDVERSION" == 2.3.2 || "$INSTALLEDVERSION" == 2.3.10 ]] && up_2.3.0_to_2.3.20
 [[ "$INSTALLEDVERSION" == 2.3.20 || "$INSTALLEDVERSION" == 2.3.21 ]] && up_2.3.2X_to_2.3.30
 [[ "$INSTALLEDVERSION" == 2.3.30 || "$INSTALLEDVERSION" == 2.3.40 ]] && up_2.3.3X_to_2.3.50
+[[ "$INSTALLEDVERSION" == 2.3.50 || "$INSTALLEDVERSION" == 2.3.51 || "$INSTALLEDVERSION" == 2.3.52 || "$INSTALLEDVERSION" == 2.3.60 || "$INSTALLEDVERSION" == 2.3.61 || "$INSTALLEDVERSION" == 2.3.70 ]] && up_2.3.5X_to_2.3.80
 true
 }
 
@@ -622,6 +623,20 @@ EOF
 INSTALLEDVERSION=2.3.50
 }
 
+up_2.3.5X_to_2.3.80() {
+
+# Remove watermark settings from global.sls
+sed -i '/ cluster_routing_allocation_disk/d' /opt/so/saltstack/local/pillar/global.sls
+
+# Add new indices to the global
+sed -i '/ index_settings:/a \\ so-elasticsearch: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
+sed -i '/ index_settings:/a \\ so-logstash: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
+sed -i '/ index_settings:/a \\ so-kibana: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
+sed -i '/ index_settings:/a \\ so-redis: \n shards: 1 \n warm: 7 \n close: 30 \n delete: 365' /opt/so/saltstack/local/pillar/global.sls
+
+INSTALLEDVERSION=2.3.80
+}
+
 verify_upgradespace() {
 CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//')
 if [ "$CURRENTSPACE" -lt "10" ]; then
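
Note: to make the pillar edits in up_2.3.5X_to_2.3.80 concrete, here is a minimal sketch against a made-up miniature of global.sls. It uses a GNU sed s///& substitution instead of the upstream a\ form purely to keep the quoting simple, and the indentation widths are assumptions:

# Hypothetical miniature of /opt/so/saltstack/local/pillar/global.sls:
cat > /tmp/global.sls <<'EOF'
elasticsearch:
  index_settings:
    so-zeek:
      shards: 1
EOF

# Insert a new index block immediately after the 'index_settings:' line,
# equivalent in effect to one of the appends above.
sed -i 's/^  index_settings:/&\n    so-elasticsearch:\n      shards: 1\n      warm: 7\n      close: 30\n      delete: 365/' /tmp/global.sls

cat /tmp/global.sls   # so-elasticsearch now sits under index_settings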

@@ -4,6 +4,7 @@
 { "remove": { "field": ["event.created","timestamp", "winlog.event_data.UtcTime", "event_record_id"], "ignore_failure": true } },
 { "pipeline": { "if": "ctx.winlog?.channel == 'Microsoft-Windows-Sysmon/Operational'", "name": "sysmon" } },
 { "pipeline": { "if": "ctx.winlog?.channel != 'Microsoft-Windows-Sysmon/Operational'", "name":"win.eventlogs" } },
+{ "dissect": { "field": "log.file.name", "pattern" : "/tmp/%{import.id}.evtx" } },
 { "pipeline": { "name": "common" } }
 ]
 }
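
Note: the added dissect processor is what stamps each imported event with the RUNID from so-import-evtx, by parsing it out of the temporary file name. A quick way to watch it in isolation is the Elasticsearch simulate API; the host, credentials, and the a1b2c3d4 value are placeholders:

# Simulate just the dissect step against a sample document.
curl -sk -u "so_elastic:<password>" -H 'Content-Type: application/json' \
  -XPOST "https://localhost:9200/_ingest/pipeline/_simulate" -d'
{
  "pipeline": {
    "processors": [
      { "dissect": { "field": "log.file.name", "pattern": "/tmp/%{import.id}.evtx" } }
    ]
  },
  "docs": [
    { "_source": { "log": { "file": { "name": "/tmp/a1b2c3d4.evtx" } } } }
  ]
}'
# The returned document gains an import.id field with value "a1b2c3d4".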
@@ -3,7 +3,8 @@
 {%- set THEHIVEKEY = salt['pillar.get']('global:hivekey', '') %}
 {%- set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
 {%- set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
-{%- set OSQUERY = salt['pillar.get']('manager:osquery', '0') %}
+{% set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) %}
+{% set FLEETNODE = salt['pillar.get']('global:fleet_node', False) %}
 {%- set GRAFANA = salt['pillar.get']('manager:grafana', '0') %}
 {%- set ISAIRGAP = salt['pillar.get']('global:airgap', 'False') %}
 {%- set API_TIMEOUT = salt['pillar.get']('sensoroni:api_timeout_ms', 0) %}
@@ -116,7 +117,7 @@
 {%- if THEHIVE == 0 %}
 "toolTheHive",
 {%- endif %}
-{%- if OSQUERY == 0 %}
+{%- if not FLEETMANAGER and not FLEETNODE %}
 "toolFleet",
 {%- endif %}
 {%- if GRAFANA == 0 %}
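
Note: the template now keys off two pillar booleans instead of the old manager:osquery flag. A hypothetical pillar fragment supplying them, written to a standalone example file rather than the real local pillar:

cat > /tmp/fleet_pillar_example.sls <<'EOF'
global:
  fleet_manager: True
  fleet_node: False
EOF
# Per the condition above, the "toolFleet" entry is rendered only
# when both flags are false.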

salt/strelka/defaults.yaml (new file, 9 lines)
@@ -0,0 +1,9 @@
strelka:
  ignore:
    - generic_anomalies.yar
    - general_cloaking.yar
    - thor_inverse_matches.yar
    - yara_mixed_ext_vars.yar
    - gen_susp_js_obfuscatorio.yar
    - apt_flame2_orchestrator.yar
    - apt_tetris.yar
@@ -21,6 +21,8 @@
 {% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
 {% set STRELKA_RULES = salt['pillar.get']('strelka:rules', '1') %}
 {% set ENGINE = salt['pillar.get']('global:mdengine', '') %}
+{% import_yaml 'strelka/defaults.yaml' as strelka_config with context %}
+{% set IGNORELIST = salt['pillar.get']('strelka:ignore', strelka_config.strelka.ignore, merge=True, merge_nested_lists=True) %}
 
 # Strelka config
 strelkaconfdir:
@@ -54,6 +56,17 @@ strelkarules:
   - source: salt://strelka/rules
   - user: 939
   - group: 939
+  - clean: True
+  - exclude_pat:
+    {% for IGNOREDRULE in IGNORELIST %}
+    - {{ IGNOREDRULE }}
+    {% endfor %}
+
+{% for IGNOREDRULE in IGNORELIST %}
+remove_rule_{{ IGNOREDRULE }}:
+  file.absent:
+    - name: /opt/so/conf/strelka/rules/signature-base/{{ IGNOREDRULE }}
+{% endfor %}
 
 {% if grains['role'] in ['so-eval','so-managersearch', 'so-manager', 'so-standalone', 'so-import'] %}
 strelkarepos:
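
Note: because IGNORELIST is fetched with merge=True and merge_nested_lists=True, falling back to strelka/defaults.yaml, a deployment can extend the ignore list from its local pillar. A hypothetical override, written to a standalone example file, with a made-up rule name:

cat > /tmp/strelka_pillar_example.sls <<'EOF'
strelka:
  ignore:
    - my_noisy_rule.yar
EOF
# Entries here are merged with the defaults listed in
# salt/strelka/defaults.yaml above.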