Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 09:12:45 +01:00)

Merge remote-tracking branch 'origin/2.4/dev' into 2.4/detections-defaults
@@ -1,7 +1,10 @@
-{% import_yaml 'bpf/defaults.yaml' as BPFDEFAULTS %}
-{% set BPFMERGED = salt['pillar.get']('bpf', BPFDEFAULTS.bpf, merge=True) %}
-{% import 'bpf/macros.jinja' as MACROS %}
-
-{{ MACROS.remove_comments(BPFMERGED, 'pcap') }}
-
-{% set PCAPBPF = BPFMERGED.pcap %}
+{% from 'vars/globals.map.jinja' import GLOBALS %}
+{% if GLOBALS.pcap_engine == "TRANSITION" %}
+{% set PCAPBPF = ["ip and host 255.255.255.1 and port 1"] %}
+{% else %}
+{% import_yaml 'bpf/defaults.yaml' as BPFDEFAULTS %}
+{% set BPFMERGED = salt['pillar.get']('bpf', BPFDEFAULTS.bpf, merge=True) %}
+{% import 'bpf/macros.jinja' as MACROS %}
+{{ MACROS.remove_comments(BPFMERGED, 'pcap') }}
+{% set PCAPBPF = BPFMERGED.pcap %}
+{% endif %}
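For illustration only (not part of the commit): the TRANSITION placeholder is a BPF expression that compiles cleanly but matches essentially no real traffic, which appears intended to keep the legacy capture engine from recording anything while Suricata's pcap-log takes over. Assuming tcpdump is available on the sensor, the compiled filter can be inspected without capturing packets:

    # dump the compiled BPF program for the placeholder filter
    sudo tcpdump -d 'ip and host 255.255.255.1 and port 1'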
@@ -15,9 +15,9 @@ global:
     regexFailureMessage: You must enter either ZEEK or SURICATA.
     global: True
   pcapengine:
-    description: Which engine to use for generating pcap. Options are STENO and SURICATA.
-    regex: ^(STENO|SURICATA)$
-    regexFailureMessage: You must enter either STENO or SURICATA.
+    description: Which engine to use for generating pcap. Options are STENO, SURICATA or TRANSITION.
+    regex: ^(STENO|SURICATA|TRANSITION)$
+    regexFailureMessage: You must enter either STENO, SURICATA or TRANSITION.
     global: True
   ids:
     description: Which IDS engine to use. Currently only Suricata is supported.
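Purely as an illustration (not part of the commit), the widened validation regex from the annotation above now accepts the new option value; in bash:

    # the annotation regex applied to the new pcapengine value
    [[ TRANSITION =~ ^(STENO|SURICATA|TRANSITION)$ ]] && echo accepted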
@@ -117,6 +117,51 @@ rules_dir:
     - group: socore
     - makedirs: True
+
+{% if STRELKAMERGED.rules.enabled %}
+strelkarepos:
+  file.managed:
+    - name: /opt/so/conf/strelka/repos.txt
+    - source: salt://strelka/rules/repos.txt.jinja
+    - template: jinja
+    - defaults:
+        STRELKAREPOS: {{ STRELKAMERGED.rules.repos }}
+    - makedirs: True
+strelka-yara-update:
+{% if MANAGERMERGED.reposync.enabled and not GLOBALS.airgap %}
+  cron.present:
+{% else %}
+  cron.absent:
+{% endif %}
+    - user: socore
+    - name: '/usr/sbin/so-yara-update >> /opt/so/log/yarasync/yara-update.log 2>&1'
+    - identifier: strelka-yara-update
+    - hour: '7'
+    - minute: '1'
+strelka-yara-download:
+{% if MANAGERMERGED.reposync.enabled and not GLOBALS.airgap %}
+  cron.present:
+{% else %}
+  cron.absent:
+{% endif %}
+    - user: socore
+    - name: '/usr/sbin/so-yara-download >> /opt/so/log/yarasync/yara-download.log 2>&1'
+    - identifier: strelka-yara-download
+    - hour: '7'
+    - minute: '1'
+{% if not GLOBALS.airgap %}
+update_yara_rules:
+  cmd.run:
+    - name: /usr/sbin/so-yara-update
+    - onchanges:
+      - file: yara_update_scripts
+download_yara_rules:
+  cmd.run:
+    - name: /usr/sbin/so-yara-download
+    - onchanges:
+      - file: yara_update_scripts
+{% endif %}
+{% endif %}
 
 {% else %}
 
 {{sls}}_state_not_allowed:
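For reference only, and as an assumption about how Salt's cron.present renders rather than output captured from a grid: when reposync is enabled and the grid is not airgapped, the managed crontab entries for the socore user would look roughly like:

    # illustrative lines Salt manages in socore's crontab
    # SALT_CRON_IDENTIFIER:strelka-yara-update
    1 7 * * * /usr/sbin/so-yara-update >> /opt/so/log/yarasync/yara-update.log 2>&1
    # SALT_CRON_IDENTIFIER:strelka-yara-download
    1 7 * * * /usr/sbin/so-yara-download >> /opt/so/log/yarasync/yara-download.log 2>&1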
salt/manager/tools/sbin_jinja/so-yara-download (new file, 51 lines)
@@ -0,0 +1,51 @@
#!/bin/bash
NOROOT=1
. /usr/sbin/so-common

{%- set proxy = salt['pillar.get']('manager:proxy') %}
{%- set noproxy = salt['pillar.get']('manager:no_proxy', '') %}

# Download the rules from the internet
{%- if proxy %}
export http_proxy={{ proxy }}
export https_proxy={{ proxy }}
export no_proxy="{{ noproxy }}"
{%- endif %}

repos="/opt/so/conf/strelka/repos.txt"
output_dir=/nsm/rules/yara
gh_status=$(curl -s -o /dev/null -w "%{http_code}" https://github.com)
clone_dir="/tmp"
if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then

  while IFS= read -r repo; do
    if ! $(echo "$repo" | grep -qE '^#'); then
      # Remove old repo if existing bc of previous error condition or unexpected disruption
      repo_name=`echo $repo | awk -F '/' '{print $NF}'`
      [ -d $output_dir/$repo_name ] && rm -rf $output_dir/$repo_name

      # Clone repo and make appropriate directories for rules
      git clone $repo $clone_dir/$repo_name
      echo "Analyzing rules from $clone_dir/$repo_name..."
      mkdir -p $output_dir/$repo_name
      # Ensure a copy of the license is available for the rules
      [ -f $clone_dir/$repo_name/LICENSE ] && cp $clone_dir/$repo_name/LICENSE $output_dir/$repo_name

      # Copy over rules
      for i in $(find $clone_dir/$repo_name -name "*.yar*"); do
        rule_name=$(echo $i | awk -F '/' '{print $NF}')
        cp $i $output_dir/$repo_name
      done
      rm -rf $clone_dir/$repo_name
    fi
  done < $repos

  echo "Done!"

  /usr/sbin/so-yara-update

else
  echo "Server returned $gh_status status code."
  echo "No connectivity to Github...exiting..."
  exit 1
fi
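The download loop above reads /opt/so/conf/strelka/repos.txt as a list of git URLs, one per line, skipping lines that start with '#'. A hypothetical example of that file (the URL is illustrative, not necessarily the shipped default):

    # repos.txt - one YARA rule repository per line
    # https://github.com/example/disabled-rules
    https://github.com/Neo23x0/signature-base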
salt/manager/tools/sbin_jinja/so-yara-update (new file, 41 lines)
@@ -0,0 +1,41 @@
#!/bin/bash
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

NOROOT=1
. /usr/sbin/so-common

echo "Starting to check for yara rule updates at $(date)..."

newcounter=0
excludedcounter=0
excluded_rules=({{ EXCLUDEDRULES | join(' ') }})

# Pull down the SO Rules
SORULEDIR=/nsm/rules/yara
OUTPUTDIR=/opt/so/saltstack/local/salt/strelka/rules

mkdir -p $OUTPUTDIR
# remove all rules prior to copy so we can clear out old rules
rm -f $OUTPUTDIR/*

for i in $(find $SORULEDIR -name "*.yar" -o -name "*.yara"); do
  rule_name=$(echo $i | awk -F '/' '{print $NF}')
  if [[ ! "${excluded_rules[*]}" =~ ${rule_name} ]]; then
    echo "Adding rule: $rule_name..."
    cp $i $OUTPUTDIR/$rule_name
    ((newcounter++))
  else
    echo "Excluding rule: $rule_name..."
    ((excludedcounter++))
  fi
done

if [ "$newcounter" -gt 0 ] || [ "$excludedcounter" -gt 0 ];then
  echo "$newcounter rules added."
  echo "$excludedcounter rule(s) excluded."
fi

echo "Finished rule updates at $(date)..."
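A small, purely illustrative sketch (hypothetical rule names, not the shipped EXCLUDEDRULES default) of how the exclusion test above behaves; note it is a bash regex match of the rule file name against the whole space-joined exclusion list:

    excluded_rules=(example_one.yar example_two.yar)
    rule_name="example_one.yar"
    [[ "${excluded_rules[*]}" =~ ${rule_name} ]] && echo "excluded"   # prints: excluded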
@@ -50,6 +50,16 @@ backend_taste:
     - user: 939
     - group: 939
+
+{% if STRELKAMERGED.rules.enabled %}
+strelkarules:
+  file.recurse:
+    - name: /opt/so/conf/strelka/rules
+    - source: salt://strelka/rules
+    - user: 939
+    - group: 939
+    - clean: True
+{% endif %}
 
 {% else %}
 
 {{sls}}_state_not_allowed:
@@ -42,8 +42,8 @@ strelka_backend:
 {% endfor %}
 {% endif %}
     - restart_policy: on-failure
-    #- watch:
-    # - file: strelkarules
+    - watch:
+      - file: strelkarules

 delete_so-strelka-backend_so-status.disabled:
   file.uncomment:
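With the watch requisite now active (an editorial illustration, not part of the commit), a change to the distributed Strelka rules should restart the backend container on the next highstate. Assuming the standard so-strelka-backend container name, the restart shows up as a reset uptime:

    sudo docker ps --filter name=so-strelka-backend --format '{{.Names}}: {{.Status}}'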
@@ -9,7 +9,7 @@
 {% set surimeta_filestore_index = [] %}
 
 {# before we change outputs back to list, enable pcap-log if suricata is the pcapengine #}
-{% if GLOBALS.pcap_engine == "SURICATA" %}
+{% if GLOBALS.pcap_engine in ["SURICATA", "TRANSITION"] %}
 {% do SURICATAMERGED.config.outputs['pcap-log'].update({'enabled': 'yes'}) %}
 {# move the items in suricata.pcap into suricata.config.outputs.pcap-log. these items were placed under suricata.config for ease of access in SOC #}
 {% do SURICATAMERGED.config.outputs['pcap-log'].update({'compression': SURICATAMERGED.pcap.compression}) %}
@@ -11,7 +11,7 @@ suripcapdir:
     - mode: 775
     - makedirs: True
 
-{% if GLOBALS.pcap_engine == "SURICATA" %}
+{% if GLOBALS.pcap_engine in ["SURICATA", "TRANSITION"] %}
 
 {# there should only be 1 interface in af-packet so we can just reference the first list item #}
 {% for i in range(1, SURICATAMERGED.config['af-packet'][0].threads + 1) %}
@@ -5,7 +5,7 @@
 # https://securityonion.net/license; you may not use this file except in compliance with the
 # Elastic License 2.0.
 
-{%- if GLOBALS.pcap_engine == "SURICATA" %}
+{%- if GLOBALS.pcap_engine in ["SURICATA", "TRANSITION"] %}
 PCAPLOC=/host/nsm/suripcap
 {%- else %}
 PCAPLOC=/host/nsm/pcap