diff --git a/DOWNLOAD_AND_VERIFY_ISO.md b/DOWNLOAD_AND_VERIFY_ISO.md index 4493f210d..a5fd6e157 100644 --- a/DOWNLOAD_AND_VERIFY_ISO.md +++ b/DOWNLOAD_AND_VERIFY_ISO.md @@ -1,17 +1,17 @@ -### 2.4.60-20240320 ISO image released on 2024/03/20 +### 2.4.70-20240529 ISO image released on 2024/05/29 ### Download and Verify -2.4.60-20240320 ISO image: -https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso +2.4.70-20240529 ISO image: +https://download.securityonion.net/file/securityonion/securityonion-2.4.70-20240529.iso -MD5: 178DD42D06B2F32F3870E0C27219821E -SHA1: 73EDCD50817A7F6003FE405CF1808A30D034F89D -SHA256: DD334B8D7088A7B78160C253B680D645E25984BA5CCAB5CC5C327CA72137FC06 +MD5: 8FCCF31C2470D1ABA380AF196B611DEC +SHA1: EE5E8F8C14819E7A1FE423E6920531A97F39600B +SHA256: EF5E781D50D50660F452ADC54FD4911296ECBECED7879FA8E04687337CA89BEC Signature for ISO image: -https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig +https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.70-20240529.iso.sig Signing key: https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS @@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2. 
Download the signature file for the ISO: ``` -wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig +wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.70-20240529.iso.sig ``` Download the ISO image: ``` -wget https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso +wget https://download.securityonion.net/file/securityonion/securityonion-2.4.70-20240529.iso ``` Verify the downloaded ISO image using the signature file: ``` -gpg --verify securityonion-2.4.60-20240320.iso.sig securityonion-2.4.60-20240320.iso +gpg --verify securityonion-2.4.70-20240529.iso.sig securityonion-2.4.70-20240529.iso ``` The output should show "Good signature" and the Primary key fingerprint should match what's shown below: ``` -gpg: Signature made Tue 19 Mar 2024 03:17:58 PM EDT using RSA key ID FE507013 +gpg: Signature made Wed 29 May 2024 11:40:59 AM EDT using RSA key ID FE507013 gpg: Good signature from "Security Onion Solutions, LLC " gpg: WARNING: This key is not certified with a trusted signature! gpg: There is no indication that the signature belongs to the owner. 
diff --git a/README.md b/README.md index 19a560419..530a21813 100644 --- a/README.md +++ b/README.md @@ -8,19 +8,22 @@ Alerts ![Alerts](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/50_alerts.png) Dashboards -![Dashboards](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/51_dashboards.png) +![Dashboards](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/53_dashboards.png) Hunt -![Hunt](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/52_hunt.png) +![Hunt](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/56_hunt.png) + +Detections +![Detections](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/57_detections.png) PCAP -![PCAP](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/53_pcap.png) +![PCAP](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/62_pcap.png) Grid -![Grid](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/57_grid.png) +![Grid](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/75_grid.png) Config -![Config](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/61_config.png) +![Config](https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion-docs/2.4/images/87_config.png) ### Release Notes diff --git a/VERSION b/VERSION index b3c5d8c27..d2587d896 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.4.70 +2.4.80 diff --git a/salt/common/soup_scripts.sls b/salt/common/soup_scripts.sls index 90ee059a4..898de67ee 100644 --- a/salt/common/soup_scripts.sls +++ b/salt/common/soup_scripts.sls @@ -1,3 +1,8 @@ +# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one +# or more 
contributor license agreements. Licensed under the Elastic License 2.0 as shown at +# https://securityonion.net/license; you may not use this file except in compliance with the +# Elastic License 2.0. + {% if '2.4' in salt['cp.get_file_str']('/etc/soversion') %} {% import_yaml '/opt/so/saltstack/local/pillar/global/soc_global.sls' as SOC_GLOBAL %} @@ -15,6 +20,8 @@ remove_common_so-firewall: file.absent: - name: /opt/so/saltstack/default/salt/common/tools/sbin/so-firewall +# This section is used to put the scripts in place in the Salt file system +# in case a state run tries to overwrite what we do in the next section. copy_so-common_common_tools_sbin: file.copy: - name: /opt/so/saltstack/default/salt/common/tools/sbin/so-common @@ -43,6 +50,15 @@ copy_so-firewall_manager_tools_sbin: - force: True - preserve: True +copy_so-yaml_manager_tools_sbin: + file.copy: + - name: /opt/so/saltstack/default/salt/manager/tools/sbin/so-yaml.py + - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-yaml.py + - force: True + - preserve: True + +# This section is used to put the new script in place so that it can be called during soup. +# It is faster than calling the states that normally manage them to put them in place. copy_so-common_sbin: file.copy: - name: /usr/sbin/so-common diff --git a/salt/common/tools/sbin/so-common b/salt/common/tools/sbin/so-common index a71d67f81..8d4d9f8ab 100755 --- a/salt/common/tools/sbin/so-common +++ b/salt/common/tools/sbin/so-common @@ -179,6 +179,21 @@ copy_new_files() { cd /tmp } +create_local_directories() { + echo "Creating local pillar and salt directories if needed" + PILLARSALTDIR=$1 + local_salt_dir="/opt/so/saltstack/local" + for i in "pillar" "salt"; do + for d in $(find $PILLARSALTDIR/$i -type d); do + suffixdir=${d//$PILLARSALTDIR/} + if [ ! 
-d "$local_salt_dir/$suffixdir" ]; then + mkdir -pv $local_salt_dir$suffixdir + fi + done + chown -R socore:socore $local_salt_dir/$i + done +} + disable_fastestmirror() { sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf } diff --git a/salt/common/tools/sbin/so-log-check b/salt/common/tools/sbin/so-log-check index b83c98e7a..cf1691589 100755 --- a/salt/common/tools/sbin/so-log-check +++ b/salt/common/tools/sbin/so-log-check @@ -201,6 +201,10 @@ if [[ $EXCLUDE_KNOWN_ERRORS == 'Y' ]]; then EXCLUDED_ERRORS="$EXCLUDED_ERRORS|Unknown column" # Elastalert errors from running EQL queries EXCLUDED_ERRORS="$EXCLUDED_ERRORS|parsing_exception" # Elastalert EQL parsing issue. Temp. EXCLUDED_ERRORS="$EXCLUDED_ERRORS|context deadline exceeded" + EXCLUDED_ERRORS="$EXCLUDED_ERRORS|Error running query:" # Specific issues with detection rules + EXCLUDED_ERRORS="$EXCLUDED_ERRORS|detect-parse" # Suricata encountering a malformed rule + EXCLUDED_ERRORS="$EXCLUDED_ERRORS|integrity check failed" # Detections: Exclude false positive due to automated testing + EXCLUDED_ERRORS="$EXCLUDED_ERRORS|syncErrors" # Detections: Not an actual error fi RESULT=0 diff --git a/salt/common/tools/sbin/so-luks-tpm-regen b/salt/common/tools/sbin/so-luks-tpm-regen new file mode 100644 index 000000000..50058b504 --- /dev/null +++ b/salt/common/tools/sbin/so-luks-tpm-regen @@ -0,0 +1,98 @@ +#!/bin/bash +# +# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one +# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at +# https://securityonion.net/license; you may not use this file except in compliance with the +# Elastic License 2.0." + +set -e +# This script is intended to be used in the case the ISO install did not properly setup TPM decrypt for LUKS partitions at boot. +if [ -z $NOROOT ]; then + # Check for prerequisites + if [ "$(id -u)" -ne 0 ]; then + echo "This script must be run using sudo!" 
+ exit 1 + fi +fi +ENROLL_TPM=N + +while [[ $# -gt 0 ]]; do + case $1 in + --enroll-tpm) + ENROLL_TPM=Y + ;; + *) + echo "Usage: $0 [options]" + echo "" + echo "where options are:" + echo " --enroll-tpm for when TPM enrollment was not selected during ISO install." + echo "" + exit 1 + ;; + esac + shift +done + +check_for_tpm() { + echo -n "Checking for TPM: " + if [ -d /sys/class/tpm/tpm0 ]; then + echo -e "tpm0 found." + TPM="yes" + # Check if TPM is using sha1 or sha256 + if [ -d /sys/class/tpm/tpm0/pcr-sha1 ]; then + echo -e "TPM is using sha1.\n" + TPM_PCR="sha1" + elif [ -d /sys/class/tpm/tpm0/pcr-sha256 ]; then + echo -e "TPM is using sha256.\n" + TPM_PCR="sha256" + fi + else + echo -e "No TPM found.\n" + exit 1 + fi +} + +check_for_luks_partitions() { + echo "Checking for LUKS partitions" + for part in $(lsblk -o NAME,FSTYPE -ln | grep crypto_LUKS | awk '{print $1}'); do + echo "Found LUKS partition: $part" + LUKS_PARTITIONS+=("$part") + done + if [ ${#LUKS_PARTITIONS[@]} -eq 0 ]; then + echo -e "No LUKS partitions found.\n" + exit 1 + fi + echo "" +} + +enroll_tpm_in_luks() { + read -s -p "Enter the LUKS passphrase used during ISO install: " LUKS_PASSPHRASE + echo "" + for part in "${LUKS_PARTITIONS[@]}"; do + echo "Enrolling TPM for LUKS device: /dev/$part" + if [ "$TPM_PCR" == "sha1" ]; then + clevis luks bind -d /dev/$part tpm2 '{"pcr_bank":"sha1","pcr_ids":"7"}' <<< $LUKS_PASSPHRASE + elif [ "$TPM_PCR" == "sha256" ]; then + clevis luks bind -d /dev/$part tpm2 '{"pcr_bank":"sha256","pcr_ids":"7"}' <<< $LUKS_PASSPHRASE + fi + done + } + +regenerate_tpm_enrollment_token() { + for part in "${LUKS_PARTITIONS[@]}"; do + clevis luks regen -d /dev/$part -s 1 -q + done +} + +check_for_tpm +check_for_luks_partitions + +if [[ $ENROLL_TPM == "Y" ]]; then + enroll_tpm_in_luks +else + regenerate_tpm_enrollment_token +fi + +echo "Running dracut" +dracut -fv +echo -e "\nTPM configuration complete. 
Reboot the system to verify the TPM is correctly decrypting the LUKS partition(s) at boot.\n" \ No newline at end of file diff --git a/salt/common/tools/sbin_jinja/so-import-pcap b/salt/common/tools/sbin_jinja/so-import-pcap index 30d5d4fc4..d3886305e 100755 --- a/salt/common/tools/sbin_jinja/so-import-pcap +++ b/salt/common/tools/sbin_jinja/so-import-pcap @@ -248,7 +248,7 @@ fi START_OLDEST_SLASH=$(echo $START_OLDEST | sed -e 's/-/%2F/g') END_NEWEST_SLASH=$(echo $END_NEWEST | sed -e 's/-/%2F/g') if [[ $VALID_PCAPS_COUNT -gt 0 ]] || [[ $SKIPPED_PCAPS_COUNT -gt 0 ]]; then - URL="https://{{ URLBASE }}/#/dashboards?q=$HASH_FILTERS%20%7C%20groupby%20-sankey%20event.dataset%20event.category%2a%20%7C%20groupby%20-pie%20event.category%20%7C%20groupby%20-bar%20event.module%20%7C%20groupby%20event.dataset%20%7C%20groupby%20event.module%20%7C%20groupby%20event.category%20%7C%20groupby%20observer.name%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC" + URL="https://{{ URLBASE }}/#/dashboards?q=$HASH_FILTERS%20%7C%20groupby%20event.module*%20%7C%20groupby%20-sankey%20event.module*%20event.dataset%20%7C%20groupby%20event.dataset%20%7C%20groupby%20source.ip%20%7C%20groupby%20destination.ip%20%7C%20groupby%20destination.port%20%7C%20groupby%20network.protocol%20%7C%20groupby%20rule.name%20rule.category%20event.severity_label%20%7C%20groupby%20dns.query.name%20%7C%20groupby%20file.mime_type%20%7C%20groupby%20http.virtual_host%20http.uri%20%7C%20groupby%20notice.note%20notice.message%20notice.sub_message%20%7C%20groupby%20ssl.server_name%20%7C%20groupby%20source_geo.organization_name%20source.geo.country_name%20%7C%20groupby%20destination_geo.organization_name%20destination.geo.country_name&t=${START_OLDEST_SLASH}%2000%3A00%3A00%20AM%20-%20${END_NEWEST_SLASH}%2000%3A00%3A00%20AM&z=UTC" status "Import complete!" 
status diff --git a/salt/elastalert/config.sls b/salt/elastalert/config.sls index 252aa83c0..25d5bf5f8 100644 --- a/salt/elastalert/config.sls +++ b/salt/elastalert/config.sls @@ -82,6 +82,36 @@ elastasomodulesync: - group: 933 - makedirs: True +elastacustomdir: + file.directory: + - name: /opt/so/conf/elastalert/custom + - user: 933 + - group: 933 + - makedirs: True + +elastacustomsync: + file.recurse: + - name: /opt/so/conf/elastalert/custom + - source: salt://elastalert/files/custom + - user: 933 + - group: 933 + - makedirs: True + - file_mode: 660 + - show_changes: False + +elastapredefinedsync: + file.recurse: + - name: /opt/so/conf/elastalert/predefined + - source: salt://elastalert/files/predefined + - user: 933 + - group: 933 + - makedirs: True + - template: jinja + - file_mode: 660 + - context: + elastalert: {{ ELASTALERTMERGED }} + - show_changes: False + elastaconf: file.managed: - name: /opt/so/conf/elastalert/elastalert_config.yaml diff --git a/salt/elastalert/defaults.yaml b/salt/elastalert/defaults.yaml index a01c80952..393932992 100644 --- a/salt/elastalert/defaults.yaml +++ b/salt/elastalert/defaults.yaml @@ -1,5 +1,6 @@ elastalert: enabled: False + alerter_parameters: "" config: rules_folder: /opt/elastalert/rules/ scan_subdirectories: true diff --git a/salt/elastalert/enabled.sls b/salt/elastalert/enabled.sls index e4b3642db..6a1ff1440 100644 --- a/salt/elastalert/enabled.sls +++ b/salt/elastalert/enabled.sls @@ -30,6 +30,8 @@ so-elastalert: - /opt/so/rules/elastalert:/opt/elastalert/rules/:ro - /opt/so/log/elastalert:/var/log/elastalert:rw - /opt/so/conf/elastalert/modules/:/opt/elastalert/modules/:ro + - /opt/so/conf/elastalert/predefined/:/opt/elastalert/predefined/:ro + - /opt/so/conf/elastalert/custom/:/opt/elastalert/custom/:ro - /opt/so/conf/elastalert/elastalert_config.yaml:/opt/elastalert/config.yaml:ro {% if DOCKER.containers['so-elastalert'].custom_bind_mounts %} {% for BIND in DOCKER.containers['so-elastalert'].custom_bind_mounts %} 
diff --git a/salt/elastalert/files/custom/placeholder b/salt/elastalert/files/custom/placeholder new file mode 100644 index 000000000..42e4ae4f0 --- /dev/null +++ b/salt/elastalert/files/custom/placeholder @@ -0,0 +1 @@ +THIS IS A PLACEHOLDER FILE \ No newline at end of file diff --git a/salt/elastalert/files/modules/so/securityonion-es.py b/salt/elastalert/files/modules/so/securityonion-es.py index 0a82bdce6..d9bb8009e 100644 --- a/salt/elastalert/files/modules/so/securityonion-es.py +++ b/salt/elastalert/files/modules/so/securityonion-es.py @@ -56,8 +56,8 @@ class SecurityOnionESAlerter(Alerter): "event_data": match, "@timestamp": timestamp } - url = f"https://{self.rule['es_host']}:{self.rule['es_port']}/logs-playbook.alerts-so/_doc/" + url = f"https://{self.rule['es_host']}:{self.rule['es_port']}/logs-detections.alerts-so/_doc/" requests.post(url, data=json.dumps(payload), headers=headers, verify=False, auth=creds) def get_info(self): - return {'type': 'SecurityOnionESAlerter'} \ No newline at end of file + return {'type': 'SecurityOnionESAlerter'} diff --git a/salt/elastalert/files/predefined/jira_auth.yaml b/salt/elastalert/files/predefined/jira_auth.yaml new file mode 100644 index 000000000..9bf0425c0 --- /dev/null +++ b/salt/elastalert/files/predefined/jira_auth.yaml @@ -0,0 +1,6 @@ +{% if elastalert.get('jira_user', '') | length > 0 and elastalert.get('jira_pass', '') | length > 0 %} +user: {{ elastalert.jira_user }} +password: {{ elastalert.jira_pass }} +{% else %} +apikey: {{ elastalert.get('jira_api_key', '') }} +{% endif %} \ No newline at end of file diff --git a/salt/elastalert/files/predefined/smtp_auth.yaml b/salt/elastalert/files/predefined/smtp_auth.yaml new file mode 100644 index 000000000..d09d101f1 --- /dev/null +++ b/salt/elastalert/files/predefined/smtp_auth.yaml @@ -0,0 +1,2 @@ +user: {{ elastalert.get('smtp_user', '') }} +password: {{ elastalert.get('smtp_pass', '') }} diff --git a/salt/elastalert/map.jinja b/salt/elastalert/map.jinja 
index cc395d8ee..b8ceca277 100644 --- a/salt/elastalert/map.jinja +++ b/salt/elastalert/map.jinja @@ -13,3 +13,19 @@ {% do ELASTALERTDEFAULTS.elastalert.config.update({'es_password': pillar.elasticsearch.auth.users.so_elastic_user.pass}) %} {% set ELASTALERTMERGED = salt['pillar.get']('elastalert', ELASTALERTDEFAULTS.elastalert, merge=True) %} + +{% if 'ntf' in salt['pillar.get']('features', []) %} + {% set params = ELASTALERTMERGED.get('alerter_parameters', '') | load_yaml %} + {% if params != None and params | length > 0 %} + {% do ELASTALERTMERGED.config.update(params) %} + {% endif %} + + {% if ELASTALERTMERGED.get('smtp_user', '') | length > 0 %} + {% do ELASTALERTMERGED.config.update({'smtp_auth_file': '/opt/elastalert/predefined/smtp_auth.yaml'}) %} + {% endif %} + + {% if ELASTALERTMERGED.get('jira_user', '') | length > 0 or ELASTALERTMERGED.get('jira_api_key', '') | length > 0 %} + {% do ELASTALERTMERGED.config.update({'jira_account_file': '/opt/elastalert/predefined/jira_auth.yaml'}) %} + {% endif %} + +{% endif %} diff --git a/salt/elastalert/soc_elastalert.yaml b/salt/elastalert/soc_elastalert.yaml index cde09b83e..435c5be6a 100644 --- a/salt/elastalert/soc_elastalert.yaml +++ b/salt/elastalert/soc_elastalert.yaml @@ -2,6 +2,99 @@ elastalert: enabled: description: You can enable or disable Elastalert. helpLink: elastalert.html + alerter_parameters: + title: Alerter Parameters + description: Optional configuration parameters for additional alerters that can be enabled for all Sigma rules. Filter for 'Alerter' in this Configuration screen to find the setting that allows these alerters to be enabled within the SOC ElastAlert module. Use YAML format for these parameters, and reference the ElastAlert 2 documentation, located at https://elastalert2.readthedocs.io, for available alerters and their required configuration parameters. A full update of the ElastAlert rule engine, via the Detections screen, is required in order to apply these changes.
Requires a valid Security Onion license key. + global: True + multiline: True + syntax: yaml + helpLink: elastalert.html + forcedType: string + jira_api_key: + title: Jira API Key + description: Optional configuration parameter for Jira API Key, used instead of the Jira username and password. Requires a valid Security Onion license key. + global: True + sensitive: True + helpLink: elastalert.html + forcedType: string + jira_pass: + title: Jira Password + description: Optional configuration parameter for Jira password. Requires a valid Security Onion license key. + global: True + sensitive: True + helpLink: elastalert.html + forcedType: string + jira_user: + title: Jira Username + description: Optional configuration parameter for Jira username. Requires a valid Security Onion license key. + global: True + helpLink: elastalert.html + forcedType: string + smtp_pass: + title: SMTP Password + description: Optional configuration parameter for SMTP password, required for authenticating email servers. Requires a valid Security Onion license key. + global: True + sensitive: True + helpLink: elastalert.html + forcedType: string + smtp_user: + title: SMTP Username + description: Optional configuration parameter for SMTP username, required for authenticating email servers. Requires a valid Security Onion license key. + global: True + helpLink: elastalert.html + forcedType: string + files: + custom: + alertmanager_ca__crt: + description: Optional custom Certificate Authority for connecting to an AlertManager server. To utilize this custom file, the alertmanager_ca_certs key must be set to /opt/elastalert/custom/alertmanager_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + gelf_ca__crt: + description: Optional custom Certificate Authority for connecting to a Graylog server. 
To utilize this custom file, the gelf_ca_cert key must be set to /opt/elastalert/custom/gelf_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + http_post_ca__crt: + description: Optional custom Certificate Authority for connecting to a generic HTTP server, via the legacy HTTP POST alerter. To utilize this custom file, the http_post_ca_certs key must be set to /opt/elastalert/custom/http_post_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + http_post2_ca__crt: + description: Optional custom Certificate Authority for connecting to a generic HTTP server, via the newer HTTP POST 2 alerter. To utilize this custom file, the http_post2_ca_certs key must be set to /opt/elastalert/custom/http_post2_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + ms_teams_ca__crt: + description: Optional custom Certificate Authority for connecting to Microsoft Teams server. To utilize this custom file, the ms_teams_ca_certs key must be set to /opt/elastalert/custom/ms_teams_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + pagerduty_ca__crt: + description: Optional custom Certificate Authority for connecting to PagerDuty server. To utilize this custom file, the pagerduty_ca_certs key must be set to /opt/elastalert/custom/pagerduty_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + rocket_chat_ca__crt: + description: Optional custom Certificate Authority for connecting to a Rocket.Chat server. 
To utilize this custom file, the rocket_chat_ca_certs key must be set to /opt/elastalert/custom/rocket_chat_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + smtp__crt: + description: Optional custom certificate for connecting to an SMTP server. To utilize this custom file, the smtp_cert_file key must be set to /opt/elastalert/custom/smtp.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + smtp__key: + description: Optional custom certificate key for connecting to an SMTP server. To utilize this custom file, the smtp_key_file key must be set to /opt/elastalert/custom/smtp.key in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html + slack_ca__crt: + description: Optional custom Certificate Authority for connecting to Slack. To utilize this custom file, the slack_ca_certs key must be set to /opt/elastalert/custom/slack_ca.crt in the Alerter Parameters setting. Requires a valid Security Onion license key. + global: True + file: True + helpLink: elastalert.html config: disable_rules_on_error: description: Disable rules on failure. 
diff --git a/salt/elasticfleet/tools/sbin_jinja/so-elastic-agent-gen-installers b/salt/elasticfleet/tools/sbin_jinja/so-elastic-agent-gen-installers index ff46a3e07..1e4222cae 100755 --- a/salt/elasticfleet/tools/sbin_jinja/so-elastic-agent-gen-installers +++ b/salt/elasticfleet/tools/sbin_jinja/so-elastic-agent-gen-installers @@ -72,5 +72,5 @@ do printf "\n### $GOOS/$GOARCH Installer Generated...\n" done -printf "\n### Cleaning up temp files in /nsm/elastic-agent-workspace" +printf "\n### Cleaning up temp files in /nsm/elastic-agent-workspace\n" rm -rf /nsm/elastic-agent-workspace diff --git a/salt/elasticsearch/defaults.yaml b/salt/elasticsearch/defaults.yaml index 156483b03..e54d58c3b 100644 --- a/salt/elasticsearch/defaults.yaml +++ b/salt/elasticsearch/defaults.yaml @@ -3591,6 +3591,68 @@ elasticsearch: set_priority: priority: 50 min_age: 30d + so-logs-detections_x_alerts: + index_sorting: false + index_template: + composed_of: + - so-data-streams-mappings + - so-fleet_globals-1 + - so-fleet_agent_id_verification-1 + - so-logs-mappings + - so-logs-settings + data_stream: + allow_custom_routing: false + hidden: false + index_patterns: + - logs-detections.alerts-* + priority: 501 + template: + mappings: + _meta: + managed: true + managed_by: security_onion + package: + name: elastic_agent + settings: + index: + lifecycle: + name: so-logs-detections.alerts-so + mapping: + total_fields: + limit: 5001 + number_of_replicas: 0 + sort: + field: '@timestamp' + order: desc + policy: + _meta: + managed: true + managed_by: security_onion + package: + name: elastic_agent + phases: + cold: + actions: + set_priority: + priority: 0 + min_age: 60d + delete: + actions: + delete: {} + min_age: 365d + hot: + actions: + rollover: + max_age: 1d + max_primary_shard_size: 50gb + set_priority: + priority: 100 + min_age: 0ms + warm: + actions: + set_priority: + priority: 50 + min_age: 30d so-logs-elastic_agent: index_sorting: false index_template: diff --git 
a/salt/elasticsearch/files/ingest/strelka.file b/salt/elasticsearch/files/ingest/strelka.file index d55e19350..d9aed3b29 100644 --- a/salt/elasticsearch/files/ingest/strelka.file +++ b/salt/elasticsearch/files/ingest/strelka.file @@ -56,6 +56,7 @@ { "set": { "if": "ctx.exiftool?.Subsystem != null", "field": "host.subsystem", "value": "{{exiftool.Subsystem}}", "ignore_failure": true }}, { "set": { "if": "ctx.scan?.yara?.matches instanceof List", "field": "rule.name", "value": "{{scan.yara.matches.0}}" }}, { "set": { "if": "ctx.rule?.name != null", "field": "event.dataset", "value": "alert", "override": true }}, + { "set": { "if": "ctx.rule?.name != null", "field": "rule.uuid", "value": "{{rule.name}}", "override": true }}, { "rename": { "field": "file.flavors.mime", "target_field": "file.mime_type", "ignore_missing": true }}, { "set": { "if": "ctx.rule?.name != null && ctx.rule?.score == null", "field": "event.severity", "value": 3, "override": true } }, { "convert" : { "if": "ctx.rule?.score != null", "field" : "rule.score","type": "integer"}}, diff --git a/salt/elasticsearch/soc_elasticsearch.yaml b/salt/elasticsearch/soc_elasticsearch.yaml index cc92493fb..000fd60b7 100644 --- a/salt/elasticsearch/soc_elasticsearch.yaml +++ b/salt/elasticsearch/soc_elasticsearch.yaml @@ -394,6 +394,7 @@ elasticsearch: so-logs-darktrace_x_ai_analyst_alert: *indexSettings so-logs-darktrace_x_model_breach_alert: *indexSettings so-logs-darktrace_x_system_status_alert: *indexSettings + so-logs-detections_x_alerts: *indexSettings so-logs-f5_bigip_x_log: *indexSettings so-logs-fim_x_event: *indexSettings so-logs-fortinet_x_clientendpoint: *indexSettings diff --git a/salt/elasticsearch/template.map.jinja b/salt/elasticsearch/template.map.jinja index f5a124a9a..b59c291a4 100644 --- a/salt/elasticsearch/template.map.jinja +++ b/salt/elasticsearch/template.map.jinja @@ -2,11 +2,9 @@ {% set DEFAULT_GLOBAL_OVERRIDES = ELASTICSEARCHDEFAULTS.elasticsearch.index_settings.pop('global_overrides') 
%} {% set PILLAR_GLOBAL_OVERRIDES = {} %} -{% if salt['pillar.get']('elasticsearch:index_settings') is defined %} -{% set ES_INDEX_PILLAR = salt['pillar.get']('elasticsearch:index_settings') %} -{% if ES_INDEX_PILLAR.global_overrides is defined %} -{% set PILLAR_GLOBAL_OVERRIDES = ES_INDEX_PILLAR.pop('global_overrides') %} -{% endif %} +{% set ES_INDEX_PILLAR = salt['pillar.get']('elasticsearch:index_settings', {}) %} +{% if ES_INDEX_PILLAR.global_overrides is defined %} +{% set PILLAR_GLOBAL_OVERRIDES = ES_INDEX_PILLAR.pop('global_overrides') %} {% endif %} {% set ES_INDEX_SETTINGS_ORIG = ELASTICSEARCHDEFAULTS.elasticsearch.index_settings %} @@ -19,6 +17,12 @@ {% set ES_INDEX_SETTINGS = {} %} {% do ES_INDEX_SETTINGS_GLOBAL_OVERRIDES.update(salt['defaults.merge'](ES_INDEX_SETTINGS_GLOBAL_OVERRIDES, ES_INDEX_PILLAR, in_place=False)) %} {% for index, settings in ES_INDEX_SETTINGS_GLOBAL_OVERRIDES.items() %} +{# if policy isn't defined in the original index settings, then dont merge policy from the global_overrides #} +{# this will prevent so-elasticsearch-ilm-policy-load from trying to load policy on non ILM manged indices #} +{% if not ES_INDEX_SETTINGS_ORIG[index].policy is defined and ES_INDEX_SETTINGS_GLOBAL_OVERRIDES[index].policy is defined %} +{% do ES_INDEX_SETTINGS_GLOBAL_OVERRIDES[index].pop('policy') %} +{% endif %} + {% if settings.index_template is defined %} {% if not settings.get('index_sorting', False) | to_bool and settings.index_template.template.settings.index.sort is defined %} {% do settings.index_template.template.settings.index.pop('sort') %} diff --git a/salt/elasticsearch/tools/sbin/so-index-list b/salt/elasticsearch/tools/sbin/so-index-list index 1e4595b35..572e55cba 100755 --- a/salt/elasticsearch/tools/sbin/so-index-list +++ b/salt/elasticsearch/tools/sbin/so-index-list @@ -5,6 +5,6 @@ # https://securityonion.net/license; you may not use this file except in compliance with the # Elastic License 2.0. +. 
/usr/sbin/so-common - -curl -K /opt/so/conf/elasticsearch/curl.config-X GET -k -L "https://localhost:9200/_cat/indices?v&s=index" +curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L "https://localhost:9200/_cat/indices?pretty&v&s=index" diff --git a/salt/elasticsearch/tools/sbin_jinja/so-elasticsearch-templates-load b/salt/elasticsearch/tools/sbin_jinja/so-elasticsearch-templates-load index 8f45d6c36..080348522 100755 --- a/salt/elasticsearch/tools/sbin_jinja/so-elasticsearch-templates-load +++ b/salt/elasticsearch/tools/sbin_jinja/so-elasticsearch-templates-load @@ -133,7 +133,7 @@ if [ ! -f $STATE_FILE_SUCCESS ]; then for i in $pattern; do TEMPLATE=${i::-14} COMPONENT_PATTERN=${TEMPLATE:3} - MATCH=$(echo "$TEMPLATE" | grep -E "^so-logs-|^so-metrics" | grep -v osquery) + MATCH=$(echo "$TEMPLATE" | grep -E "^so-logs-|^so-metrics" | grep -vE "detections|osquery") if [[ -n "$MATCH" && ! "$COMPONENT_LIST" =~ "$COMPONENT_PATTERN" ]]; then load_failures=$((load_failures+1)) echo "Component template does not exist for $COMPONENT_PATTERN. The index template will not be loaded. 
Load failures: $load_failures" diff --git a/salt/idh/openssh/config.sls b/salt/idh/openssh/config.sls index d358bb5be..5e2acd8d2 100644 --- a/salt/idh/openssh/config.sls +++ b/salt/idh/openssh/config.sls @@ -11,6 +11,8 @@ idh_sshd_selinux: - sel_type: ssh_port_t - prereq: - file: openssh_config + - require: + - pkg: python_selinux_mgmt_tools {% endif %} openssh_config: diff --git a/salt/idh/openssh/init.sls b/salt/idh/openssh/init.sls index ba0a8ab04..79d082502 100644 --- a/salt/idh/openssh/init.sls +++ b/salt/idh/openssh/init.sls @@ -15,3 +15,9 @@ openssh: - enable: False - name: {{ openssh_map.service }} {% endif %} + +{% if grains.os_family == 'RedHat' %} +python_selinux_mgmt_tools: + pkg.installed: + - name: policycoreutils-python-utils +{% endif %} diff --git a/salt/idstools/soc_idstools.yaml b/salt/idstools/soc_idstools.yaml index 698a7a1fc..993abfd51 100644 --- a/salt/idstools/soc_idstools.yaml +++ b/salt/idstools/soc_idstools.yaml @@ -9,7 +9,7 @@ idstools: forcedType: string helpLink: rules.html ruleset: - description: 'Defines the ruleset you want to run. Options are ETOPEN or ETPRO. Once you have changed the ruleset here, you will need to wait for the rule update to take place (every 8 hours), or you can force the update by nagivating to Detections --> Options dropdown menu --> Suricata --> Full Update. WARNING! Changing the ruleset will remove all existing Suricata rules of the previous ruleset and their associated overrides. This removal cannot be undone.' + description: 'Defines the ruleset you want to run. Options are ETOPEN or ETPRO. Once you have changed the ruleset here, you will need to wait for the rule update to take place (every 24 hours), or you can force the update by nagivating to Detections --> Options dropdown menu --> Suricata --> Full Update. WARNING! Changing the ruleset will remove all existing non-overlapping Suricata rules of the previous ruleset and their associated overrides. This removal cannot be undone.' 
global: True regex: ETPRO\b|ETOPEN\b helpLink: rules.html diff --git a/salt/manager/init.sls b/salt/manager/init.sls index d979482ef..ec37f9ff3 100644 --- a/salt/manager/init.sls +++ b/salt/manager/init.sls @@ -73,17 +73,6 @@ manager_sbin: - exclude_pat: - "*_test.py" -yara_update_scripts: - file.recurse: - - name: /usr/sbin/ - - source: salt://manager/tools/sbin_jinja/ - - user: socore - - group: socore - - file_mode: 755 - - template: jinja - - defaults: - EXCLUDEDRULES: {{ STRELKAMERGED.rules.excluded }} - so-repo-file: file.managed: - name: /opt/so/conf/reposync/repodownload.conf diff --git a/salt/manager/tools/sbin/so-minion b/salt/manager/tools/sbin/so-minion index b9b6ff4dd..5ef99b7a9 100755 --- a/salt/manager/tools/sbin/so-minion +++ b/salt/manager/tools/sbin/so-minion @@ -201,11 +201,7 @@ function add_idh_to_minion() { "idh:"\ " enabled: True"\ " restrict_management_ip: $IDH_MGTRESTRICT"\ - " services:" >> "$PILLARFILE" - IFS=',' read -ra IDH_SERVICES_ARRAY <<< "$IDH_SERVICES" - for service in ${IDH_SERVICES_ARRAY[@]}; do - echo " - $service" | tr '[:upper:]' '[:lower:]' | tr -d '"' >> "$PILLARFILE" - done + " " >> $PILLARFILE } function add_logstash_to_minion() { diff --git a/salt/manager/tools/sbin/soup b/salt/manager/tools/sbin/soup index 7239e6663..f643f6de7 100755 --- a/salt/manager/tools/sbin/soup +++ b/salt/manager/tools/sbin/soup @@ -438,7 +438,13 @@ post_to_2.4.60() { } post_to_2.4.70() { - echo "Nothing to apply" + printf "\nRemoving idh.services from any existing IDH node pillar files\n" + for file in /opt/so/saltstack/local/pillar/minions/*.sls; do + if [[ $file =~ "_idh.sls" && ! 
$file =~ "/opt/so/saltstack/local/pillar/minions/adv_" ]]; then + echo "Removing idh.services from: $file" + so-yaml.py remove "$file" idh.services + fi + done POSTVERSION=2.4.70 } @@ -583,7 +589,9 @@ up_to_2.4.60() { up_to_2.4.70() { playbook_migration + suricata_idstools_migration toggle_telemetry + add_detection_test_pillars # Kafka configuration changes @@ -603,6 +611,18 @@ up_to_2.4.70() { INSTALLEDVERSION=2.4.70 } +add_detection_test_pillars() { + if [[ -n "$SOUP_INTERNAL_TESTING" ]]; then + echo "Adding detection pillar values for automated testing" + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.elastalertengine.allowRegex SecurityOnion + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.elastalertengine.failAfterConsecutiveErrorCount 1 + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.strelkaengine.allowRegex "EquationGroup_Toolset_Apr17__ELV_.*" + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.strelkaengine.failAfterConsecutiveErrorCount 1 + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.suricataengine.allowRegex "(200033\\d|2100538|2102466)" + so-yaml.py add /opt/so/saltstack/local/pillar/soc/soc_soc.sls soc.config.server.modules.suricataengine.failAfterConsecutiveErrorCount 1 + fi +} + toggle_telemetry() { if [[ -z $UNATTENDED && $is_airgap -ne 0 ]]; then cat << ASSIST_EOF @@ -637,6 +657,38 @@ ASSIST_EOF fi } +suricata_idstools_migration() { + #Backup the pillars for idstools + mkdir -p /nsm/backup/detections-migration/idstools + rsync -av /opt/so/saltstack/local/pillar/idstools/* /nsm/backup/detections-migration/idstools + if [[ $? -eq 0 ]]; then + echo "IDStools configuration has been backed up." + else + fail "Error: rsync failed to copy the files. IDStools configuration has not been backed up." 
+ fi + + #Backup Thresholds + mkdir -p /nsm/backup/detections-migration/suricata + rsync -av /opt/so/saltstack/local/salt/suricata/thresholding /nsm/backup/detections-migration/suricata + if [[ $? -eq 0 ]]; then + echo "Suricata thresholds have been backed up." + else + fail "Error: rsync failed to copy the files. Thresholds have not been backed up." + fi + + #Backup local rules + mkdir -p /nsm/backup/detections-migration/suricata/local-rules + rsync -av /opt/so/rules/nids/suri/local.rules /nsm/backup/detections-migration/suricata/local-rules + if [[ -f /opt/so/saltstack/local/salt/idstools/rules/local.rules ]]; then + rsync -av /opt/so/saltstack/local/salt/idstools/rules/local.rules /nsm/backup/detections-migration/suricata/local-rules/local.rules.bak + fi + + #Tell SOC to migrate + mkdir -p /opt/so/conf/soc/migrations + echo "0" > /opt/so/conf/soc/migrations/suricata-migration-2.4.70 + chown -R socore:socore /opt/so/conf/soc/migrations +} + playbook_migration() { # Start SOC Detections migration mkdir -p /nsm/backup/detections-migration/{suricata,sigma/rules,elastalert} @@ -648,22 +700,21 @@ playbook_migration() { if grep -A 1 'playbook:' /opt/so/saltstack/local/pillar/minions/* | grep -q 'enabled: True'; then # Check for active Elastalert rules - active_rules_count=$(find /opt/so/rules/elastalert/playbook/ -type f -name "*.yaml" | wc -l) + active_rules_count=$(find /opt/so/rules/elastalert/playbook/ -type f \( -name "*.yaml" -o -name "*.yml" \) | wc -l) if [[ "$active_rules_count" -gt 0 ]]; then - # Prompt the user to AGREE if active Elastalert rules found + # Prompt the user to press ENTER if active Elastalert rules found echo echo "$active_rules_count Active Elastalert/Playbook rules found." echo "In preparation for the new Detections module, they will be backed up and then disabled." echo - echo "If you would like to proceed, then type AGREE and press ENTER." + echo "Press ENTER to proceed." 
echo # Read user input - read INPUT - if [ "${INPUT^^}" != 'AGREE' ]; then fail "SOUP canceled."; fi + read -r echo "Backing up the Elastalert rules..." - rsync -av --stats /opt/so/rules/elastalert/playbook/*.yaml /nsm/backup/detections-migration/elastalert/ + rsync -av --ignore-missing-args --stats /opt/so/rules/elastalert/playbook/*.{yaml,yml} /nsm/backup/detections-migration/elastalert/ # Verify that rsync completed successfully if [[ $? -eq 0 ]]; then @@ -1029,6 +1080,7 @@ main() { backup_old_states_pillars fi copy_new_files + create_local_directories "/opt/so/saltstack/default" apply_hotfix echo "Hotfix applied" update_version @@ -1095,6 +1147,7 @@ main() { echo "Copying new Security Onion code from $UPDATE_DIR to $DEFAULT_SALT_DIR." copy_new_files echo "" + create_local_directories "/opt/so/saltstack/default" update_version echo "" diff --git a/salt/manager/tools/sbin_jinja/so-yara-download b/salt/manager/tools/sbin_jinja/so-yara-download deleted file mode 100644 index aa9576253..000000000 --- a/salt/manager/tools/sbin_jinja/so-yara-download +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -NOROOT=1 -. /usr/sbin/so-common - -{%- set proxy = salt['pillar.get']('manager:proxy') %} -{%- set noproxy = salt['pillar.get']('manager:no_proxy', '') %} - -# Download the rules from the internet -{%- if proxy %} -export http_proxy={{ proxy }} -export https_proxy={{ proxy }} -export no_proxy="{{ noproxy }}" -{%- endif %} - -repos="/opt/so/conf/strelka/repos.txt" -output_dir=/nsm/rules/yara -gh_status=$(curl -s -o /dev/null -w "%{http_code}" https://github.com) -clone_dir="/tmp" -if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then - - while IFS= read -r repo; do - if ! 
$(echo "$repo" | grep -qE '^#'); then - # Remove old repo if existing bc of previous error condition or unexpected disruption - repo_name=`echo $repo | awk -F '/' '{print $NF}'` - [ -d $output_dir/$repo_name ] && rm -rf $output_dir/$repo_name - - # Clone repo and make appropriate directories for rules - git clone $repo $clone_dir/$repo_name - echo "Analyzing rules from $clone_dir/$repo_name..." - mkdir -p $output_dir/$repo_name - # Ensure a copy of the license is available for the rules - [ -f $clone_dir/$repo_name/LICENSE ] && cp $clone_dir/$repo_name/LICENSE $output_dir/$repo_name - - # Copy over rules - for i in $(find $clone_dir/$repo_name -name "*.yar*"); do - rule_name=$(echo $i | awk -F '/' '{print $NF}') - cp $i $output_dir/$repo_name - done - rm -rf $clone_dir/$repo_name - fi - done < $repos - - echo "Done!" - -/usr/sbin/so-yara-update - -else - echo "Server returned $gh_status status code." - echo "No connectivity to Github...exiting..." - exit 1 -fi diff --git a/salt/manager/tools/sbin_jinja/so-yara-update b/salt/manager/tools/sbin_jinja/so-yara-update deleted file mode 100644 index 07c940f47..000000000 --- a/salt/manager/tools/sbin_jinja/so-yara-update +++ /dev/null @@ -1,41 +0,0 @@ -#!/bin/bash -# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one -# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at -# https://securityonion.net/license; you may not use this file except in compliance with the -# Elastic License 2.0. - -NOROOT=1 -. /usr/sbin/so-common - -echo "Starting to check for yara rule updates at $(date)..." 
- -newcounter=0 -excludedcounter=0 -excluded_rules=({{ EXCLUDEDRULES | join(' ') }}) - -# Pull down the SO Rules -SORULEDIR=/nsm/rules/yara -OUTPUTDIR=/opt/so/saltstack/local/salt/strelka/rules - -mkdir -p $OUTPUTDIR -# remove all rules prior to copy so we can clear out old rules -rm -f $OUTPUTDIR/* - -for i in $(find $SORULEDIR -name "*.yar" -o -name "*.yara"); do - rule_name=$(echo $i | awk -F '/' '{print $NF}') - if [[ ! "${excluded_rules[*]}" =~ ${rule_name} ]]; then - echo "Adding rule: $rule_name..." - cp $i $OUTPUTDIR/$rule_name - ((newcounter++)) - else - echo "Excluding rule: $rule_name..." - ((excludedcounter++)) - fi -done - -if [ "$newcounter" -gt 0 ] || [ "$excludedcounter" -gt 0 ];then - echo "$newcounter rules added." - echo "$excludedcounter rule(s) excluded." -fi - -echo "Finished rule updates at $(date)..." diff --git a/salt/soc/config.sls b/salt/soc/config.sls index b440b07fc..8d1f0f694 100644 --- a/salt/soc/config.sls +++ b/salt/soc/config.sls @@ -80,9 +80,17 @@ socmotd: - mode: 600 - template: jinja +filedetectionsbackup: + file.managed: + - name: /opt/so/conf/soc/so-detections-backup.py + - source: salt://soc/files/soc/so-detections-backup.py + - user: 939 + - group: 939 + - mode: 600 + crondetectionsruntime: cron.present: - - name: /usr/local/bin/so-detections-runtime-status cron + - name: /usr/sbin/so-detections-runtime-status cron - identifier: detections-runtime-status - user: root - minute: '*/10' @@ -91,6 +99,17 @@ crondetectionsruntime: - month: '*' - dayweek: '*' +crondetectionsbackup: + cron.present: + - name: python3 /opt/so/conf/soc/so-detections-backup.py &>> /opt/so/log/soc/detections-backup.log + - identifier: detections-backup + - user: root + - minute: '0' + - hour: '0' + - daymonth: '*' + - month: '*' + - dayweek: '*' + socsigmafinalpipeline: file.managed: - name: /opt/so/conf/soc/sigma_final_pipeline.yaml diff --git a/salt/soc/defaults.yaml b/salt/soc/defaults.yaml index 04a66dc94..9f5faf50b 100644 --- 
a/salt/soc/defaults.yaml +++ b/salt/soc/defaults.yaml @@ -78,6 +78,12 @@ soc: target: '' links: - '/#/hunt?q=(process.entity_id:"{:process.entity_id}" OR process.entity_id:"{:process.Ext.ancestry|processAncestors}") | groupby event.dataset | groupby -sankey event.dataset event.action | groupby event.action | groupby process.parent.name | groupby -sankey process.parent.name process.name | groupby process.name | groupby process.command_line | groupby host.name user.name | groupby source.ip source.port destination.ip destination.port | groupby dns.question.name | groupby dns.answers.data | groupby file.path | groupby registry.path | groupby dll.path' + - name: actionRelatedAlerts + description: actionRelatedAlertsHelp + icon: fa-bell + links: + - '/#/alerts?q=rule.uuid: {:so_detection.publicId|escape} | groupby rule.name event.module* event.severity_label' + target: '' eventFields: default: - soc_timestamp @@ -1252,6 +1258,28 @@ soc: - event_data.destination.port - event_data.process.executable - event_data.process.pid + ':netflow:': + - soc_timestamp + - event.dataset + - source.ip + - source.port + - destination.ip + - destination.port + - network.type + - network.transport + - network.direction + - netflow.type + - netflow.exporter.version + - observer.ip + ':soc:': + - soc_timestamp + - event.dataset + - source.ip + - soc.fields.requestMethod + - soc.fields.requestPath + - soc.fields.statusCode + - event.action + - soc.fields.error server: bindAddress: 0.0.0.0:9822 baseUrl: / @@ -1278,7 +1306,7 @@ soc: so-import: - securityonion-resources+critical - securityonion-resources+high - communityRulesImportFrequencySeconds: 28800 + communityRulesImportFrequencySeconds: 86400 communityRulesImportErrorSeconds: 300 failAfterConsecutiveErrorCount: 10 denyRegex: '' @@ -1286,6 +1314,7 @@ soc: reposFolder: /opt/sensoroni/sigma/repos rulesFingerprintFile: /opt/sensoroni/fingerprints/sigma.fingerprint stateFilePath: /opt/sensoroni/fingerprints/elastalertengine.state + 
integrityCheckFrequencySeconds: 600 rulesRepos: default: - repo: https://github.com/Security-Onion-Solutions/securityonion-resources @@ -1347,7 +1376,7 @@ soc: autoEnabledYaraRules: - securityonion-yara autoUpdateEnabled: true - communityRulesImportFrequencySeconds: 28800 + communityRulesImportFrequencySeconds: 86400 communityRulesImportErrorSeconds: 300 failAfterConsecutiveErrorCount: 10 compileYaraPythonScriptPath: /opt/sensoroni/yara/compile_yara.py @@ -1364,16 +1393,18 @@ soc: community: true yaraRulesFolder: /opt/sensoroni/yara/rules stateFilePath: /opt/sensoroni/fingerprints/strelkaengine.state + integrityCheckFrequencySeconds: 600 suricataengine: allowRegex: '' autoUpdateEnabled: true - communityRulesImportFrequencySeconds: 28800 + communityRulesImportFrequencySeconds: 86400 communityRulesImportErrorSeconds: 300 failAfterConsecutiveErrorCount: 10 communityRulesFile: /nsm/rules/suricata/emerging-all.rules denyRegex: '' rulesFingerprintFile: /opt/sensoroni/fingerprints/emerging-all.fingerprint stateFilePath: /opt/sensoroni/fingerprints/suricataengine.state + integrityCheckFrequencySeconds: 600 client: enableReverseLookup: false docsUrl: /docs/ @@ -1460,7 +1491,7 @@ soc: showSubtitle: true - name: Elastalerts description: '' - query: '_type:elastalert | groupby rule.name' + query: 'event.dataset:sigma.alert | groupby rule.name' showSubtitle: true - name: Alerts description: Show all alerts grouped by alert source @@ -1795,7 +1826,7 @@ soc: query: 'tags:dhcp | groupby host.hostname | groupby -sankey host.hostname client.address | groupby client.address | groupby -sankey client.address server.address | groupby server.address | groupby dhcp.message_types | groupby host.domain' - name: DNS description: DNS (Domain Name System) queries - query: 'tags:dns | groupby dns.query.name | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby dns.highest_registered_domain | groupby dns.parent_domain | 
groupby dns.response.code_name | groupby dns.answers.name | groupby dns.query.type_name | groupby dns.response.code_name | groupby destination_geo.organization_name' + query: 'tags:dns | groupby dns.query.name | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby dns.highest_registered_domain | groupby dns.parent_domain | groupby dns.query.type_name | groupby dns.response.code_name | groupby dns.answers.name | groupby destination_geo.organization_name' - name: DPD description: DPD (Dynamic Protocol Detection) errors query: 'tags:dpd | groupby error.reason | groupby -sankey error.reason source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby network.protocol | groupby destination_geo.organization_name' @@ -1925,6 +1956,9 @@ soc: - name: ICS S7 description: S7 (Siemens) network metadata query: 'tags:s7* | groupby event.dataset | groupby -sankey event.dataset source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port' + - name: NetFlow + description: NetFlow records + query: 'event.module:netflow | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby network.type | groupby network.transport | groupby network.direction | groupby netflow.type | groupby netflow.exporter.version | groupby observer.ip | groupby source.as.organization.name | groupby source.geo.country_name | groupby destination.as.organization.name | groupby destination.geo.country_name' - name: Firewall description: Firewall logs query: 'observer.type:firewall | groupby event.action | groupby -sankey event.action observer.ingress.interface.name | groupby observer.ingress.interface.name | groupby network.type | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port' @@ -2031,17 
+2065,17 @@ soc: - acknowledged queries: - name: 'Group By Name, Module' - query: '* | groupby rule.name event.module* event.severity_label' + query: '* | groupby rule.name event.module* event.severity_label rule.uuid' - name: 'Group By Sensor, Source IP/Port, Destination IP/Port, Name' - query: '* | groupby observer.name source.ip source.port destination.ip destination.port rule.name network.community_id event.severity_label' + query: '* | groupby observer.name source.ip source.port destination.ip destination.port rule.name network.community_id event.severity_label rule.uuid' - name: 'Group By Source IP, Name' - query: '* | groupby source.ip rule.name event.severity_label' + query: '* | groupby source.ip rule.name event.severity_label rule.uuid' - name: 'Group By Source Port, Name' - query: '* | groupby source.port rule.name event.severity_label' + query: '* | groupby source.port rule.name event.severity_label rule.uuid' - name: 'Group By Destination IP, Name' - query: '* | groupby destination.ip rule.name event.severity_label' + query: '* | groupby destination.ip rule.name event.severity_label rule.uuid' - name: 'Group By Destination Port, Name' - query: '* | groupby destination.port rule.name event.severity_label' + query: '* | groupby destination.port rule.name event.severity_label rule.uuid' - name: Ungroup query: '*' grid: @@ -2160,9 +2194,9 @@ soc: manualSync: customEnabled: false labels: - - Suricata - - Strelka - ElastAlert + - Strelka + - Suricata eventFields: default: - so_detection.title diff --git a/salt/soc/enabled.sls b/salt/soc/enabled.sls index 6cea0c70d..4d4b5f6fd 100644 --- a/salt/soc/enabled.sls +++ b/salt/soc/enabled.sls @@ -44,6 +44,9 @@ so-soc: - /opt/so/conf/soc/soc_users_roles:/opt/sensoroni/rbac/users_roles:rw - /opt/so/conf/soc/queue:/opt/sensoroni/queue:rw - /opt/so/saltstack:/opt/so/saltstack:rw + - /opt/so/conf/soc/migrations:/opt/so/conf/soc/migrations:rw + - /nsm/backup/detections-migration:/nsm/backup/detections-migration:ro + - 
/opt/so/state:/opt/so/state:rw - extra_hosts: {% for node in DOCKER_EXTRA_HOSTS %} {% for hostname, ip in node.items() %} diff --git a/salt/soc/files/soc/sigma_so_pipeline.yaml b/salt/soc/files/soc/sigma_so_pipeline.yaml index 312d07965..8314361f5 100644 --- a/salt/soc/files/soc/sigma_so_pipeline.yaml +++ b/salt/soc/files/soc/sigma_so_pipeline.yaml @@ -17,6 +17,16 @@ transformations: dst_ip: destination.ip.keyword dst_port: destination.port winlog.event_data.User: user.name + logtype: event.code # OpenCanary + # Maps "opencanary" product to SO IDH logs + - id: opencanary_idh_add-fields + type: add_condition + conditions: + event.module: 'opencanary' + event.dataset: 'opencanary.idh' + rule_conditions: + - type: logsource + product: opencanary # Maps "antivirus" category to Windows Defender logs shipped by Elastic Agent Winlog Integration # winlog.event_data.threat_name has to be renamed prior to ingestion, it is originally winlog.event_data.Threat Name - id: antivirus_field-mappings_windows-defender @@ -88,3 +98,11 @@ transformations: - type: logsource product: linux service: auth + # event.code should always be a string + - id: convert_event_code_to_string + type: convert_type + target_type: 'str' + field_name_conditions: + - type: include_fields + fields: + - event.code diff --git a/salt/soc/files/soc/so-detections-backup.py b/salt/soc/files/soc/so-detections-backup.py new file mode 100644 index 000000000..085b1e4c7 --- /dev/null +++ b/salt/soc/files/soc/so-detections-backup.py @@ -0,0 +1,113 @@ +# Copyright 2020-2023 Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one +# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at +# https://securityonion.net/license; you may not use this file except in compliance with the +# Elastic License 2.0. 
+ +# This script queries Elasticsearch for Custom Detections and all Overrides, +# and git commits them to disk at $OUTPUT_DIR + +import os +import subprocess +import json +import requests +from requests.auth import HTTPBasicAuth +import urllib3 +from datetime import datetime + +# Suppress SSL warnings +urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) + +# Constants +ES_URL = "https://localhost:9200/so-detection/_search" +QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}' +QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}' +OUTPUT_DIR = "/nsm/backup/detections/repo" +AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config" + +def get_auth_credentials(auth_file): + with open(auth_file, 'r') as file: + for line in file: + if line.startswith('user ='): + return line.split('=', 1)[1].strip().replace('"', '') + +def query_elasticsearch(query, auth): + headers = {"Content-Type": "application/json"} + response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False) + response.raise_for_status() + return response.json() + +def save_content(hit, base_folder, subfolder="", extension="txt"): + so_detection = hit["_source"]["so_detection"] + public_id = so_detection["publicId"] + content = so_detection["content"] + file_dir = os.path.join(base_folder, subfolder) + os.makedirs(file_dir, exist_ok=True) + file_path = os.path.join(file_dir, f"{public_id}.{extension}") + with open(file_path, "w") as f: + f.write(content) + return file_path + +def save_overrides(hit): + so_detection = hit["_source"]["so_detection"] + public_id = so_detection["publicId"] + overrides = so_detection["overrides"] + language = so_detection["language"] + folder = os.path.join(OUTPUT_DIR, language, "overrides") + os.makedirs(folder, exist_ok=True) + extension = "yaml" if language == "sigma" else "txt" + file_path = 
os.path.join(folder, f"{public_id}.{extension}") + with open(file_path, "w") as f: + f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides) + return file_path + +def ensure_git_repo(): + if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')): + subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True) + subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True) + subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True) + +def commit_changes(): + ensure_git_repo() + subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True) + subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True) + subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True) + status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True) + print(status_result.stdout) + commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True) + if commit_result.returncode == 1: + print("No changes to commit.") + elif commit_result.returncode == 0: + print("Changes committed successfully.") + else: + commit_result.check_returncode() + +def main(): + try: + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n") + + os.makedirs(OUTPUT_DIR, exist_ok=True) + + auth_credentials = get_auth_credentials(AUTH_FILE) + username, password = auth_credentials.split(':', 1) + auth = HTTPBasicAuth(username, password) + + # Query and save custom detections + detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"] + for hit in detections: + save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt") + 
+ # Query and save overrides + overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"] + for hit in overrides: + save_overrides(hit) + + commit_changes() + + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + print(f"Backup Completed - {timestamp}") + except Exception as e: + print(f"An error occurred: {e}") + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/salt/soc/files/soc/so-detections-backup_test.py b/salt/soc/files/soc/so-detections-backup_test.py new file mode 100644 index 000000000..3afa11886 --- /dev/null +++ b/salt/soc/files/soc/so-detections-backup_test.py @@ -0,0 +1,159 @@ +# Copyright 2020-2023 Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one +# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at +# https://securityonion.net/license; you may not use this file except in compliance with the +# Elastic License 2.0. + +import unittest +from unittest.mock import patch, MagicMock, mock_open, call +import requests +import os +import subprocess +import json +from datetime import datetime +import importlib + +ds = importlib.import_module('so-detections-backup') + +class TestBackupScript(unittest.TestCase): + + def setUp(self): + self.output_dir = '/nsm/backup/detections/repo' + self.auth_file_path = '/nsm/backup/detections/repo' + self.mock_auth_data = 'user = "so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS"' + self.auth_credentials = 'so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS' + self.auth = requests.auth.HTTPBasicAuth('so_elastic', '@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS') + self.mock_detection_hit = { + "_source": { + "so_detection": { + "publicId": "test_id", + "content": "test_content", + "language": "suricata" + } + } + } + self.mock_override_hit = { + "_source": { + "so_detection": { + "publicId": "test_id", + 
"overrides": [{"key": "value"}], + "language": "sigma" + } + } + } + + def assert_file_written(self, mock_file, expected_path, expected_content): + mock_file.assert_called_once_with(expected_path, 'w') + mock_file().write.assert_called_once_with(expected_content) + + @patch('builtins.open', new_callable=mock_open, read_data='user = "so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS"') + def test_get_auth_credentials(self, mock_file): + credentials = ds.get_auth_credentials(self.auth_file_path) + self.assertEqual(credentials, self.auth_credentials) + mock_file.assert_called_once_with(self.auth_file_path, 'r') + + @patch('requests.get') + def test_query_elasticsearch(self, mock_get): + mock_response = MagicMock() + mock_response.json.return_value = {'hits': {'hits': []}} + mock_response.raise_for_status = MagicMock() + mock_get.return_value = mock_response + + response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth) + + self.assertEqual(response, {'hits': {'hits': []}}) + mock_get.assert_called_once_with( + ds.ES_URL, + headers={"Content-Type": "application/json"}, + data=ds.QUERY_DETECTIONS, + auth=self.auth, + verify=False + ) + + @patch('os.makedirs') + @patch('builtins.open', new_callable=mock_open) + def test_save_content(self, mock_file, mock_makedirs): + file_path = ds.save_content(self.mock_detection_hit, self.output_dir, 'subfolder', 'txt') + expected_path = f'{self.output_dir}/subfolder/test_id.txt' + self.assertEqual(file_path, expected_path) + mock_makedirs.assert_called_once_with(f'{self.output_dir}/subfolder', exist_ok=True) + self.assert_file_written(mock_file, expected_path, 'test_content') + + @patch('os.makedirs') + @patch('builtins.open', new_callable=mock_open) + def test_save_overrides(self, mock_file, mock_makedirs): + file_path = ds.save_overrides(self.mock_override_hit) + expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml' + self.assertEqual(file_path, expected_path) + 
mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True) + self.assert_file_written(mock_file, expected_path, json.dumps({"key": "value"})) + + @patch('subprocess.run') + def test_ensure_git_repo(self, mock_run): + mock_run.return_value = MagicMock(returncode=0) + + ds.ensure_git_repo() + + mock_run.assert_has_calls([ + call(["git", "config", "--global", "init.defaultBranch", "main"], check=True), + call(["git", "-C", self.output_dir, "init"], check=True), + call(["git", "-C", self.output_dir, "remote", "add", "origin", "default"], check=True) + ]) + + @patch('subprocess.run') + def test_commit_changes(self, mock_run): + mock_status_result = MagicMock() + mock_status_result.stdout = "On branch main\nnothing to commit, working tree clean" + mock_commit_result = MagicMock(returncode=1) + # Ensure sufficient number of MagicMock instances for each subprocess.run call + mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)] + + print("Running test_commit_changes...") + ds.commit_changes() + print("Finished test_commit_changes.") + + mock_run.assert_has_calls([ + call(["git", "-C", self.output_dir, "config", "user.email", "securityonion@local.invalid"], check=True), + call(["git", "-C", self.output_dir, "config", "user.name", "securityonion"], check=True), + call(["git", "-C", self.output_dir, "add", "."], check=True), + call(["git", "-C", self.output_dir, "status"], capture_output=True, text=True), + call(["git", "-C", self.output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True) + ]) + + @patch('builtins.print') + @patch('so-detections-backup.commit_changes') + @patch('so-detections-backup.save_overrides') + @patch('so-detections-backup.save_content') + @patch('so-detections-backup.query_elasticsearch') + 
@patch('so-detections-backup.get_auth_credentials') + @patch('os.makedirs') + def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print): + mock_get_auth.return_value = self.auth_credentials + mock_query.side_effect = [ + {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}}, + {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}} + ] + + with patch('datetime.datetime') as mock_datetime: + mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44" + ds.main() + + mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True) + mock_get_auth.assert_called_once_with(ds.AUTH_FILE) + mock_query.assert_has_calls([ + call(ds.QUERY_DETECTIONS, self.auth), + call(ds.QUERY_OVERRIDES, self.auth) + ]) + mock_save_content.assert_called_once_with( + {"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}, + self.output_dir, + "sigma", + "yaml" + ) + mock_save_overrides.assert_called_once_with( + {"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}} + ) + mock_commit.assert_called_once() + mock_print.assert_called() + +if __name__ == '__main__': + unittest.main(verbosity=2) diff --git a/salt/soc/merged.map.jinja b/salt/soc/merged.map.jinja index f23d9c115..f2c88fde9 100644 --- a/salt/soc/merged.map.jinja +++ b/salt/soc/merged.map.jinja @@ -90,6 +90,7 @@ {% do SOCMERGED.config.server.client.update({'job': {'actions': standard_actions}}) %} {% do SOCMERGED.config.server.client.alerts.update({'actions': standard_actions}) %} {% do SOCMERGED.config.server.client.cases.update({'actions': standard_actions}) %} +{% do SOCMERGED.config.server.client.detections.update({'actions': standard_actions}) %} {# replace the _x_ with . 
for soc ui to config conversion #} {% do SOCMERGED.config.eventFields.update({':endpoint:events.api': SOCMERGED.config.eventFields.pop(':endpoint:events_x_api') }) %} diff --git a/salt/soc/soc_soc.yaml b/salt/soc/soc_soc.yaml index c908521fa..1f64eb0bc 100644 --- a/salt/soc/soc_soc.yaml +++ b/salt/soc/soc_soc.yaml @@ -39,7 +39,7 @@ soc: helpLink: soc-customization.html sigma_final_pipeline__yaml: title: Final Sigma Pipeline - description: Final Processing Pipeline for Sigma Rules (future use, not yet complete) + description: Final Processing Pipeline for Sigma Rules. syntax: yaml file: True global: True @@ -83,6 +83,13 @@ soc: advanced: True modules: elastalertengine: + additionalAlerters: + title: Additional Alerters + description: Specify additional alerters to enable for all Sigma rules, one alerter name per line. Alerters refers to ElastAlert 2 alerters, as documented at https://elastalert2.readthedocs.io. Note that the configuration parameters for these alerters must be provided in the ElastAlert configuration section. Filter for 'Alerter' to find this related setting. A full update of the ElastAlert rule engine, via the Detections screen, is required in order to apply these changes. Requires a valid Security Onion license key. + global: True + helpLink: sigma.html + forcedType: "[]string" + multiline: True allowRegex: description: 'Regex used to filter imported Sigma rules. Deny regex takes precedence over the Allow regex setting.' global: True @@ -106,16 +113,20 @@ soc: global: True advanced: True helpLink: sigma.html + integrityCheckFrequencySeconds: + description: 'How often the ElastAlert integrity checker runs (in seconds). This verifies the integrity of deployed rules.' + global: True + advanced: True rulesRepos: default: &eerulesRepos - description: "Custom Git repos to pull Sigma rules from. 'license' field is required, 'folder' is optional. 
'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled." + description: "Custom Git repositories to pull Sigma rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled. The new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update." global: True advanced: True forcedType: "[]{}" helpLink: sigma.html airgap: *eerulesRepos sigmaRulePackages: - description: 'Defines the Sigma Community Ruleset you want to run. One of these (core | core+ | core++ | all ) as well as an optional Add-on (emerging_threats_addon). Once you have changed the ruleset here, you will need to wait for the rule update to take place (every 8 hours), or you can force the update by nagivating to Detections --> Options dropdown menu --> Elastalert --> Full Update. WARNING! Changing the ruleset will remove all existing Sigma rules of the previous ruleset and their associated overrides. This removal cannot be undone.' + description: 'Defines the Sigma Community Ruleset you want to run. One of these (core | core+ | core++ | all ) as well as an optional Add-on (emerging_threats_addon). Once you have changed the ruleset here, the new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update. WARNING! Changing the ruleset will remove all existing non-overlapping Sigma rules of the previous ruleset and their associated overrides. 
This removal cannot be undone.' global: True advanced: False helpLink: sigma.html @@ -189,7 +200,7 @@ soc: global: True advanced: True helpLink: yara.html - autoEnabledYARARules: + autoEnabledYaraRules: description: 'YARA rules to automatically enable on initial import. Format is $Ruleset - for example, for the default shipped ruleset: securityonion-yara' global: True advanced: True @@ -204,9 +215,13 @@ soc: global: True advanced: True helpLink: yara.html + integrityCheckFrequencySeconds: + description: 'How often the Strelka integrity checker runs (in seconds). This verifies the integrity of deployed rules.' + global: True + advanced: True rulesRepos: default: &serulesRepos - description: "Custom Git repos to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled." + description: "Custom Git repositories to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled. The new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Strelka --> Full Update." global: True advanced: True forcedType: "[]{}" @@ -228,6 +243,10 @@ soc: global: True advanced: True helpLink: suricata.html + integrityCheckFrequencySeconds: + description: 'How often the Suricata integrity checker runs (in seconds). This verifies the integrity of deployed rules.' + global: True + advanced: True client: enableReverseLookup: description: Set to true to enable reverse DNS lookups for IP addresses in the SOC UI. 
global: True detectionsEnabled: - description: Set to true to enable the Detections module in SOC. (future use, not yet complete) + description: Set to true to enable the Detections module in SOC. global: True inactiveTools: description: List of external tools to remove from the SOC UI. diff --git a/salt/stig/soc_stig.yaml b/salt/stig/soc_stig.yaml index 1fb030c31..597aab809 100644 --- a/salt/stig/soc_stig.yaml +++ b/salt/stig/soc_stig.yaml @@ -1,6 +1,6 @@ stig: enabled: - description: You can enable or disable the application of STIGS using oscap. Note that the actions performed by OSCAP are not automatically reversible. + description: You can enable or disable the application of STIGS using oscap. Note that the actions performed by OSCAP are not automatically reversible. Requires a valid Security Onion license key. forcedType: bool advanced: True run_interval: diff --git a/salt/strelka/backend/enabled.sls b/salt/strelka/backend/enabled.sls index 863115eda..a26905e1f 100644 --- a/salt/strelka/backend/enabled.sls +++ b/salt/strelka/backend/enabled.sls @@ -43,7 +43,7 @@ strelka_backend: {% endif %} - restart_policy: on-failure - watch: - - file: /opt/so/conf/strelka/rules/compiled/* + - file: strelkasensorcompiledrules delete_so-strelka-backend_so-status.disabled: file.uncomment: diff --git a/salt/strelka/compile_yara/compile_yara.py b/salt/strelka/compile_yara/compile_yara.py index dc77980d2..09e3f4680 100644 --- a/salt/strelka/compile_yara/compile_yara.py +++ b/salt/strelka/compile_yara/compile_yara.py @@ -3,10 +3,13 @@ # https://securityonion.net/license; you may not use this file except in compliance with the # Elastic License 2.0. 
+import argparse +import glob +import hashlib +import json import os import yara -import glob -import json +from datetime import datetime from concurrent.futures import ThreadPoolExecutor def check_syntax(rule_file): @@ -20,24 +23,29 @@ def check_syntax(rule_file): def compile_yara_rules(rules_dir): compiled_dir = os.path.join(rules_dir, "compiled") - compiled_rules_path = [ os.path.join(compiled_dir, "rules.compiled"), "/opt/so/saltstack/default/salt/strelka/rules/compiled/rules.compiled" ] + compiled_rules_path = "/opt/so/saltstack/local/salt/strelka/rules/compiled/rules.compiled" rule_files = glob.glob(os.path.join(rules_dir, '**/*.yar'), recursive=True) files_to_compile = {} removed_count = 0 success_count = 0 - + # Use ThreadPoolExecutor to parallelize syntax checks with ThreadPoolExecutor() as executor: results = executor.map(check_syntax, rule_files) - + # Collect yara files and prepare for batch compilation + ts = str(datetime.utcnow().isoformat()) + failure_ids = [] + success_ids = [] for success, rule_file, error_message in results: + rule_id = os.path.splitext(os.path.basename(rule_file))[0] if success: files_to_compile[os.path.basename(rule_file)] = rule_file success_count += 1 + success_ids.append(rule_id) else: + failure_ids.append(rule_id) # Extract just the UUID from the rule file name - rule_id = os.path.splitext(os.path.basename(rule_file))[0] log_entry = { "event_module": "soc", "event_dataset": "soc.detections", @@ -55,13 +63,37 @@ def compile_yara_rules(rules_dir): removed_count += 1 # Compile all remaining valid rules into a single file + compiled_sha256="" if files_to_compile: compiled_rules = yara.compile(filepaths=files_to_compile) - for path in compiled_rules_path: - compiled_rules.save(path) - print(f"All remaining rules compiled and saved into {path}") + compiled_rules.save(compiled_rules_path) + print(f"All remaining rules compiled and saved into {compiled_rules_path}") + # Hash file + with open(compiled_rules_path, 'rb') as hash_file: 
+ compiled_sha256=hashlib.sha256(hash_file.read()).hexdigest() + # Remove the rules.compiled if there aren't any files to be compiled + else: + if os.path.exists(compiled_rules_path): + os.remove(compiled_rules_path) + + # Create compilation report + compilation_report = { + "timestamp": ts, + "compiled_sha256": compiled_sha256, + "failure": failure_ids, + "success": success_ids + } + + # Write total + with open('/opt/so/state/detections_yara_compilation-total.log', 'w+') as report_file: + json.dump(compilation_report, report_file) # Print summary of compilation results print(f"Summary: {success_count} rules compiled successfully, {removed_count} rules removed due to errors.") -compile_yara_rules("/opt/sensoroni/yara/rules/") +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Compile YARA rules from the specified directory") + parser.add_argument("rules_dir", help="Directory containing YARA rules to compile") + args = parser.parse_args() + +compile_yara_rules(args.rules_dir) diff --git a/salt/strelka/config.sls b/salt/strelka/config.sls index cd8fb2667..f03afa61b 100644 --- a/salt/strelka/config.sls +++ b/salt/strelka/config.sls @@ -5,45 +5,21 @@ {% from 'allowed_states.map.jinja' import allowed_states %} {% if sls.split('.')[0] in allowed_states %} +{% from 'vars/globals.map.jinja' import GLOBALS %} + +{% if GLOBALS.is_manager %} +include: + - strelka.manager +{% endif %} # Strelka config -strelkaconfdir: - file.directory: - - name: /opt/so/conf/strelka/rules/compiled/ - - user: 939 - - group: 939 - - makedirs: True - -strelkacompileyara: - file.managed: - - name: /opt/so/conf/strelka/compile_yara.py - - source: salt://strelka/compile_yara/compile_yara.py - - user: 939 - - group: 939 - - makedirs: True - -strelkarulesdir: - file.directory: - - name: /opt/so/conf/strelka/rules - - user: 939 - - group: 939 - - makedirs: True - -{%- if grains.role in ['so-sensor', 'so-heavynode'] %} -strelkasensorrules: +strelkasensorcompiledrules: 
file.recurse: - name: /opt/so/conf/strelka/rules/compiled/ - source: salt://strelka/rules/compiled/ - user: 939 - group: 939 - clean: True -{%- endif %} - -strelkareposdir: - file.directory: - - name: /opt/so/conf/strelka/repos - - user: 939 - - group: 939 - makedirs: True strelkadatadir: @@ -58,7 +34,18 @@ strelkalogdir: - name: /nsm/strelka/log - user: 939 - group: 939 - - makedirs: True + +strelkagkredisdatadir: + file.directory: + - name: /nsm/strelka/gk-redis-data + - user: 939 + - group: 939 + +strelkacoordredisdatadir: + file.directory: + - name: /nsm/strelka/coord-redis-data + - user: 939 + - group: 939 strelka_sbin: file.recurse: @@ -68,20 +55,6 @@ strelka_sbin: - group: 939 - file_mode: 755 -strelkagkredisdatadir: - file.directory: - - name: /nsm/strelka/gk-redis-data - - user: 939 - - group: 939 - - makedirs: True - -strelkacoordredisdatadir: - file.directory: - - name: /nsm/strelka/coord-redis-data - - user: 939 - - group: 939 - - makedirs: True - {% else %} {{sls}}_state_not_allowed: diff --git a/salt/strelka/defaults.yaml b/salt/strelka/defaults.yaml index f91ad8691..4d69bf53b 100644 --- a/salt/strelka/defaults.yaml +++ b/salt/strelka/defaults.yaml @@ -733,28 +733,6 @@ strelka: enabled: False rules: enabled: True - repos: - - https://github.com/Security-Onion-Solutions/securityonion-yara.git - excluded: - - apt_flame2_orchestrator.yar - - apt_tetris.yar - - gen_susp_js_obfuscatorio.yar - - gen_webshells.yar - - generic_anomalies.yar - - general_cloaking.yar - - thor_inverse_matches.yar - - yara_mixed_ext_vars.yar - - apt_apt27_hyperbro.yar - - apt_turla_gazer.yar - - gen_google_anomaly.yar - - gen_icon_anomalies.yar - - gen_nvidia_leaked_cert.yar - - gen_sign_anomalies.yar - - gen_susp_xor.yar - - gen_webshells_ext_vars.yar - - configured_vulns_ext_vars.yar - - expl_outlook_cve_2023_23397.yar - - gen_mal_3cx_compromise_mar23.yar filecheck: historypath: '/nsm/strelka/history/' strelkapath: '/nsm/strelka/unprocessed/' diff --git 
a/salt/strelka/manager.sls b/salt/strelka/manager.sls index 1c56a18fd..6a4aea416 100644 --- a/salt/strelka/manager.sls +++ b/salt/strelka/manager.sls @@ -4,12 +4,13 @@ # Elastic License 2.0. {% from 'allowed_states.map.jinja' import allowed_states %} -{% if sls in allowed_states %} +{# if strelka.manager or strelka in allowed_states #} +{% if sls in allowed_states or sls.split('.')[0] in allowed_states %} # Strelka config -strelkaconfdir: +strelkarulesdir: file.directory: - - name: /opt/so/conf/strelka/rules/compiled/ + - name: /opt/so/conf/strelka/rules - user: 939 - group: 939 - makedirs: True @@ -20,21 +21,12 @@ strelkacompileyara: - source: salt://strelka/compile_yara/compile_yara.py - user: 939 - group: 939 - - makedirs: True - -strelkarulesdir: - file.directory: - - name: /opt/so/conf/strelka/rules - - user: 939 - - group: 939 - - makedirs: True strelkareposdir: file.directory: - name: /opt/so/conf/strelka/repos - user: 939 - group: 939 - - makedirs: True {% else %} diff --git a/salt/strelka/rules/repos.txt.jinja b/salt/strelka/rules/repos.txt.jinja deleted file mode 100644 index 043a02203..000000000 --- a/salt/strelka/rules/repos.txt.jinja +++ /dev/null @@ -1,2 +0,0 @@ -# DO NOT EDIT THIS FILE! Strelka YARA rule repos are stored here from the strelka:rules:repos pillar section -{{ STRELKAREPOS | join('\n') }} diff --git a/salt/strelka/soc_strelka.yaml b/salt/strelka/soc_strelka.yaml index e5240b9c9..947215bd5 100644 --- a/salt/strelka/soc_strelka.yaml +++ b/salt/strelka/soc_strelka.yaml @@ -578,18 +578,6 @@ strelka: global: False helpLink: strelka.html advanced: False - repos: - description: List of repos for so-yara-download to use to download rules. - readonly: False - global: False - helpLink: strelka.html - advanced: False - excluded: - description: List of rules to exclude so-yara-update from download and propagating to backend nodes. 
- readonly: False - global: False - helpLink: strelka.html - advanced: False filecheck: historypath: description: The path for previously scanned files. diff --git a/salt/suricata/soc_suricata.yaml b/salt/suricata/soc_suricata.yaml index 75ad1e476..e157ff852 100644 --- a/salt/suricata/soc_suricata.yaml +++ b/salt/suricata/soc_suricata.yaml @@ -12,7 +12,7 @@ suricata: title: SIDS helpLink: suricata.html readonlyUi: True - advanced: true + advanced: True classification: classification__config: description: Classifications config file. diff --git a/setup/so-functions b/setup/so-functions index 0ca711e32..77d2f0872 100755 --- a/setup/so-functions +++ b/setup/so-functions @@ -762,21 +762,6 @@ copy_salt_master_config() { logCmd "systemctl restart salt-master" } -create_local_directories() { - info "Creating local pillar and salt directories" - PILLARSALTDIR=${SCRIPTDIR::-5} - for i in "pillar" "salt"; do - for d in $(find $PILLARSALTDIR/$i -type d); do - suffixdir=${d//$PILLARSALTDIR/} - if [ ! 
-d "$local_salt_dir/$suffixdir" ]; then - logCmd "mkdir -pv $local_salt_dir$suffixdir" - fi - done - logCmd "chown -R socore:socore $local_salt_dir/$i" - done - -} - create_local_nids_rules() { title "Create a local.rules file so it doesn't get removed on updates" logCmd "mkdir -p /opt/so/saltstack/local/salt/idstools" @@ -1272,12 +1257,27 @@ soc_pillar() { " srvKey: '$SOCSRVKEY'"\ " modules:"\ " elastalertengine:"\ - " allowRegex: '$ELASTALERT_ALLOW_REGEX'"\ + " allowRegex: '$ELASTALERT_ALLOW_REGEX'" > "$soc_pillar_file" + if [[ -n "$ELASTALERT_FAIL_ERROR_COUNT" ]]; then + printf '%s\n'\ + " failAfterConsecutiveErrorCount: $ELASTALERT_FAIL_ERROR_COUNT" >> "$soc_pillar_file" + fi + + printf '%s\n'\ " strelkaengine:"\ - " allowRegex: '$STRELKA_ALLOW_REGEX'"\ + " allowRegex: '$STRELKA_ALLOW_REGEX'" >> "$soc_pillar_file" + if [[ -n "$STRELKA_FAIL_ERROR_COUNT" ]]; then + printf '%s\n'\ + " failAfterConsecutiveErrorCount: $STRELKA_FAIL_ERROR_COUNT" >> "$soc_pillar_file" + fi + + printf '%s\n'\ " suricataengine:"\ - " allowRegex: '$SURICATA_ALLOW_REGEX'"\ - "" > "$soc_pillar_file" + " allowRegex: '$SURICATA_ALLOW_REGEX'" >> "$soc_pillar_file" + if [[ -n "$SURICATA_FAIL_ERROR_COUNT" ]]; then + printf '%s\n'\ + " failAfterConsecutiveErrorCount: $SURICATA_FAIL_ERROR_COUNT" >> "$soc_pillar_file" + fi if [[ $telemetry -ne 0 ]]; then echo " telemetryEnabled: false" >> $soc_pillar_file diff --git a/setup/so-setup b/setup/so-setup index 2ab105b60..10f54ad10 100755 --- a/setup/so-setup +++ b/setup/so-setup @@ -246,8 +246,11 @@ if [ -n "$test_profile" ]; then WEBPASSWD1=0n10nus3r WEBPASSWD2=0n10nus3r STRELKA_ALLOW_REGEX="EquationGroup_Toolset_Apr17__ELV_.*" + STRELKA_FAIL_ERROR_COUNT=1 ELASTALERT_ALLOW_REGEX="Security Onion" + ELASTALERT_FAIL_ERROR_COUNT=1 SURICATA_ALLOW_REGEX="(200033\\d|2100538|2102466)" + SURICATA_FAIL_ERROR_COUNT=1 update_sudoers_for_testing fi @@ -679,7 +682,7 @@ if ! 
[[ -f $install_opt_file ]]; then # Add the socore user add_socore_user_manager - create_local_directories + create_local_directories ${SCRIPTDIR::-5} setup_salt_master_dirs create_manager_pillars @@ -771,16 +774,10 @@ if ! [[ -f $install_opt_file ]]; then if [[ ! $is_airgap ]]; then title "Downloading IDS Rules" logCmd "so-rule-update" - title "Downloading YARA rules" - logCmd "su socore -c '/usr/sbin/so-yara-download'" if [[ $monints || $is_import ]]; then title "Restarting Suricata to pick up the new rules" logCmd "so-suricata-restart" fi - if [[ $monints ]]; then - title "Restarting Strelka to use new rules" - logCmd "so-strelka-restart" - fi fi title "Setting up Kibana Default Space" logCmd "so-kibana-space-defaults" diff --git a/setup/so-whiptail b/setup/so-whiptail index 97d1eb2b3..c9e99d1f4 100755 --- a/setup/so-whiptail +++ b/setup/so-whiptail @@ -451,7 +451,7 @@ whiptail_end_settings() { done fi - if [[ ! $is_airgap ]]; then + if [[ ! $is_airgap ]] && [[ $waitforstate ]]; then if [[ $telemetry -eq 0 ]]; then __append_end_msg "SOC Telemetry: enabled" else @@ -475,7 +475,7 @@ whiptail_end_settings() { read -r -d '' msg <<-EOM $end_msg - Press the Tab key to select yes or no. + Press the TAB key to select yes or no. EOM whiptail --title "The following options have been set, would you like to proceed?" --yesno "$msg" 24 75 --scrolltext diff --git a/sigs/securityonion-2.4.70-20240529.iso.sig b/sigs/securityonion-2.4.70-20240529.iso.sig new file mode 100644 index 000000000..c3825eb6e Binary files /dev/null and b/sigs/securityonion-2.4.70-20240529.iso.sig differ