Merge remote-tracking branch 'origin/2.4/dev' into sotcprp

m0duspwnens committed 2024-05-30 16:34:37 -04:00
16 changed files with 473 additions and 24 deletions

View File

@@ -1,17 +1,17 @@
-### 2.4.60-20240320 ISO image released on 2024/03/20
+### 2.4.70-20240529 ISO image released on 2024/05/29
 ### Download and Verify
-2.4.60-20240320 ISO image:
-https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso
-MD5: 178DD42D06B2F32F3870E0C27219821E
-SHA1: 73EDCD50817A7F6003FE405CF1808A30D034F89D
-SHA256: DD334B8D7088A7B78160C253B680D645E25984BA5CCAB5CC5C327CA72137FC06
+2.4.70-20240529 ISO image:
+https://download.securityonion.net/file/securityonion/securityonion-2.4.70-20240529.iso
+MD5: 8FCCF31C2470D1ABA380AF196B611DEC
+SHA1: EE5E8F8C14819E7A1FE423E6920531A97F39600B
+SHA256: EF5E781D50D50660F452ADC54FD4911296ECBECED7879FA8E04687337CA89BEC
 Signature for ISO image:
-https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig
+https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.70-20240529.iso.sig
 Signing key:
 https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.
 Download the signature file for the ISO:
 ```
-wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig
+wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.70-20240529.iso.sig
 ```
 Download the ISO image:
 ```
-wget https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso
+wget https://download.securityonion.net/file/securityonion/securityonion-2.4.70-20240529.iso
 ```
 Verify the downloaded ISO image using the signature file:
 ```
-gpg --verify securityonion-2.4.60-20240320.iso.sig securityonion-2.4.60-20240320.iso
+gpg --verify securityonion-2.4.70-20240529.iso.sig securityonion-2.4.70-20240529.iso
 ```
 The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
 ```
-gpg: Signature made Tue 19 Mar 2024 03:17:58 PM EDT using RSA key ID FE507013
+gpg: Signature made Wed 29 May 2024 11:40:59 AM EDT using RSA key ID FE507013
 gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
 gpg: WARNING: This key is not certified with a trusted signature!
 gpg: There is no indication that the signature belongs to the owner.
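
For anyone verifying a fresh download, the `gpg --verify` step above assumes the signing key has already been imported. A short sketch, using the KEYS URL shown in this same file:
```
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
gpg --import KEYS
```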

View File

@@ -1 +1 @@
-2.4.70
+2.4.80

View File

@@ -11088,6 +11088,117 @@ elasticsearch:
               set_priority:
                 priority: 50
             min_age: 30d
+    so-suricata_x_alerts:
+      index_sorting: false
+      index_template:
+        composed_of:
+          - agent-mappings
+          - dtc-agent-mappings
+          - base-mappings
+          - dtc-base-mappings
+          - client-mappings
+          - dtc-client-mappings
+          - cloud-mappings
+          - container-mappings
+          - data_stream-mappings
+          - destination-mappings
+          - dtc-destination-mappings
+          - pb-override-destination-mappings
+          - dll-mappings
+          - dns-mappings
+          - dtc-dns-mappings
+          - ecs-mappings
+          - dtc-ecs-mappings
+          - error-mappings
+          - event-mappings
+          - dtc-event-mappings
+          - file-mappings
+          - dtc-file-mappings
+          - group-mappings
+          - host-mappings
+          - dtc-host-mappings
+          - http-mappings
+          - dtc-http-mappings
+          - log-mappings
+          - network-mappings
+          - dtc-network-mappings
+          - observer-mappings
+          - dtc-observer-mappings
+          - orchestrator-mappings
+          - organization-mappings
+          - package-mappings
+          - process-mappings
+          - dtc-process-mappings
+          - registry-mappings
+          - related-mappings
+          - rule-mappings
+          - dtc-rule-mappings
+          - server-mappings
+          - service-mappings
+          - dtc-service-mappings
+          - source-mappings
+          - dtc-source-mappings
+          - pb-override-source-mappings
+          - suricata-mappings
+          - threat-mappings
+          - tls-mappings
+          - tracing-mappings
+          - url-mappings
+          - user_agent-mappings
+          - dtc-user_agent-mappings
+          - vulnerability-mappings
+          - common-settings
+          - common-dynamic-mappings
+        data_stream: {}
+        index_patterns:
+          - logs-suricata.alerts-*
+        priority: 500
+        template:
+          mappings:
+            date_detection: false
+            dynamic_templates:
+              - strings_as_keyword:
+                  mapping:
+                    ignore_above: 1024
+                    type: keyword
+                  match_mapping_type: string
+          settings:
+            index:
+              lifecycle:
+                name: so-suricata.alerts-logs
+              mapping:
+                total_fields:
+                  limit: 5000
+              number_of_replicas: 0
+              number_of_shards: 1
+              refresh_interval: 30s
+              sort:
+                field: '@timestamp'
+                order: desc
+      policy:
+        phases:
+          cold:
+            actions:
+              set_priority:
+                priority: 0
+            min_age: 60d
+          delete:
+            actions:
+              delete: {}
+            min_age: 365d
+          hot:
+            actions:
+              rollover:
+                max_age: 1d
+                max_primary_shard_size: 50gb
+              set_priority:
+                priority: 100
+            min_age: 0ms
+          warm:
+            actions:
+              set_priority:
+                priority: 50
+            min_age: 30d
     so-syslog:
       index_sorting: false
       index_template:
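
Once this rolls out, the rendered template and lifecycle policy can be spot-checked against a manager's Elasticsearch. A sketch; the installed template name is an assumption (the `_x_` in the pillar key likely encodes a literal dot in the rendered name), while the policy name comes from `lifecycle.name` above and the curl.config auth file is referenced elsewhere in this commit:
```
curl -K /opt/so/conf/elasticsearch/curl.config -k \
  "https://localhost:9200/_index_template/so-suricata*?pretty"            # template name assumed
curl -K /opt/so/conf/elasticsearch/curl.config -k \
  "https://localhost:9200/_ilm/policy/so-suricata.alerts-logs?pretty"     # policy name from lifecycle.name above
```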

View File

@@ -1,6 +1,7 @@
 {
   "description" : "suricata.alert",
   "processors" : [
+    { "set": { "field": "_index", "value": "logs-suricata.alerts-so" } },
     { "set": { "field": "tags","value": "alert" }},
     { "rename":{ "field": "message2.alert", "target_field": "rule", "ignore_failure": true } },
     { "rename":{ "field": "rule.signature", "target_field": "rule.name", "ignore_failure": true } },

View File

@@ -521,6 +521,7 @@ elasticsearch:
     so-endgame: *indexSettings
     so-idh: *indexSettings
     so-suricata: *indexSettings
+    so-suricata_x_alerts: *indexSettings
     so-import: *indexSettings
     so-kratos: *indexSettings
     so-kismet: *indexSettings

View File

@@ -358,6 +358,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.40 ]] && up_to_2.4.50 [[ "$INSTALLEDVERSION" == 2.4.40 ]] && up_to_2.4.50
[[ "$INSTALLEDVERSION" == 2.4.50 ]] && up_to_2.4.60 [[ "$INSTALLEDVERSION" == 2.4.50 ]] && up_to_2.4.60
[[ "$INSTALLEDVERSION" == 2.4.60 ]] && up_to_2.4.70 [[ "$INSTALLEDVERSION" == 2.4.60 ]] && up_to_2.4.70
[[ "$INSTALLEDVERSION" == 2.4.70 ]] && up_to_2.4.80
true true
} }
@@ -375,6 +376,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.40 ]] && post_to_2.4.50 [[ "$POSTVERSION" == 2.4.40 ]] && post_to_2.4.50
[[ "$POSTVERSION" == 2.4.50 ]] && post_to_2.4.60 [[ "$POSTVERSION" == 2.4.50 ]] && post_to_2.4.60
[[ "$POSTVERSION" == 2.4.60 ]] && post_to_2.4.70 [[ "$POSTVERSION" == 2.4.60 ]] && post_to_2.4.70
[[ "$POSTVERSION" == 2.4.70 ]] && post_to_2.4.80
true true
} }
@@ -448,6 +450,11 @@ post_to_2.4.70() {
     POSTVERSION=2.4.70
 }
 
+post_to_2.4.80() {
+    echo "Nothing to apply"
+    POSTVERSION=2.4.80
+}
+
 repo_sync() {
     echo "Sync the local repo."
     su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -595,6 +602,11 @@ up_to_2.4.70() {
     INSTALLEDVERSION=2.4.70
 }
 
+up_to_2.4.80() {
+    echo "Nothing to do for 2.4.80"
+    INSTALLEDVERSION=2.4.80
+}
+
 add_detection_test_pillars() {
     if [[ -n "$SOUP_INTERNAL_TESTING" ]]; then
         echo "Adding detection pillar values for automated testing"
@@ -660,6 +672,13 @@ suricata_idstools_migration() {
fail "Error: rsync failed to copy the files. Thresholds have not been backed up." fail "Error: rsync failed to copy the files. Thresholds have not been backed up."
fi fi
#Backup local rules
mkdir -p /nsm/backup/detections-migration/suricata/local-rules
rsync -av /opt/so/rules/nids/suri/local.rules /nsm/backup/detections-migration/suricata/local-rules
if [[ -f /opt/so/saltstack/local/salt/idstools/rules/local.rules ]]; then
rsync -av /opt/so/saltstack/local/salt/idstools/rules/local.rules /nsm/backup/detections-migration/suricata/local-rules/local.rules.bak
fi
#Tell SOC to migrate #Tell SOC to migrate
mkdir -p /opt/so/conf/soc/migrations mkdir -p /opt/so/conf/soc/migrations
echo "0" > /opt/so/conf/soc/migrations/suricata-migration-2.4.70 echo "0" > /opt/so/conf/soc/migrations/suricata-migration-2.4.70
@@ -677,22 +696,21 @@ playbook_migration() {
     if grep -A 1 'playbook:' /opt/so/saltstack/local/pillar/minions/* | grep -q 'enabled: True'; then
         # Check for active Elastalert rules
-        active_rules_count=$(find /opt/so/rules/elastalert/playbook/ -type f -name "*.yaml" | wc -l)
+        active_rules_count=$(find /opt/so/rules/elastalert/playbook/ -type f \( -name "*.yaml" -o -name "*.yml" \) | wc -l)
         if [[ "$active_rules_count" -gt 0 ]]; then
-            # Prompt the user to AGREE if active Elastalert rules found
+            # Prompt the user to press ENTER if active Elastalert rules found
             echo
             echo "$active_rules_count Active Elastalert/Playbook rules found."
             echo "In preparation for the new Detections module, they will be backed up and then disabled."
             echo
-            echo "If you would like to proceed, then type AGREE and press ENTER."
+            echo "Press ENTER to proceed."
             echo
             # Read user input
-            read INPUT
-            if [ "${INPUT^^}" != 'AGREE' ]; then fail "SOUP canceled."; fi
+            read -r
             echo "Backing up the Elastalert rules..."
-            rsync -av --stats /opt/so/rules/elastalert/playbook/*.yaml /nsm/backup/detections-migration/elastalert/
+            rsync -av --ignore-missing-args --stats /opt/so/rules/elastalert/playbook/*.{yaml,yml} /nsm/backup/detections-migration/elastalert/
             # Verify that rsync completed successfully
             if [[ $? -eq 0 ]]; then

View File

@@ -1,4 +1,4 @@
 # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
 salt:
   master:
-    version: 3006.6
+    version: 3006.8

View File

@@ -1,6 +1,6 @@
 # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
 salt:
   minion:
-    version: 3006.6
+    version: 3006.8
     check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default
     service_start_delay: 30 # in seconds.

View File

@@ -80,6 +80,14 @@ socmotd:
     - mode: 600
     - template: jinja
 
+filedetectionsbackup:
+  file.managed:
+    - name: /opt/so/conf/soc/so-detections-backup.py
+    - source: salt://soc/files/soc/so-detections-backup.py
+    - user: 939
+    - group: 939
+    - mode: 600
+
 crondetectionsruntime:
   cron.present:
     - name: /usr/sbin/so-detections-runtime-status cron
@@ -91,6 +99,17 @@ crondetectionsruntime:
     - month: '*'
     - dayweek: '*'
 
+crondetectionsbackup:
+  cron.present:
+    - name: python3 /opt/so/conf/soc/so-detections-backup.py &>> /opt/so/log/soc/detections-backup.log
+    - identifier: detections-backup
+    - user: root
+    - minute: '0'
+    - hour: '0'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+
 socsigmafinalpipeline:
   file.managed:
     - name: /opt/so/conf/soc/sigma_final_pipeline.yaml
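
Salt should render the `cron.present` state above as a root crontab entry that fires at midnight. A sketch of how to confirm it landed; the identifier comment format is an assumption about how Salt tags managed entries:
```
sudo crontab -l | grep -B1 'so-detections-backup'
# Expected output, roughly:
#   # SALT_CRON_IDENTIFIER:detections-backup
#   0 0 * * * python3 /opt/so/conf/soc/so-detections-backup.py &>> /opt/so/log/soc/detections-backup.log
```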

View File

@@ -1271,6 +1271,15 @@ soc:
         - netflow.type
         - netflow.exporter.version
         - observer.ip
+      ':soc:':
+        - soc_timestamp
+        - event.dataset
+        - source.ip
+        - soc.fields.requestMethod
+        - soc.fields.requestPath
+        - soc.fields.statusCode
+        - event.action
+        - soc.fields.error
   server:
     bindAddress: 0.0.0.0:9822
     baseUrl: /

View File

@@ -17,6 +17,16 @@ transformations:
       dst_ip: destination.ip.keyword
       dst_port: destination.port
       winlog.event_data.User: user.name
+      logtype: event.code # OpenCanary
+  # Maps "opencanary" product to SO IDH logs
+  - id: opencanary_idh_add-fields
+    type: add_condition
+    conditions:
+      event.module: 'opencanary'
+      event.dataset: 'opencanary.idh'
+    rule_conditions:
+      - type: logsource
+        product: opencanary
   # Maps "antivirus" category to Windows Defender logs shipped by Elastic Agent Winlog Integration
   # winlog.event_data.threat_name has to be renamed prior to ingestion, it is originally winlog.event_data.Threat Name
   - id: antivirus_field-mappings_windows-defender
@@ -88,3 +98,11 @@ transformations:
       - type: logsource
         product: linux
         service: auth
+  # event.code should always be a string
+  - id: convert_event_code_to_string
+    type: convert_type
+    target_type: 'str'
+    field_name_conditions:
+      - type: include_fields
+        fields:
+          - event.code

View File

@@ -0,0 +1,113 @@
# Copyright 2020-2023 Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

# This script queries Elasticsearch for Custom Detections and all Overrides,
# and git commits them to disk at $OUTPUT_DIR

import os
import subprocess
import json
import requests
from requests.auth import HTTPBasicAuth
import urllib3
from datetime import datetime

# Suppress SSL warnings
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

# Constants
ES_URL = "https://localhost:9200/so-detection/_search"
QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
OUTPUT_DIR = "/nsm/backup/detections/repo"
AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"

def get_auth_credentials(auth_file):
    with open(auth_file, 'r') as file:
        for line in file:
            if line.startswith('user ='):
                return line.split('=', 1)[1].strip().replace('"', '')

def query_elasticsearch(query, auth):
    headers = {"Content-Type": "application/json"}
    response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
    response.raise_for_status()
    return response.json()

def save_content(hit, base_folder, subfolder="", extension="txt"):
    so_detection = hit["_source"]["so_detection"]
    public_id = so_detection["publicId"]
    content = so_detection["content"]
    file_dir = os.path.join(base_folder, subfolder)
    os.makedirs(file_dir, exist_ok=True)
    file_path = os.path.join(file_dir, f"{public_id}.{extension}")
    with open(file_path, "w") as f:
        f.write(content)
    return file_path

def save_overrides(hit):
    so_detection = hit["_source"]["so_detection"]
    public_id = so_detection["publicId"]
    overrides = so_detection["overrides"]
    language = so_detection["language"]
    folder = os.path.join(OUTPUT_DIR, language, "overrides")
    os.makedirs(folder, exist_ok=True)
    extension = "yaml" if language == "sigma" else "txt"
    file_path = os.path.join(folder, f"{public_id}.{extension}")
    with open(file_path, "w") as f:
        f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
    return file_path

def ensure_git_repo():
    if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
        subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
        subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
        subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)

def commit_changes():
    ensure_git_repo()
    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
    subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
    status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
    print(status_result.stdout)
    commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
    if commit_result.returncode == 1:
        print("No changes to commit.")
    elif commit_result.returncode == 0:
        print("Changes committed successfully.")
    else:
        commit_result.check_returncode()

def main():
    try:
        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
        os.makedirs(OUTPUT_DIR, exist_ok=True)
        auth_credentials = get_auth_credentials(AUTH_FILE)
        username, password = auth_credentials.split(':', 1)
        auth = HTTPBasicAuth(username, password)

        # Query and save custom detections
        detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
        for hit in detections:
            save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")

        # Query and save overrides
        overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
        for hit in overrides:
            save_overrides(hit)

        commit_changes()
        timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        print(f"Backup Completed - {timestamp}")
    except Exception as e:
        print(f"An error occurred: {e}")

if __name__ == "__main__":
    main()
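
A usage sketch for the script above; the install path comes from the Salt state earlier in this commit, and the output path from the script's own constants:
```
sudo python3 /opt/so/conf/soc/so-detections-backup.py
# Each run snapshots custom detections and overrides into a git commit, so
# history is available through ordinary git commands:
sudo git -C /nsm/backup/detections/repo log --oneline
sudo git -C /nsm/backup/detections/repo show --stat HEAD
```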

View File

@@ -0,0 +1,159 @@
# Copyright 2020-2023 Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
import unittest
from unittest.mock import patch, MagicMock, mock_open, call
import requests
import os
import subprocess
import json
from datetime import datetime
import importlib
ds = importlib.import_module('so-detections-backup')
class TestBackupScript(unittest.TestCase):
def setUp(self):
self.output_dir = '/nsm/backup/detections/repo'
self.auth_file_path = '/nsm/backup/detections/repo'
self.mock_auth_data = 'user = "so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS"'
self.auth_credentials = 'so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS'
self.auth = requests.auth.HTTPBasicAuth('so_elastic', '@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS')
self.mock_detection_hit = {
"_source": {
"so_detection": {
"publicId": "test_id",
"content": "test_content",
"language": "suricata"
}
}
}
self.mock_override_hit = {
"_source": {
"so_detection": {
"publicId": "test_id",
"overrides": [{"key": "value"}],
"language": "sigma"
}
}
}
def assert_file_written(self, mock_file, expected_path, expected_content):
mock_file.assert_called_once_with(expected_path, 'w')
mock_file().write.assert_called_once_with(expected_content)
@patch('builtins.open', new_callable=mock_open, read_data='user = "so_elastic:@Tu_dv_[7SvK7[-JZN39BBlSa;WAyf8rCY+3w~Sntp=7oR9*~34?Csi)a@v?)K*vK4vQAywS"')
def test_get_auth_credentials(self, mock_file):
credentials = ds.get_auth_credentials(self.auth_file_path)
self.assertEqual(credentials, self.auth_credentials)
mock_file.assert_called_once_with(self.auth_file_path, 'r')
@patch('requests.get')
def test_query_elasticsearch(self, mock_get):
mock_response = MagicMock()
mock_response.json.return_value = {'hits': {'hits': []}}
mock_response.raise_for_status = MagicMock()
mock_get.return_value = mock_response
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
self.assertEqual(response, {'hits': {'hits': []}})
mock_get.assert_called_once_with(
ds.ES_URL,
headers={"Content-Type": "application/json"},
data=ds.QUERY_DETECTIONS,
auth=self.auth,
verify=False
)
@patch('os.makedirs')
@patch('builtins.open', new_callable=mock_open)
def test_save_content(self, mock_file, mock_makedirs):
file_path = ds.save_content(self.mock_detection_hit, self.output_dir, 'subfolder', 'txt')
expected_path = f'{self.output_dir}/subfolder/test_id.txt'
self.assertEqual(file_path, expected_path)
mock_makedirs.assert_called_once_with(f'{self.output_dir}/subfolder', exist_ok=True)
self.assert_file_written(mock_file, expected_path, 'test_content')
@patch('os.makedirs')
@patch('builtins.open', new_callable=mock_open)
def test_save_overrides(self, mock_file, mock_makedirs):
file_path = ds.save_overrides(self.mock_override_hit)
expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
self.assertEqual(file_path, expected_path)
mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)
self.assert_file_written(mock_file, expected_path, json.dumps({"key": "value"}))
@patch('subprocess.run')
def test_ensure_git_repo(self, mock_run):
mock_run.return_value = MagicMock(returncode=0)
ds.ensure_git_repo()
mock_run.assert_has_calls([
call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
call(["git", "-C", self.output_dir, "init"], check=True),
call(["git", "-C", self.output_dir, "remote", "add", "origin", "default"], check=True)
])
@patch('subprocess.run')
def test_commit_changes(self, mock_run):
mock_status_result = MagicMock()
mock_status_result.stdout = "On branch main\nnothing to commit, working tree clean"
mock_commit_result = MagicMock(returncode=1)
# Ensure sufficient number of MagicMock instances for each subprocess.run call
mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]
print("Running test_commit_changes...")
ds.commit_changes()
print("Finished test_commit_changes.")
mock_run.assert_has_calls([
call(["git", "-C", self.output_dir, "config", "user.email", "securityonion@local.invalid"], check=True),
call(["git", "-C", self.output_dir, "config", "user.name", "securityonion"], check=True),
call(["git", "-C", self.output_dir, "add", "."], check=True),
call(["git", "-C", self.output_dir, "status"], capture_output=True, text=True),
call(["git", "-C", self.output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
])
@patch('builtins.print')
@patch('so-detections-backup.commit_changes')
@patch('so-detections-backup.save_overrides')
@patch('so-detections-backup.save_content')
@patch('so-detections-backup.query_elasticsearch')
@patch('so-detections-backup.get_auth_credentials')
@patch('os.makedirs')
def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
mock_get_auth.return_value = self.auth_credentials
mock_query.side_effect = [
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
]
with patch('datetime.datetime') as mock_datetime:
mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
ds.main()
mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
mock_query.assert_has_calls([
call(ds.QUERY_DETECTIONS, self.auth),
call(ds.QUERY_OVERRIDES, self.auth)
])
mock_save_content.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
"yaml"
)
mock_save_overrides.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
)
mock_commit.assert_called_once()
mock_print.assert_called()
if __name__ == '__main__':
unittest.main(verbosity=2)
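
Because the module under test is loaded with importlib under its hyphenated file name, these tests are run by executing the test file directly from the directory containing `so-detections-backup.py`. A sketch; both the checkout path and the test file name below are hypothetical, as neither is shown in this diff:
```
cd salt/soc/files/soc                  # hypothetical checkout location
python3 so-detections-backup_test.py   # hypothetical file name; invokes unittest.main(verbosity=2)
```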

View File

@@ -119,14 +119,14 @@ soc:
       advanced: True
     rulesRepos:
       default: &eerulesRepos
-        description: "Custom Git repos to pull Sigma rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
+        description: "Custom Git repositories to pull Sigma rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled. The new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update."
         global: True
         advanced: True
         forcedType: "[]{}"
         helpLink: sigma.html
       airgap: *eerulesRepos
     sigmaRulePackages:
-      description: 'Defines the Sigma Community Ruleset you want to run. One of these (core | core+ | core++ | all ) as well as an optional Add-on (emerging_threats_addon). Once you have changed the ruleset here, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update. WARNING! Changing the ruleset will remove all existing non-overlapping Sigma rules of the previous ruleset and their associated overrides. This removal cannot be undone.'
+      description: 'Defines the Sigma Community Ruleset you want to run. One of these (core | core+ | core++ | all ) as well as an optional Add-on (emerging_threats_addon). Once you have changed the ruleset here, the new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update. WARNING! Changing the ruleset will remove all existing non-overlapping Sigma rules of the previous ruleset and their associated overrides. This removal cannot be undone.'
       global: True
       advanced: False
       helpLink: sigma.html
@@ -221,7 +221,7 @@ soc:
       advanced: True
     rulesRepos:
       default: &serulesRepos
-        description: "Custom Git repos to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
+        description: "Custom Git repositories to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled. The new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Strelka --> Full Update."
         global: True
         advanced: True
         forcedType: "[]{}"

View File

@@ -451,7 +451,7 @@ whiptail_end_settings() {
         done
     fi
-    if [[ ! $is_airgap ]]; then
+    if [[ ! $is_airgap ]] && [[ $waitforstate ]]; then
         if [[ $telemetry -eq 0 ]]; then
             __append_end_msg "SOC Telemetry: enabled"
         else

Binary file not shown.