Compare commits

43 Commits

Author SHA1 Message Date
Jorge Reyes
4630eaef43 Update version to 2.4.0-foxtrot 2025-12-16 16:05:24 -06:00
coreyogburn
1d63269883 Merge pull request #15323 from Security-Onion-Solutions/cogburn/non-advanced-apiurl
Un-Advanced Assistant ApiUrl
2025-12-16 12:08:14 -07:00
Corey Ogburn
dd8027480b Un-Advanced Assistant ApiUrl 2025-12-16 12:02:01 -07:00
Mike Reeves
c45bd77e44 Merge pull request #15320 from Security-Onion-Solutions/TOoSmOotH-patch-1
Update VERSION
2025-12-16 11:25:35 -05:00
Mike Reeves
032e0abd61 Update 2-4.yml 2025-12-16 11:23:53 -05:00
Mike Reeves
8509d1e454 Update VERSION 2025-12-16 11:23:12 -05:00
Mike Reeves
ddd6935e50 Merge pull request #15318 from Security-Onion-Solutions/2.4.200
2.4.200
2025-12-16 09:15:32 -05:00
Mike Reeves
5588a56b24 2.4.200 2025-12-16 09:07:29 -05:00
Mike Reeves
12aed6e280 Merge pull request #15311 from Security-Onion-Solutions/TOoSmOotH-patch-5
Update so-minion
2025-12-15 12:07:37 -05:00
Mike Reeves
b2a469e08c Update so-minion 2025-12-15 11:56:23 -05:00
Jason Ertel
285b0e4af9 Merge pull request #15308 from Security-Onion-Solutions/idstools-refactor
Add trailing nl if it doesnt already exist
2025-12-14 15:35:24 -05:00
DefensiveDepth
f9edfd6391 Add trailing nl if it doesnt already exist 2025-12-14 12:03:44 -05:00
Josh Patterson
f6301bc3e5 Merge pull request #15304 from Security-Onion-Solutions/ggjorge
fix cleaning repos on remote nodes if airgap
2025-12-12 14:22:21 -05:00
Josh Patterson
6c5c176b7d fix cleaning repos on remote nodes if airgap 2025-12-12 14:18:54 -05:00
Josh Brower
c6d52b5eb1 Merge pull request #15303 from Security-Onion-Solutions/idstools-refactor
Add Airgap check
2025-12-12 09:59:19 -05:00
DefensiveDepth
7cac528389 Add Airgap check 2025-12-12 09:52:01 -05:00
Josh Brower
6fe817ca4a Merge pull request #15301 from Security-Onion-Solutions/idstools-refactor
Rework backup
2025-12-11 13:57:25 -05:00
DefensiveDepth
cb9a6fac25 Update tests for rework 2025-12-11 12:14:37 -05:00
DefensiveDepth
a945768251 Refactor backup 2025-12-11 11:15:30 -05:00
Mike Reeves
c6646e3821 Merge pull request #15289 from Security-Onion-Solutions/TOoSmOotH-patch-3
Update Assistant Models
2025-12-10 17:22:13 -05:00
Mike Reeves
99dc72cece Merge branch '2.4/dev' into TOoSmOotH-patch-3 2025-12-10 17:19:32 -05:00
Josh Brower
04d6cca204 Merge pull request #15298 from Security-Onion-Solutions/idstools-refactor
Fixup logic
2025-12-10 17:18:59 -05:00
DefensiveDepth
5ab6bda639 Fixup logic 2025-12-10 17:16:35 -05:00
Josh Brower
f433de7e12 Merge pull request #15297 from Security-Onion-Solutions/idstools-refactor
small fixes
2025-12-10 15:23:12 -05:00
DefensiveDepth
8ef6c2f91d small fixes 2025-12-10 15:19:44 -05:00
Mike Reeves
7575218697 Merge pull request #15293 from Security-Onion-Solutions/TOoSmOotH-patch-4
Remove Claude Sonnet 4 model configuration
2025-12-09 11:04:38 -05:00
Mike Reeves
dc945dad00 Remove Claude Sonnet 4 model configuration
Removed configuration for Claude Sonnet 4 model.
2025-12-09 11:00:53 -05:00
Josh Brower
ddcd74ffd2 Merge pull request #15292 from Security-Onion-Solutions/idstools-refactor
Fix custom name
2025-12-09 10:12:41 -05:00
DefensiveDepth
e105bd12e6 Fix custom name 2025-12-09 09:49:27 -05:00
Josh Brower
f5688175b6 Merge pull request #15290 from Security-Onion-Solutions/idstools-refactor
match correct custom ruleset name
2025-12-08 18:25:46 -05:00
DefensiveDepth
72a4ba405f match correct custom ruleset name 2025-12-08 16:45:40 -05:00
Mike Reeves
94694d394e Add origin field to model training configuration 2025-12-08 16:36:09 -05:00
Mike Reeves
03dd746601 Add origin field to model configurations 2025-12-08 16:34:19 -05:00
Mike Reeves
eec3373ae7 Update display name for Claude Sonnet 4 2025-12-08 16:30:50 -05:00
Mike Reeves
db45ce07ed Modify model display names and remove GPT-OSS 120B
Updated display names for models and removed GPT-OSS 120B.
2025-12-08 16:26:45 -05:00
Josh Brower
ba49765312 Merge pull request #15287 from Security-Onion-Solutions/idstools-refactor
Rework ordering
2025-12-08 12:42:48 -05:00
DefensiveDepth
72c8c2371e Rework ordering 2025-12-08 12:39:30 -05:00
Josh Brower
80411ab6cf Merge pull request #15286 from Security-Onion-Solutions/idstools-refactor
be more verbose
2025-12-08 10:31:39 -05:00
DefensiveDepth
0ff8fa57e7 be more verbose 2025-12-08 10:29:24 -05:00
Josh Brower
411f28a049 Merge pull request #15284 from Security-Onion-Solutions/idstools-refactor
Make sure local salt dir is created
2025-12-07 17:49:56 -05:00
DefensiveDepth
0f42233092 Make sure local salt dir is created 2025-12-07 16:13:55 -05:00
Josh Brower
2dd49f6d9b Merge pull request #15283 from Security-Onion-Solutions/idstools-refactor
Fixup Airgap
2025-12-06 16:06:57 -05:00
DefensiveDepth
271f545f4f Fixup Airgap 2025-12-06 15:26:44 -05:00
15 changed files with 222 additions and 146 deletions

View File

@@ -33,6 +33,7 @@ body:
- 2.4.180
- 2.4.190
- 2.4.200
- 2.4.210
- Other (please provide detail below)
validations:
required: true

View File

@@ -1,17 +1,17 @@
### 2.4.190-20251024 ISO image released on 2025/10/24
### 2.4.200-20251216 ISO image released on 2025/12/16
### Download and Verify
2.4.190-20251024 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
2.4.200-20251216 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
MD5: 25358481FB876226499C011FC0710358
SHA1: 0B26173C0CE136F2CA40A15046D1DFB78BCA1165
SHA256: 4FD9F62EDA672408828B3C0C446FE5EA9FF3C4EE8488A7AB1101544A3C487872
MD5: 07B38499952D1F2FD7B5AF10096D0043
SHA1: 7F3A26839CA3CAEC2D90BB73D229D55E04C7D370
SHA256: 8D3AC735873A2EA8527E16A6A08C34BD5018CBC0925AC4096E15A0C99F591D5F
Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.
Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
```
Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
```
Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.4.190-20251024.iso.sig securityonion-2.4.190-20251024.iso
gpg --verify securityonion-2.4.200-20251216.iso.sig securityonion-2.4.200-20251216.iso
```
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Thu 23 Oct 2025 07:21:46 AM EDT using RSA key ID FE507013
gpg: Signature made Mon 15 Dec 2025 05:24:11 PM EST using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.

View File

@@ -1 +1 @@
2.4.200
2.4.0-foxtrot

View File

@@ -299,6 +299,19 @@ elasticsearch:
hot:
actions: {}
min_age: 0ms
sos-backup:
index_sorting: false
index_template:
composed_of: []
ignore_missing_component_templates: []
index_patterns:
- sos-backup-*
priority: 501
template:
settings:
index:
number_of_replicas: 0
number_of_shards: 1
so-assistant-chat:
index_sorting: false
index_template:
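The sos-backup template above pins new backup indices to a single shard with no replicas. A quick post-deploy check — a hedged sketch reusing the so-elasticsearch-query wrapper that appears later in this changeset; the jq path assumes the standard Elasticsearch _index_template response shape:
```
# Sketch: confirm the sos-backup template is registered and carries the
# single-shard, zero-replica settings defined above.
/sbin/so-elasticsearch-query '_index_template/sos-backup' \
  | jq '.index_templates[0].index_template.template.settings'
```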

View File

@@ -214,7 +214,7 @@ git_config_set_safe_dirs:
surinsmrulesdir:
file.directory:
- name: /nsm/rules/suricata
- name: /nsm/rules/suricata/etopen
- user: 939
- group: 939
- makedirs: True

View File

@@ -133,7 +133,7 @@ function getinstallinfo() {
return 1
fi
source <(echo $INSTALLVARS)
export $(echo "$INSTALLVARS" | xargs)
if [ $? -ne 0 ]; then
log "ERROR" "Failed to source install variables"
return 1
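The switch from `source <(echo $INSTALLVARS)` to `export $(echo "$INSTALLVARS" | xargs)` matters for scope: sourcing sets the variables in the current shell only, while export makes them visible to child processes. A toy sketch of the difference, assuming simple KEY=VALUE pairs with no embedded spaces (these names are placeholders, not the actual install variables):
```
# Hypothetical KEY=VALUE payload standing in for $INSTALLVARS
INSTALLVARS='FOO=1
BAR=2'

source <(echo "$INSTALLVARS")           # set in this shell, not exported
bash -c 'echo "child sees FOO=$FOO"'    # child sees FOO=

export $(echo "$INSTALLVARS" | xargs)   # xargs flattens pairs onto one line
bash -c 'echo "child sees FOO=$FOO"'    # child sees FOO=1
```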

View File

@@ -1113,47 +1113,47 @@ suricata_idstools_removal_pre() {
install -d -o 939 -g 939 -m 755 /opt/so/conf/soc/fingerprints
install -o 939 -g 939 -m 644 /dev/null /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
cat > /opt/so/conf/soc/fingerprints/suricataengine.syncBlock << EOF
Suricata ruleset sync is blocked until this file is removed. Make sure that you have manually added any custom Suricata rulesets via SOC config - review the documentation for more details: securityonion.net/docs
Suricata ruleset sync is blocked until this file is removed. **CRITICAL** Make sure that you have manually added any custom Suricata rulesets via SOC config before removing this file - review the documentation for more details: https://docs.securityonion.net/en/2.4/nids.html#sync-block
EOF
# Remove possible symlink & create salt local rules dir
[ -L /opt/so/saltstack/local/salt/suricata/rules ] && rm -f /opt/so/saltstack/local/salt/suricata/rules
install -d -o 939 -g 939 /opt/so/saltstack/local/salt/suricata/rules/ || echo "Failed to create Suricata local rules directory"
# Backup custom rules & overrides
mkdir -p /nsm/backup/detections-migration/2-4-200
cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
cp /opt/so/conf/idstools/etc/rulecat.conf /nsm/backup/detections-migration/2-4-200
if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
python3 /opt/so/conf/soc/so-detections-backup.py
# Backup so-detection index via reindex
echo "Creating sos-backup index template..."
template_result=$(/sbin/so-elasticsearch-query '_index_template/sos-backup' -X PUT \
--retry 5 --retry-delay 15 --retry-all-errors \
-d '{"index_patterns":["sos-backup-*"],"priority":501,"template":{"settings":{"index":{"number_of_replicas":0,"number_of_shards":1}}}}')
# Verify backup by comparing counts
echo "Verifying detection overrides backup..."
es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
-d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
echo " Error: Failed to query Elasticsearch for override count"
exit 1
}
if [[ -z "$template_result" ]] || ! echo "$template_result" | jq -e '.acknowledged == true' > /dev/null 2>&1; then
echo "Error: Failed to create sos-backup index template"
echo "$template_result"
exit 1
fi
if [[ ! "$es_override_count" =~ ^[0-9]+$ ]]; then
echo " Error: Invalid override count from Elasticsearch: '$es_override_count'"
exit 1
fi
BACKUP_INDEX="sos-backup-detection-$(date +%Y%m%d-%H%M%S)"
echo "Backing up so-detection index to $BACKUP_INDEX..."
reindex_result=$(/sbin/so-elasticsearch-query '_reindex?wait_for_completion=true' \
--retry 5 --retry-delay 15 --retry-all-errors \
-X POST -d "{\"source\": {\"index\": \"so-detection\"}, \"dest\": {\"index\": \"$BACKUP_INDEX\"}}")
backup_override_count=$(find /nsm/backup/detections/repo/*/overrides -type f 2>/dev/null | wc -l)
echo " Elasticsearch overrides: $es_override_count"
echo " Backed up overrides: $backup_override_count"
if [[ "$es_override_count" -gt 0 ]]; then
if [[ "$backup_override_count" -gt 0 ]]; then
echo " Override backup verified successfully"
else
echo " Error: Elasticsearch has $es_override_count overrides but backup has 0 files"
exit 1
fi
else
echo " No overrides to backup"
fi
if [[ -z "$reindex_result" ]]; then
echo "Error: Backup of detections failed - no response from Elasticsearch"
exit 1
elif echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
echo "so-detection index does not exist, skipping backup"
else
echo "SOC Detections backup script not found, skipping detection backup"
echo "Error: Backup of detections failed"
echo "$reindex_result"
exit 1
fi
}
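Since the rework backs up so-detection with a server-side reindex rather than the backup script alone, a post-migration spot check can compare document counts between the live index and the newest backup. A hedged sketch using the same query wrapper as the function above; the wildcard matches the timestamped BACKUP_INDEX names:
```
# Sketch: document counts should line up between the source index and the
# reindexed sos-backup-detection-<timestamp> copy.
/sbin/so-elasticsearch-query 'so-detection/_count' | jq '.count'
/sbin/so-elasticsearch-query 'sos-backup-detection-*/_count' | jq '.count'
```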
@@ -1174,11 +1174,12 @@ hash_normalized_file() {
return 1
fi
sed -E \
# Ensure trailing newline for consistent hashing regardless of source file
{ sed -E \
-e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
-e '/^$/d' \
-e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
"$file" | sha256sum | awk '{print $1}'
"$file"; echo; } | sed '/^$/d' | sha256sum | awk '{print $1}'
}
# Known-default hashes for so-rule-update (ETOPEN ruleset)
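The `{ sed ...; echo; } | sed '/^$/d'` wrapper is what makes the hash insensitive to a missing trailing newline: echo terminates the last line either way, and the outer sed drops the blank line that appears when the file already ended with one. A minimal demo of that property (the --url rewrite is omitted for brevity):
```
# Two copies of the same rule, differing only in the trailing newline,
# now produce the same digest.
printf 'alert tcp any any -> any any'   > /tmp/rule-a
printf 'alert tcp any any -> any any\n' > /tmp/rule-b
for f in /tmp/rule-a /tmp/rule-b; do
  { sed -E -e 's/^[[:space:]]+//; s/[[:space:]]+$//' -e '/^$/d' "$f"; echo; } \
    | sed '/^$/d' | sha256sum
done
```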
@@ -1274,6 +1275,13 @@ custom_found=0
check_config_file "$SO_RULE_UPDATE" "KNOWN_SO_RULE_UPDATE_HASHES" || custom_found=1
check_config_file "$RULECAT_CONF" "KNOWN_RULECAT_CONF_HASHES" || custom_found=1
# Check for ETPRO rules on airgap systems
if [[ $is_airgap -eq 0 ]] && grep -q 'ETPRO ' /nsm/rules/suricata/emerging-all.rules 2>/dev/null; then
echo "ETPRO rules detected on airgap system - custom configuration"
echo "ETPRO rules detected on Airgap in /nsm/rules/suricata/emerging-all.rules" >> /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
custom_found=1
fi
# If no custom configs found, remove syncBlock
if [[ $custom_found -eq 0 ]]; then
echo "idstools migration completed successfully - removing Suricata engine syncBlock"
@@ -1297,9 +1305,9 @@ if [ -n "$(docker ps -q -f name=^so-idstools$)" ]; then
fi
echo "Removing idstools symlink and scripts..."
rm /opt/so/saltstack/local/salt/suricata/rules
rm -rf /usr/sbin/so-idstools*
sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
crontab -l | grep -v 'so-rule-update' | crontab -
# Backup the salt master config & manager pillar before editing it
cp /opt/so/saltstack/local/pillar/minions/$MINIONID.sls /nsm/backup/detections-migration/2-4-200/
@@ -1355,7 +1363,7 @@ unmount_update() {
update_airgap_rules() {
# Copy the rules over to update them for airgap.
rsync -a $UPDATE_DIR/agrules/suricata/* /nsm/rules/suricata/
rsync -a --delete $UPDATE_DIR/agrules/suricata/ /nsm/rules/suricata/etopen/
rsync -a $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -a $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
# Copy the securityonion-resorces repo over for SOC Detection Summaries and checkout the published summaries branch
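Two rsync semantics do the work in the new invocation: a trailing slash on the source copies the directory's contents (including dotfiles the old `suricata/*` glob would miss), and `--delete` prunes files in the etopen/ destination that no longer exist upstream. A toy run showing the pruning behavior:
```
# Stale files in the destination are removed; current ones are synced in.
mkdir -p /tmp/agdemo/src /tmp/agdemo/dest
touch /tmp/agdemo/src/current.rules /tmp/agdemo/dest/stale.rules
rsync -a --delete /tmp/agdemo/src/ /tmp/agdemo/dest/
ls /tmp/agdemo/dest   # current.rules only
```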
@@ -1865,7 +1873,7 @@ main() {
if [[ $is_airgap -eq 0 ]]; then
echo ""
echo "Cleaning repos on remote Security Onion nodes."
salt -C 'not *_eval and not *_manager and not *_managersearch and not *_standalone and G@os:CentOS' cmd.run "yum clean all"
salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' cmd.run "dnf clean all"
echo ""
fi
fi
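The retargeted compound matcher collapses the old `*_manager`/`*_managersearch` exclusions into `*_manager*` and keys on OEL rather than CentOS. Before running a destructive command across the grid, the same target can be previewed with test.ping — a hedged sketch assuming a shell on the manager:
```
# Dry run: list which minions the compound target would reach before
# actually cleaning their dnf caches.
salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' test.ping
```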

View File

@@ -1622,12 +1622,11 @@ soc:
sourceType: directory
airgap:
- name: Emerging-Threats
description: "Emerging Threats ruleset - To enable ET Pro, enter your license key below. Leave empty for ET Open (free) rules."
description: "Emerging Threats ruleset - To enable ET Pro on Airgap, review the documentation at https://docs.securityonion.net/suricata"
licenseKey: ""
enabled: true
sourceType: url
sourcePath: 'https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz'
urlHash: "https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz.md5"
sourceType: directory
sourcePath: /nsm/rules/suricata/etopen/
license: "BSD"
excludeFiles:
- "*deleted*"
@@ -2653,26 +2652,16 @@ soc:
thresholdColorRatioMed: 0.75
thresholdColorRatioMax: 1
availableModels:
- id: sonnet-4
displayName: Claude Sonnet 4
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: sonnet-4.5
displayName: Claude Sonnet 4.5
displayName: Claude Sonnet 4.5 ($$$)
origin: USA
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: gptoss-120b
displayName: GPT-OSS 120B
contextLimitSmall: 128000
contextLimitLarge: 128000
lowBalanceColorAlert: 500000
enabled: true
- id: qwen-235b
displayName: QWEN 235B
displayName: QWEN 235B ($)
origin: China
contextLimitSmall: 256000
contextLimitLarge: 256000
lowBalanceColorAlert: 500000

View File

@@ -6,6 +6,7 @@
# This script queries Elasticsearch for Custom Detections and all Overrides,
# and git commits them to disk at $OUTPUT_DIR
import argparse
import os
import subprocess
import json
@@ -18,10 +19,10 @@ from datetime import datetime
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Constants
ES_URL = "https://localhost:9200/so-detection/_search"
DEFAULT_INDEX = "so-detection"
DEFAULT_OUTPUT_DIR = "/nsm/backup/detections/repo"
QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
OUTPUT_DIR = "/nsm/backup/detections/repo"
AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"
def get_auth_credentials(auth_file):
@@ -30,9 +31,10 @@ def get_auth_credentials(auth_file):
if line.startswith('user ='):
return line.split('=', 1)[1].strip().replace('"', '')
def query_elasticsearch(query, auth):
def query_elasticsearch(query, auth, index):
url = f"https://localhost:9200/{index}/_search"
headers = {"Content-Type": "application/json"}
response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
response = requests.get(url, headers=headers, data=query, auth=auth, verify=False)
response.raise_for_status()
return response.json()
@@ -47,12 +49,12 @@ def save_content(hit, base_folder, subfolder="", extension="txt"):
f.write(content)
return file_path
def save_overrides(hit):
def save_overrides(hit, output_dir):
so_detection = hit["_source"]["so_detection"]
public_id = so_detection["publicId"]
overrides = so_detection["overrides"]
language = so_detection["language"]
folder = os.path.join(OUTPUT_DIR, language, "overrides")
folder = os.path.join(output_dir, language, "overrides")
os.makedirs(folder, exist_ok=True)
extension = "yaml" if language == "sigma" else "txt"
file_path = os.path.join(folder, f"{public_id}.{extension}")
@@ -60,20 +62,20 @@ def save_overrides(hit):
f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
return file_path
def ensure_git_repo():
if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
def ensure_git_repo(output_dir):
if not os.path.isdir(os.path.join(output_dir, '.git')):
subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)
subprocess.run(["git", "-C", output_dir, "init"], check=True)
subprocess.run(["git", "-C", output_dir, "remote", "add", "origin", "default"], check=True)
def commit_changes():
ensure_git_repo()
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
def commit_changes(output_dir):
ensure_git_repo(output_dir)
subprocess.run(["git", "-C", output_dir, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", output_dir, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", output_dir, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", output_dir, "status"], capture_output=True, text=True)
print(status_result.stdout)
commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
commit_result = subprocess.run(["git", "-C", output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
if commit_result.returncode == 1:
print("No changes to commit.")
elif commit_result.returncode == 0:
@@ -81,29 +83,41 @@ def commit_changes():
else:
commit_result.check_returncode()
def parse_args():
parser = argparse.ArgumentParser(description="Backup custom detections and overrides from Elasticsearch")
parser.add_argument("--output", "-o", default=DEFAULT_OUTPUT_DIR,
help=f"Output directory for backups (default: {DEFAULT_OUTPUT_DIR})")
parser.add_argument("--index", "-i", default=DEFAULT_INDEX,
help=f"Elasticsearch index to query (default: {DEFAULT_INDEX})")
return parser.parse_args()
def main():
args = parse_args()
output_dir = args.output
index = args.index
try:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
os.makedirs(OUTPUT_DIR, exist_ok=True)
print(f"Backing up Custom Detections and all Overrides to {output_dir} - {timestamp}\n")
os.makedirs(output_dir, exist_ok=True)
auth_credentials = get_auth_credentials(AUTH_FILE)
username, password = auth_credentials.split(':', 1)
auth = HTTPBasicAuth(username, password)
# Query and save custom detections
detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
detections = query_elasticsearch(QUERY_DETECTIONS, auth, index)["hits"]["hits"]
for hit in detections:
save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
save_content(hit, output_dir, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
# Query and save overrides
overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
overrides = query_elasticsearch(QUERY_OVERRIDES, auth, index)["hits"]["hits"]
for hit in overrides:
save_overrides(hit)
commit_changes()
save_overrides(hit, output_dir)
commit_changes(output_dir)
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backup Completed - {timestamp}")
except Exception as e:
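With argparse in place, the backup script keeps its old defaults (`/nsm/backup/detections/repo` and the so-detection index), but both are now overridable. A hedged usage sketch; the alternate index and scratch directory names below are hypothetical:
```
# Default run, same behavior as before the refactor:
python3 /opt/so/conf/soc/so-detections-backup.py

# Hypothetical one-off: back up a restored index copy to a scratch repo.
python3 /opt/so/conf/soc/so-detections-backup.py \
  --index so-detection-restored \
  --output /nsm/backup/detections-scratch/repo
```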

View File

@@ -57,12 +57,12 @@ class TestBackupScript(unittest.TestCase):
mock_response.json.return_value = {'hits': {'hits': []}}
mock_response.raise_for_status = MagicMock()
mock_get.return_value = mock_response
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX)
self.assertEqual(response, {'hits': {'hits': []}})
mock_get.assert_called_once_with(
ds.ES_URL,
f"https://localhost:9200/{ds.DEFAULT_INDEX}/_search",
headers={"Content-Type": "application/json"},
data=ds.QUERY_DETECTIONS,
auth=self.auth,
@@ -81,7 +81,7 @@ class TestBackupScript(unittest.TestCase):
@patch('os.makedirs')
@patch('builtins.open', new_callable=mock_open)
def test_save_overrides(self, mock_file, mock_makedirs):
file_path = ds.save_overrides(self.mock_override_hit)
file_path = ds.save_overrides(self.mock_override_hit, self.output_dir)
expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
self.assertEqual(file_path, expected_path)
mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)
@@ -90,9 +90,9 @@ class TestBackupScript(unittest.TestCase):
@patch('subprocess.run')
def test_ensure_git_repo(self, mock_run):
mock_run.return_value = MagicMock(returncode=0)
ds.ensure_git_repo()
ds.ensure_git_repo(self.output_dir)
mock_run.assert_has_calls([
call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
call(["git", "-C", self.output_dir, "init"], check=True),
@@ -106,9 +106,9 @@ class TestBackupScript(unittest.TestCase):
mock_commit_result = MagicMock(returncode=1)
# Ensure sufficient number of MagicMock instances for each subprocess.run call
mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]
print("Running test_commit_changes...")
ds.commit_changes()
ds.commit_changes(self.output_dir)
print("Finished test_commit_changes.")
mock_run.assert_has_calls([
@@ -120,39 +120,45 @@ class TestBackupScript(unittest.TestCase):
])
@patch('builtins.print')
@patch('so-detections-backup.commit_changes')
@patch('so-detections-backup.save_overrides')
@patch('so-detections-backup.save_content')
@patch('so-detections-backup.query_elasticsearch')
@patch('so-detections-backup.get_auth_credentials')
@patch.object(ds, 'commit_changes')
@patch.object(ds, 'save_overrides')
@patch.object(ds, 'save_content')
@patch.object(ds, 'query_elasticsearch')
@patch.object(ds, 'get_auth_credentials')
@patch('os.makedirs')
def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
@patch.object(ds, 'parse_args')
def test_main(self, mock_parse_args, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
mock_args = MagicMock()
mock_args.output = self.output_dir
mock_args.index = ds.DEFAULT_INDEX
mock_parse_args.return_value = mock_args
mock_get_auth.return_value = self.auth_credentials
mock_query.side_effect = [
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
]
with patch('datetime.datetime') as mock_datetime:
mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
ds.main()
mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
mock_query.assert_has_calls([
call(ds.QUERY_DETECTIONS, self.auth),
call(ds.QUERY_OVERRIDES, self.auth)
call(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX),
call(ds.QUERY_OVERRIDES, self.auth, ds.DEFAULT_INDEX)
])
mock_save_content.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
"yaml"
)
mock_save_overrides.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}},
self.output_dir
)
mock_commit.assert_called_once()
mock_commit.assert_called_once_with(self.output_dir)
mock_print.assert_called()
if __name__ == '__main__':

View File

@@ -70,7 +70,7 @@
{# Define the Detections custom ruleset that should always be present #}
{% set CUSTOM_RULESET = {
'name': 'custom',
'name': '__custom__',
'description': 'User-created custom rules created via the Detections module in the SOC UI',
'sourceType': 'elasticsearch',
'sourcePath': 'so_detection.ruleset:__custom__',
@@ -83,7 +83,7 @@
{# Always append the custom ruleset to suricataengine.rulesetSources if not already present #}
{% if SOCMERGED.config.server.modules.suricataengine is defined and SOCMERGED.config.server.modules.suricataengine.rulesetSources is defined %}
{% if SOCMERGED.config.server.modules.suricataengine.rulesetSources is not mapping %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', 'custom') | list %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', '__custom__') | list %}
{% if custom_names | length == 0 %}
{% do SOCMERGED.config.server.modules.suricataengine.rulesetSources.append(CUSTOM_RULESET) %}
{% endif %}
@@ -108,21 +108,39 @@
{% if ruleset.name == 'Emerging-Threats' %}
{% if ruleset.licenseKey and ruleset.licenseKey != '' %}
{# License key is defined - transform to ETPRO #}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': '/nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz',
'license': 'Commercial'
}) %}
{% else %}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% endif %}
{% else %}
{# No license key - explicitly set to ETOPEN #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': '/nsm/rules/suricata/etopen/',
'license': 'BSD'
}) %}
{% else %}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% endif %}
{% endif %}
{% endif %}
{% endfor %}
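The template now branches on `sourceType == 'directory'` to detect airgap mode, expecting ETOPEN rules under /nsm/rules/suricata/etopen/ and an ETPRO tarball at /nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz. A hedged sketch of staging the ETPRO tarball on an airgap manager — the source path is a placeholder, and the supported transfer procedure is in the documentation:
```
# Stage the ETPRO tarball where the template's airgap branch expects it.
install -d /nsm/rules/custom-local-repos/local-etpro-suricata
cp /path/to/etpro.rules.tar.gz \
   /nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz
```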

View File

@@ -608,6 +608,18 @@ soc:
label: Delete Unreferenced (Deletes rules that are no longer referenced by ruleset source)
forcedType: bool
required: False
- field: proxyURL
label: HTTP/HTTPS proxy URL for downloading the ruleset.
required: False
- field: proxyUsername
label: Proxy authentication username.
required: False
- field: proxyPassword
label: Proxy authentication password.
required: False
- field: proxyCACert
label: Path to CA certificate file for MITM proxy verification.
required: False
airgap: *serulesetSources
navigator:
intervalMinutes:
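The four new fields describe a conventional HTTP(S) proxy setup for ruleset downloads. In curl terms they map roughly onto `-x`, `--proxy-user`, and `--cacert` — an illustrative analogue only; the proxy host and credentials below are made up, and the engine's own downloader consumes these settings, not curl:
```
# Rough curl analogue of what the proxy fields configure.
curl -x http://proxy.example.internal:3128 \
     --proxy-user 'svc-rules:REDACTED' \
     --cacert /etc/ssl/certs/mitm-ca.pem \
     -O https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz
```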
@@ -640,7 +652,6 @@ soc:
assistant:
apiUrl:
description: The URL of the AI gateway.
advanced: True
global: True
healthTimeoutSeconds:
description: Timeout in seconds for the Onion AI health check.
@@ -696,6 +707,9 @@ soc:
- field: displayName
label: Display Name
required: True
- field: origin
label: Country of Origin for the Model Training
required: false
- field: contextLimitSmall
label: Context Limit (Small)
forcedType: int

View File

@@ -17,14 +17,23 @@ query() {
STATS=$(query "ruleset-stats")
RELOAD=$(query "ruleset-reload-time")
[ -z "$RELOAD" ] && RELOAD='{}'
if echo "$STATS" | jq -e '.return == "OK"' > /dev/null 2>&1; then
LOADED=$(echo "$STATS" | jq -r '.message[0].rules_loaded')
FAILED=$(echo "$STATS" | jq -r '.message[0].rules_failed')
LAST_RELOAD=$(echo "$RELOAD" | jq -r '.message[0].last_reload')
# Outputs valid JSON on success, empty on failure
OUTPUT=$(jq -n \
--argjson stats "$STATS" \
--argjson reload "$RELOAD" \
'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number" and ($stats.message[0].rules_failed | type) == "number" then
{
rules_loaded: $stats.message[0].rules_loaded,
rules_failed: $stats.message[0].rules_failed,
last_reload: ($reload.message[0].last_reload // ""),
return: "OK"
}
else empty end' 2>/dev/null)
jq -n --argjson loaded "$LOADED" --argjson failed "$FAILED" --arg reload "$LAST_RELOAD" \
'{rules_loaded: $loaded, rules_failed: $failed, last_reload: $reload, return: "OK"}' > "$OUTFILE"
if [ -n "$OUTPUT" ]; then
echo "$OUTPUT" > "$OUTFILE"
else
echo '{"return":"FAIL"}' > "$OUTFILE"
fi
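The rewritten guard does all validation inside jq: well-formed stats produce a complete JSON object, anything else produces empty output, and the caller branches on that emptiness. A toy check with canned input (the counts are sample values):
```
STATS='{"return":"OK","message":[{"rules_loaded":48210,"rules_failed":2}]}'
RELOAD='{}'
jq -n --argjson stats "$STATS" --argjson reload "$RELOAD" \
  'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number"
   then {rules_loaded: $stats.message[0].rules_loaded,
         last_reload: ($reload.message[0].last_reload // "")}
   else empty end'
# → {"rules_loaded":48210,"last_reload":""}
```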

View File

@@ -18,11 +18,15 @@ if [[ ! "`pidof -x $(basename $0) -o %PPID`" ]]; then
if [ -f "$STATSFILE" ] && [ $(($(date +%s) - $(stat -c %Y "$STATSFILE"))) -lt 90 ] && jq -e '.return == "OK" and .rules_loaded != null and .rules_failed != null' "$STATSFILE" > /dev/null 2>&1; then
LOADED=$(jq -r '.rules_loaded' "$STATSFILE")
FAILED=$(jq -r '.rules_failed' "$STATSFILE")
RELOAD_TIME=$(jq -r '.last_reload // ""' "$STATSFILE")
RELOAD_TIME=$(jq -r 'if .last_reload then .last_reload else "" end' "$STATSFILE")
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
if [ -n "$RELOAD_TIME" ]; then
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
else
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,status=\"ok\""
fi
else
echo "surirules loaded=0i,failed=0i,reload_time=\"\",status=\"unknown\""
echo "surirules loaded=0i,failed=0i,status=\"unknown\""
fi
fi
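The script now omits reload_time from the line-protocol output instead of emitting an empty string, which keeps the field's presence and type consistent for telegraf parsing. Sample emissions under each branch (counts and timestamp are illustrative):
```
# With a known reload time:
surirules loaded=48210i,failed=2i,reload_time="2025-12-16T09:00:00.000000+0000",status="ok"
# Without one:
surirules loaded=48210i,failed=2i,status="ok"
# Stats file missing or stale:
surirules loaded=0i,failed=0i,status="unknown"
```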

Binary file not shown.