Merge branch '2.4/dev' into jertel/eaconfig

salt/common/tools/sbin/so-luks-tpm-regen (new file, 98 lines)
@@ -0,0 +1,98 @@
#!/bin/bash
#
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

set -e
# This script is intended to be used in the case the ISO install did not properly set up TPM decryption of the LUKS partitions at boot.
if [ -z "$NOROOT" ]; then
  # Check for prerequisites
  if [ "$(id -u)" -ne 0 ]; then
    echo "This script must be run using sudo!"
    exit 1
  fi
fi
ENROLL_TPM=N

while [[ $# -gt 0 ]]; do
  case $1 in
    --enroll-tpm)
      ENROLL_TPM=Y
      ;;
    *)
      echo "Usage: $0 [options]"
      echo ""
      echo "where options are:"
      echo " --enroll-tpm for when TPM enrollment was not selected during ISO install."
      echo ""
      exit 1
      ;;
  esac
  shift
done

check_for_tpm() {
  echo -n "Checking for TPM: "
  if [ -d /sys/class/tpm/tpm0 ]; then
    echo -e "tpm0 found."
    TPM="yes"
    # Check if TPM is using sha1 or sha256
    if [ -d /sys/class/tpm/tpm0/pcr-sha1 ]; then
      echo -e "TPM is using sha1.\n"
      TPM_PCR="sha1"
    elif [ -d /sys/class/tpm/tpm0/pcr-sha256 ]; then
      echo -e "TPM is using sha256.\n"
      TPM_PCR="sha256"
    fi
  else
    echo -e "No TPM found.\n"
    exit 1
  fi
}

check_for_luks_partitions() {
  echo "Checking for LUKS partitions"
  for part in $(lsblk -o NAME,FSTYPE -ln | grep crypto_LUKS | awk '{print $1}'); do
    echo "Found LUKS partition: $part"
    LUKS_PARTITIONS+=("$part")
  done
  if [ ${#LUKS_PARTITIONS[@]} -eq 0 ]; then
    echo -e "No LUKS partitions found.\n"
    exit 1
  fi
  echo ""
}

enroll_tpm_in_luks() {
  read -s -p "Enter the LUKS passphrase used during ISO install: " LUKS_PASSPHRASE
  echo ""
  for part in "${LUKS_PARTITIONS[@]}"; do
    echo "Enrolling TPM for LUKS device: /dev/$part"
    if [ "$TPM_PCR" == "sha1" ]; then
      clevis luks bind -d "/dev/$part" tpm2 '{"pcr_bank":"sha1","pcr_ids":"7"}' <<< "$LUKS_PASSPHRASE"
    elif [ "$TPM_PCR" == "sha256" ]; then
      clevis luks bind -d "/dev/$part" tpm2 '{"pcr_bank":"sha256","pcr_ids":"7"}' <<< "$LUKS_PASSPHRASE"
    fi
  done
}

regenerate_tpm_enrollment_token() {
  for part in "${LUKS_PARTITIONS[@]}"; do
    clevis luks regen -d "/dev/$part" -s 1 -q
  done
}

check_for_tpm
check_for_luks_partitions

if [[ $ENROLL_TPM == "Y" ]]; then
  enroll_tpm_in_luks
else
  regenerate_tpm_enrollment_token
fi

echo "Running dracut"
dracut -fv
echo -e "\nTPM configuration complete. Reboot the system to verify the TPM is correctly decrypting the LUKS partition(s) at boot.\n"

@@ -1305,6 +1305,7 @@ soc:
      reposFolder: /opt/sensoroni/sigma/repos
      rulesFingerprintFile: /opt/sensoroni/fingerprints/sigma.fingerprint
      stateFilePath: /opt/sensoroni/fingerprints/elastalertengine.state
      integrityCheckFrequencySeconds: 600
      rulesRepos:
        default:
          - repo: https://github.com/Security-Onion-Solutions/securityonion-resources
@@ -1383,6 +1384,7 @@ soc:
|
||||
community: true
|
||||
yaraRulesFolder: /opt/sensoroni/yara/rules
|
||||
stateFilePath: /opt/sensoroni/fingerprints/strelkaengine.state
|
||||
integrityCheckFrequencySeconds: 600
|
||||
suricataengine:
|
||||
allowRegex: ''
|
||||
autoUpdateEnabled: true
|
||||
@@ -1393,6 +1395,7 @@ soc:
|
||||
denyRegex: ''
|
||||
rulesFingerprintFile: /opt/sensoroni/fingerprints/emerging-all.fingerprint
|
||||
stateFilePath: /opt/sensoroni/fingerprints/suricataengine.state
|
||||
integrityCheckFrequencySeconds: 600
|
||||
client:
|
||||
enableReverseLookup: false
|
||||
docsUrl: /docs/
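
These three hunks give each detection engine the same 600-second integrity-check default. An operator wanting a different interval would override it rather than edit the defaults; a minimal sketch, assuming the standard Security Onion local pillar layout (the file path and exact nesting shown here are illustrative, not taken from this diff):

    # /opt/so/saltstack/local/pillar/minions/<manager>.sls (hypothetical location)
    soc:
      config:
        server:
          modules:
            elastalertengine:
              integrityCheckFrequencySeconds: 300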

@@ -46,6 +46,7 @@ so-soc:
            - /opt/so/saltstack:/opt/so/saltstack:rw
            - /opt/so/conf/soc/migrations:/opt/so/conf/soc/migrations:rw
            - /nsm/backup/detections-migration:/nsm/backup/detections-migration:ro
            - /opt/so/state:/opt/so/state:rw
        - extra_hosts:
          {% for node in DOCKER_EXTRA_HOSTS %}
          {% for hostname, ip in node.items() %}

@@ -113,6 +113,10 @@ soc:
        global: True
        advanced: True
        helpLink: sigma.html
      integrityCheckFrequencySeconds:
        description: 'How often the ElastAlert integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
        global: True
        advanced: True
      rulesRepos:
        default: &eerulesRepos
          description: "Custom Git repos to pull Sigma rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
@@ -211,6 +215,10 @@ soc:
        global: True
        advanced: True
        helpLink: yara.html
      integrityCheckFrequencySeconds:
        description: 'How often the Strelka integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
        global: True
        advanced: True
      rulesRepos:
        default: &serulesRepos
          description: "Custom Git repos to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
@@ -235,6 +243,10 @@ soc:
        global: True
        advanced: True
        helpLink: suricata.html
      integrityCheckFrequencySeconds:
        description: 'How often the Suricata integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
        global: True
        advanced: True
    client:
      enableReverseLookup:
        description: Set to true to enable reverse DNS lookups for IP addresses in the SOC UI.

@@ -3,10 +3,13 @@
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

import argparse
import glob
import hashlib
import json
import os
import yara
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor

def check_syntax(rule_file):
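
The body of check_syntax is elided by this hunk. Judging from how its results are unpacked below as (success, rule_file, error_message) tuples, a minimal sketch of the expected shape, using the yara-python API (this sketch is an assumption, not the committed code):

    def check_syntax(rule_file):
        # Compile a single rule file to validate it; report the error message on failure.
        try:
            yara.compile(filepath=rule_file)
            return True, rule_file, None
        except yara.Error as e:
            return False, rule_file, str(e)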

@@ -25,19 +28,24 @@ def compile_yara_rules(rules_dir):
    files_to_compile = {}
    removed_count = 0
    success_count = 0

    # Use ThreadPoolExecutor to parallelize syntax checks
    with ThreadPoolExecutor() as executor:
        results = executor.map(check_syntax, rule_files)

    # Collect yara files and prepare for batch compilation
    ts = str(datetime.utcnow().isoformat())
    failure_ids = []
    success_ids = []
    for success, rule_file, error_message in results:
        rule_id = os.path.splitext(os.path.basename(rule_file))[0]
        if success:
            files_to_compile[os.path.basename(rule_file)] = rule_file
            success_count += 1
            success_ids.append(rule_id)
        else:
            failure_ids.append(rule_id)
            log_entry = {
                "event_module": "soc",
                "event_dataset": "soc.detections",
@@ -55,16 +63,37 @@ def compile_yara_rules(rules_dir):
            removed_count += 1

    # Compile all remaining valid rules into a single file
    compiled_sha256 = ""
    if files_to_compile:
        compiled_rules = yara.compile(filepaths=files_to_compile)
        compiled_rules.save(compiled_rules_path)
        print(f"All remaining rules compiled and saved into {compiled_rules_path}")
        # Hash the compiled bundle
        with open(compiled_rules_path, 'rb') as hash_file:
            compiled_sha256 = hashlib.sha256(hash_file.read()).hexdigest()
    # Remove the rules.compiled file if there aren't any files to be compiled
    else:
        if os.path.exists(compiled_rules_path):
            os.remove(compiled_rules_path)

    # Create compilation report
    compilation_report = {
        "timestamp": ts,
        "compiled_sha256": compiled_sha256,
        "failure": failure_ids,
        "success": success_ids
    }

    # Write totals
    with open('/opt/so/state/detections_yara_compilation-total.log', 'w+') as report_file:
        json.dump(compilation_report, report_file)

    # Print summary of compilation results
    print(f"Summary: {success_count} rules compiled successfully, {removed_count} rules removed due to errors.")

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Compile YARA rules from the specified directory")
    parser.add_argument("rules_dir", help="Directory containing YARA rules to compile")
    args = parser.parse_args()

    compile_yara_rules(args.rules_dir)
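
With the hardcoded path replaced by argparse, the compiler is invoked with the rules directory as an argument; the script's deployed filename is not shown in this hunk, so "compile_yara.py" below is a placeholder:

    python3 compile_yara.py /opt/sensoroni/yara/rules/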