Mirror of https://github.com/Security-Onion-Solutions/securityonion.git
Merge branch '2.4/dev' into jertel/eaconfig

salt/common/tools/sbin/so-luks-tpm-regen (new file, 98 lines)
@@ -0,0 +1,98 @@
#!/bin/bash
#
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

set -e

# This script is intended to be used in the case the ISO install did not properly set up TPM decrypt for LUKS partitions at boot.
if [ -z "$NOROOT" ]; then
    # Check for prerequisites
    if [ "$(id -u)" -ne 0 ]; then
        echo "This script must be run using sudo!"
        exit 1
    fi
fi

ENROLL_TPM=N

while [[ $# -gt 0 ]]; do
    case $1 in
        --enroll-tpm)
            ENROLL_TPM=Y
            ;;
        *)
            echo "Usage: $0 [options]"
            echo ""
            echo "where options are:"
            echo "  --enroll-tpm    for when TPM enrollment was not selected during ISO install."
            echo ""
            exit 1
            ;;
    esac
    shift
done

check_for_tpm() {
    echo -n "Checking for TPM: "
    if [ -d /sys/class/tpm/tpm0 ]; then
        echo "tpm0 found."
        TPM="yes"
        # Check if TPM is using sha1 or sha256
        if [ -d /sys/class/tpm/tpm0/pcr-sha1 ]; then
            echo -e "TPM is using sha1.\n"
            TPM_PCR="sha1"
        elif [ -d /sys/class/tpm/tpm0/pcr-sha256 ]; then
            echo -e "TPM is using sha256.\n"
            TPM_PCR="sha256"
        fi
    else
        echo -e "No TPM found.\n"
        exit 1
    fi
}

check_for_luks_partitions() {
    echo "Checking for LUKS partitions"
    for part in $(lsblk -o NAME,FSTYPE -ln | grep crypto_LUKS | awk '{print $1}'); do
        echo "Found LUKS partition: $part"
        LUKS_PARTITIONS+=("$part")
    done
    if [ ${#LUKS_PARTITIONS[@]} -eq 0 ]; then
        echo -e "No LUKS partitions found.\n"
        exit 1
    fi
    echo ""
}

enroll_tpm_in_luks() {
    read -s -p "Enter the LUKS passphrase used during ISO install: " LUKS_PASSPHRASE
    echo ""
    for part in "${LUKS_PARTITIONS[@]}"; do
        echo "Enrolling TPM for LUKS device: /dev/$part"
        if [ "$TPM_PCR" == "sha1" ]; then
            clevis luks bind -d "/dev/$part" tpm2 '{"pcr_bank":"sha1","pcr_ids":"7"}' <<< "$LUKS_PASSPHRASE"
        elif [ "$TPM_PCR" == "sha256" ]; then
            clevis luks bind -d "/dev/$part" tpm2 '{"pcr_bank":"sha256","pcr_ids":"7"}' <<< "$LUKS_PASSPHRASE"
        fi
    done
}

regenerate_tpm_enrollment_token() {
    for part in "${LUKS_PARTITIONS[@]}"; do
        clevis luks regen -d "/dev/$part" -s 1 -q
    done
}

check_for_tpm
check_for_luks_partitions

if [[ $ENROLL_TPM == "Y" ]]; then
    enroll_tpm_in_luks
else
    regenerate_tpm_enrollment_token
fi

echo "Running dracut"
dracut -fv

echo -e "\nTPM configuration complete. Reboot the system to verify the TPM is correctly decrypting the LUKS partition(s) at boot.\n"
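As a usage sketch, assuming the script is deployed on the PATH under its repository name (tools under salt/common/tools/sbin are typically installed as system binaries):

    # Re-seal the existing clevis TPM2 binding (key slot 1, per the regen call above)
    # against the current PCR values, e.g. after a firmware or Secure Boot change:
    sudo so-luks-tpm-regen

    # First-time enrollment, for installs where TPM enrollment was not selected;
    # prompts for the LUKS passphrase chosen during ISO install:
    sudo so-luks-tpm-regen --enroll-tpm

After either run, clevis luks list -d /dev/<partition> should show the tpm2 pin with the pcr_bank/pcr_ids configuration bound by this script.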
@@ -1305,6 +1305,7 @@ soc:
       reposFolder: /opt/sensoroni/sigma/repos
       rulesFingerprintFile: /opt/sensoroni/fingerprints/sigma.fingerprint
       stateFilePath: /opt/sensoroni/fingerprints/elastalertengine.state
+      integrityCheckFrequencySeconds: 600
       rulesRepos:
         default:
           - repo: https://github.com/Security-Onion-Solutions/securityonion-resources
@@ -1383,6 +1384,7 @@ soc:
       community: true
       yaraRulesFolder: /opt/sensoroni/yara/rules
       stateFilePath: /opt/sensoroni/fingerprints/strelkaengine.state
+      integrityCheckFrequencySeconds: 600
     suricataengine:
       allowRegex: ''
       autoUpdateEnabled: true
@@ -1393,6 +1395,7 @@ soc:
       denyRegex: ''
       rulesFingerprintFile: /opt/sensoroni/fingerprints/emerging-all.fingerprint
       stateFilePath: /opt/sensoroni/fingerprints/suricataengine.state
+      integrityCheckFrequencySeconds: 600
     client:
       enableReverseLookup: false
       docsUrl: /docs/
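All three detection engines (ElastAlert/Sigma, Strelka/YARA, Suricata) gain the same 600-second integrity-check default in the hunks above. A minimal override sketch, assuming the usual Security Onion pattern of shadowing these defaults from a local pillar (the exact nesting below is an assumption and should be checked against the defaults file):

    soc:
      config:
        server:
          modules:
            elastalertengine:
              integrityCheckFrequencySeconds: 300  # assumed pillar path; run every 5 minutes instead of 10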
@@ -46,6 +46,7 @@ so-soc:
       - /opt/so/saltstack:/opt/so/saltstack:rw
       - /opt/so/conf/soc/migrations:/opt/so/conf/soc/migrations:rw
       - /nsm/backup/detections-migration:/nsm/backup/detections-migration:ro
+      - /opt/so/state:/opt/so/state:rw
     - extra_hosts:
       {% for node in DOCKER_EXTRA_HOSTS %}
       {% for hostname, ip in node.items() %}
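The new /opt/so/state bind mount gives the so-soc container read-write access to the host's state directory; this pairs with the YARA compilation changes further down, which write their report to /opt/so/state/detections_yara_compilation-total.log, presumably so SOC can read the compilation results.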
@@ -113,6 +113,10 @@ soc:
         global: True
         advanced: True
         helpLink: sigma.html
+      integrityCheckFrequencySeconds:
+        description: 'How often the ElastAlert integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
+        global: True
+        advanced: True
       rulesRepos:
         default: &eerulesRepos
           description: "Custom Git repos to pull Sigma rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
@@ -211,6 +215,10 @@ soc:
         global: True
         advanced: True
         helpLink: yara.html
+      integrityCheckFrequencySeconds:
+        description: 'How often the Strelka integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
+        global: True
+        advanced: True
       rulesRepos:
         default: &serulesRepos
           description: "Custom Git repos to pull YARA rules from. 'license' field is required, 'folder' is optional. 'community' disables some management options for the imported rules - they can't be deleted or edited, just tuned, duplicated and Enabled | Disabled."
@@ -235,6 +243,10 @@ soc:
         global: True
         advanced: True
         helpLink: suricata.html
+      integrityCheckFrequencySeconds:
+        description: 'How often the Suricata integrity checker runs (in seconds). This verifies the integrity of deployed rules.'
+        global: True
+        advanced: True
     client:
       enableReverseLookup:
         description: Set to true to enable reverse DNS lookups for IP addresses in the SOC UI.
@@ -3,10 +3,13 @@
 # https://securityonion.net/license; you may not use this file except in compliance with the
 # Elastic License 2.0.
 
+import argparse
+import glob
+import hashlib
+import json
 import os
 import yara
-import glob
-import json
+from datetime import datetime
 from concurrent.futures import ThreadPoolExecutor
 
 def check_syntax(rule_file):
@@ -31,13 +34,18 @@ def compile_yara_rules(rules_dir):
         results = executor.map(check_syntax, rule_files)
 
     # Collect yara files and prepare for batch compilation
+    ts = str(datetime.utcnow().isoformat())
+    failure_ids = []
+    success_ids = []
     for success, rule_file, error_message in results:
+        rule_id = os.path.splitext(os.path.basename(rule_file))[0]
         if success:
             files_to_compile[os.path.basename(rule_file)] = rule_file
             success_count += 1
+            success_ids.append(rule_id)
         else:
+            failure_ids.append(rule_id)
             # Extract just the UUID from the rule file name
-            rule_id = os.path.splitext(os.path.basename(rule_file))[0]
             log_entry = {
                 "event_module": "soc",
                 "event_dataset": "soc.detections",
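The rule ID is now derived once per result, before the success/failure branch, so both lists can record it. For a rule file such as /opt/sensoroni/yara/rules/9a1b2c3d-4e5f-6789-abcd-ef0123456789.yar (a hypothetical name), os.path.splitext(os.path.basename(...))[0] yields the bare UUID 9a1b2c3d-4e5f-6789-abcd-ef0123456789.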
@@ -55,16 +63,37 @@ def compile_yara_rules(rules_dir):
             removed_count += 1
 
     # Compile all remaining valid rules into a single file
+    compiled_sha256=""
     if files_to_compile:
         compiled_rules = yara.compile(filepaths=files_to_compile)
         compiled_rules.save(compiled_rules_path)
         print(f"All remaining rules compiled and saved into {compiled_rules_path}")
+        # Hash file
+        with open(compiled_rules_path, 'rb') as hash_file:
+            compiled_sha256=hashlib.sha256(hash_file.read()).hexdigest()
     # Remove the rules.compiled if there aren't any files to be compiled
     else:
         if os.path.exists(compiled_rules_path):
             os.remove(compiled_rules_path)
+
+    # Create compilation report
+    compilation_report = {
+        "timestamp": ts,
+        "compiled_sha256": compiled_sha256,
+        "failure": failure_ids,
+        "success": success_ids
+    }
+
+    # Write total
+    with open('/opt/so/state/detections_yara_compilation-total.log', 'w+') as report_file:
+        json.dump(compilation_report, report_file)
+
     # Print summary of compilation results
     print(f"Summary: {success_count} rules compiled successfully, {removed_count} rules removed due to errors.")
 
-compile_yara_rules("/opt/sensoroni/yara/rules/")
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Compile YARA rules from the specified directory")
+    parser.add_argument("rules_dir", help="Directory containing YARA rules to compile")
+    args = parser.parse_args()
+
+    compile_yara_rules(args.rules_dir)
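With the new argparse entry point, the previously hard-coded rules directory becomes a required argument, so a run now looks like this (the script's filename is not shown in this diff; compile_yara.py is a placeholder):

    python3 compile_yara.py /opt/sensoroni/yara/rules/

and the report written to /opt/so/state/detections_yara_compilation-total.log takes this shape (values illustrative):

    {"timestamp": "2024-05-30T12:34:56.789012", "compiled_sha256": "e3b0c44298fc...", "failure": ["<rule-uuid>"], "success": ["<rule-uuid>", "<rule-uuid>"]}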