Create compile report for SOC integrity check

This commit is contained in:
weslambert
2024-05-17 16:10:10 -04:00
committed by GitHub
parent 300d8436a8
commit d9edff38df

View File

@@ -3,10 +3,13 @@
 # https://securityonion.net/license; you may not use this file except in compliance with the
 # Elastic License 2.0.
+import argparse
+import glob
+import hashlib
+import json
 import os
 import yara
-import glob
-import json
+from datetime import datetime
 from concurrent.futures import ThreadPoolExecutor

 def check_syntax(rule_file):
@@ -25,19 +28,24 @@ def compile_yara_rules(rules_dir):
     files_to_compile = {}
     removed_count = 0
     success_count = 0
     # Use ThreadPoolExecutor to parallelize syntax checks
     with ThreadPoolExecutor() as executor:
         results = executor.map(check_syntax, rule_files)
     # Collect yara files and prepare for batch compilation
+    ts = str(datetime.utcnow().isoformat())
+    failure_ids = []
+    success_ids = []
     for success, rule_file, error_message in results:
+        rule_id = os.path.splitext(os.path.basename(rule_file))[0]
         if success:
             files_to_compile[os.path.basename(rule_file)] = rule_file
             success_count += 1
+            success_ids.append(rule_id)
         else:
+            failure_ids.append(rule_id)
             # Extract just the UUID from the rule file name
-            rule_id = os.path.splitext(os.path.basename(rule_file))[0]
             log_entry = {
                 "event_module": "soc",
                 "event_dataset": "soc.detections",
@@ -55,16 +63,37 @@ def compile_yara_rules(rules_dir):
             removed_count += 1
     # Compile all remaining valid rules into a single file
+    compiled_sha256=""
     if files_to_compile:
         compiled_rules = yara.compile(filepaths=files_to_compile)
         compiled_rules.save(compiled_rules_path)
         print(f"All remaining rules compiled and saved into {compiled_rules_path}")
+        # Hash file
+        with open(compiled_rules_path, 'rb') as hash_file:
+            compiled_sha256=hashlib.sha256(hash_file.read()).hexdigest()
     # Remove the rules.compiled if there aren't any files to be compiled
     else:
         if os.path.exists(compiled_rules_path):
             os.remove(compiled_rules_path)
+    # Create compilation report
+    compilation_report = {
+        "timestamp": ts,
+        "compiled_sha256": compiled_sha256,
+        "failure": failure_ids,
+        "success": success_ids
+    }
+    # Write total
+    with open('/opt/sensoroni/logs/detections_yara_compilation-total.log', 'w+') as report_file:
+        json.dump(compilation_report, report_file)
     # Print summary of compilation results
     print(f"Summary: {success_count} rules compiled successfully, {removed_count} rules removed due to errors.")
-compile_yara_rules("/opt/sensoroni/yara/rules/")
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description="Compile YARA rules from the specified directory")
+    parser.add_argument("rules_dir", help="Directory containing YARA rules to compile")
+    args = parser.parse_args()
+    compile_yara_rules(args.rules_dir)