Mirror of https://github.com/Security-Onion-Solutions/securityonion.git
Initial commit
@@ -73,6 +73,48 @@ eapackageupgrade:
    - template: jinja

{% if GLOBALS.role != "so-fleet" %}

{% if not GLOBALS.airgap %}
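# Clone the Security Onion event-filter rulesets; skipped on airgap installs,
# which have no outbound network access.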
soresourcesrepoclone:
  git.latest:
    - name: https://github.com/Security-Onion-Solutions/securityonion-resources.git
    - target: /opt/so/saltstack/local/salt/elasticfleet/files/securityonion-resources
    - rev: 'dev/defend-filters'
    - depth: 1
    - force_fetch: true
    - force_clone: true
{% endif %}

elasticdefendconfdir:
  file.directory:
    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets
    - user: 947
    - group: 939
    - makedirs: True

elasticdefendrulesets:
  file.recurse:
    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets
    - source: salt://elasticfleet/files/securityonion-resources/event-filters/
    - user: 947
    - group: 939

elasticdefenddisabled:
  file.managed:
    - name: /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml
    - source: salt://elasticfleet/files/soc/elastic-defend-disabled-filters.yaml
    - user: 947
    - group: 939
    - mode: 600

elasticdefendcustom:
  file.managed:
    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters-raw
    - source: salt://elasticfleet/files/soc/elastic-defend-custom-filters.yaml
    - user: 947
    - group: 939
    - mode: 600

eaintegrationsdir:
  file.directory:
    - name: /opt/so/conf/elastic-fleet/integrations
@@ -0,0 +1,27 @@
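# Each document below defines one Elastic Defend event filter template; the
# Sysmon-style TargetField/Condition/Pattern selections are translated into
# Kibana exception entries by so-elastic-defend-manage-filters.py, added later
# in this commit.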
title: 'Template 1'
id: '2B160E43-3B95-4B50-AA3F-25D99F51FA69'
description: 'Credit @ITProPaul'
references: 'https://github.com/Neo23x0/sysmon-config'
author: 'SOS'
date: '09/18/24'
event_type: 'NetworkConnect'
filter_type: 'exclude'
filter:
  selection_1:
    TargetField: 'DestinationIp'
    Condition: 'begin with'
    Pattern: 'fe80:0:0:0'
---
title: 'Template2'
id: '3B160E43-3B95-4B50-AA3F-25D99F51FA69'
description: 'Credit @ITProPaul'
references: 'https://github.com/Neo23x0/sysmon-config'
author: 'SOS'
date: '09/18/24'
event_type: 'NetworkConnect'
filter_type: 'exclude'
filter:
  selection_1:
    TargetField: 'DestinationIp'
    Condition: 'begin with'
    Pattern: 'fe80:0:0:0'
@@ -0,0 +1,3 @@
'9EDAA51C-BB12-49D9-8748-2B61371F2E7D':
  Date: '10/10/2024'
  Notes: 'Example Disabled Filter - Leave this entry here, just copy and paste as needed.'
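# Filters keyed here by GUID are treated as disabled: the manage script's
# disable_check() deletes any matching active exception item from Kibana.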
@@ -9,6 +9,24 @@ elasticfleet:
    global: True
    forcedType: bool
    helpLink: elastic-fleet.html
  files:
    soc:
      elastic-defend-disabled-filters__yaml:
        title: Disabled Elastic Defend filters
        description: Enter the ID of the filter that should be disabled.
        syntax: yaml
        file: True
        global: True
        helpLink: elastic-fleet.html
        advanced: True
      elastic-defend-custom-filters__yaml:
        title: Custom Elastic Defend filters
        description: Enter custom filters.
        syntax: yaml
        file: True
        global: True
        helpLink: elastic-fleet.html
        advanced: True
  logging:
    zeek:
      excluded:
@@ -16,6 +34,12 @@ elasticfleet:
        forcedType: "[]string"
        helpLink: zeek.html
  config:
    defend_filters:
      enable_auto_configuration:
        description: Enable auto-configuration and management of the Elastic Defend Exclusion filters.
        global: True
        helpLink: elastic-fleet.html
        advanced: True
    server:
      custom_fqdn:
        description: Custom FQDN for Agents to connect to. One per line.
salt/elasticfleet/tools/sbin/so-elastic-defend-manage-filters.py (new file, 205 lines)
@@ -0,0 +1,205 @@
import yaml
import sys
import getopt
from so_elastic_defend_filters_helper import *
import logging

logging.basicConfig(level=logging.INFO, format='%(message)s')
# Map Sysmon-style TargetField names to ECS fields, Sysmon event types to
# Elastic Defend datasets, and filter conditions to (operator, match type) pairs
FIELD_MAPPINGS = {
    "Image": "process.executable",
    "ParentImage": "process.parent.executable",
    "CommandLine": "process.command_line",
    "ParentCommandLine": "process.parent.command_line",
    "QueryName": "dns.question.name",
    "DestinationIp": "destination.ip",
    "TargetObject": "registry.path"
}

DATASET_MAPPINGS = {
    "ProcessCreate": "endpoint.events.process",
    "NetworkConnect": "endpoint.events.network",
    "FileCreate": "endpoint.events.file",
    "RegistryEvent": "endpoint.events.registry",
    "DnsQuery": "endpoint.events.network"
}

CONDITION_MAPPINGS = {
    "is": ("included", "match"),
    "end with": ("included", "wildcard"),
    "begin with": ("included", "wildcard"),
    "contains": ("included", "wildcard"),
    "contains any": ("included", "match_any"),
    "excluded": ("excluded", "match")
}
# Extract entries for a rule
def extract_entries(data):
    entries = []
    filter_data = data.get('filter', {})
    for value in filter_data.values():
        target_field = FIELD_MAPPINGS.get(value.get('TargetField', ''))
        condition = value.get('Condition', '')
        pattern = value.get('Pattern', '')

        operator, match_type = CONDITION_MAPPINGS.get(condition, ("included", "match"))

        # Anchor wildcard-style conditions with '*' so they match as named
        if condition == "begin with":
            pattern = f"{pattern}*"
        elif condition == "end with":
            pattern = f"*{pattern}"
        elif condition == "contains":
            pattern = f"*{pattern}*"

        entries.append({
            "field": target_field,
            "operator": operator,
            "type": match_type,
            "value": pattern
        })
    return entries
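# Example: Template 1 above ('DestinationIp' / 'begin with' / 'fe80:0:0:0')
# yields a single entry:
#   {"field": "destination.ip", "operator": "included",
#    "type": "wildcard", "value": "fe80:0:0:0*"}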
# Build the JSON entry
def build_json_entry(data, entries, guid, event_type, dataset, context):
    return {
        "comments": [],
        "entries": entries,
        "item_id": guid,
        "name": f"SO - {event_type} - {guid}",
        "description": f"{context}\n\n <<- Note: This filter is managed by Security Onion. ->>",
        "namespace_type": "agnostic",
        "tags": ["policy:all"],
        "type": "simple",
        "os_types": ["windows"]
    }
# Check to see if the rule is disabled
# If it is, make sure it is not active
def disable_check(guid, disabled_rules, username, password):
    if guid in disabled_rules:
        logging.info(f"Rule {guid} is in the disabled rules list, confirming that it is actually disabled...")
        existing_rule = api_request("GET", guid, username, password)

        if existing_rule:
            if api_request("DELETE", guid, username, password):
                logging.info(f"Successfully deleted rule {guid}")
                return True, "deleted"
            else:
                logging.error(f"Error deleting rule {guid}.")
                return True, "Error deleting"
        return True, "NOP"
    return False, None
def process_rule_update_or_create(guid, json_entry, username, password):
    existing_rule = api_request("GET", guid, username, password)

    if existing_rule:
        existing_rule_data = extract_relevant_fields(existing_rule)
        new_rule_data = extract_relevant_fields(json_entry)
        if generate_hash(existing_rule_data) != generate_hash(new_rule_data):
            logging.info(f"Updating rule {guid}")
            json_entry.pop("list_id", None)
            api_request("PUT", guid, username, password, json_data=json_entry)
            return "updated"
        logging.info(f"Rule {guid} is up to date.")
        return "no_change"
    else:
        logging.info(f"Creating new rule {guid}")
        json_entry["list_id"] = "endpoint_event_filters"
        api_request("POST", guid, username, password, json_data=json_entry)
        return "new"
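# Note: only 'entries' and 'description' feed the hash comparison above, so a
# PUT is issued only when those fields actually change (see
# extract_relevant_fields() in the helper).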
# Main function for processing rules
def process_rules(yaml_files, disabled_rules, username, password):
    stats = {"rule_count": 0, "new": 0, "updated": 0, "no_change": 0, "disabled": 0, "deleted": 0}
    for data in yaml_files:
        logging.info(f"Processing rule: {data.get('id', '')}")
        event_type = data.get('event_type', '')
        guid = data.get('id', '')
        dataset = DATASET_MAPPINGS.get(event_type, '')
        context = data.get('description', '')
        stats["rule_count"] += 1

        rule_deleted, state = disable_check(guid, disabled_rules, username, password)
        if rule_deleted:
            stats["disabled"] += 1
            if state == "deleted":
                stats["deleted"] += 1
            continue

        # Extract entries and build JSON
        entries = extract_entries(data)
        json_entry = build_json_entry(data, entries, guid, event_type, dataset, context)

        # Process rule creation or update
        status = process_rule_update_or_create(guid, json_entry, username, password)

        stats[status] += 1
    return stats
def parse_args(argv):
    try:
        opts, args = getopt.getopt(argv, "i:d:c:f:", ["input=", "disabled=", "credentials=", "flags_file="])
    except getopt.GetoptError:
        print("Usage: python so-elastic-defend-manage-filters.py -c <credentials_file> -d <disabled_file> -i <folder_of_yaml_files> [-f <flags_file>]")
        sys.exit(2)
    return opts


def load_flags(file_path):
    with open(file_path, 'r') as flags_file:
        return flags_file.read().splitlines()


def validate_inputs(credentials_file, disabled_file, yaml_directories):
    if not credentials_file or not disabled_file or not yaml_directories:
        print("Usage: python so-elastic-defend-manage-filters.py -c <credentials_file> -d <disabled_file> -i <folder_of_yaml_files> [-f <flags_file>]")
        sys.exit(2)
def main(argv):
    credentials_file = ""
    disabled_file = ""
    yaml_directories = []

    opts = parse_args(argv)

    for opt, arg in opts:
        if opt in ("-c", "--credentials"):
            credentials_file = arg
        elif opt in ("-d", "--disabled"):
            disabled_file = arg
        elif opt in ("-i", "--input"):
            yaml_directories.append(arg)
        elif opt in ("-f", "--flags_file"):
            flags = load_flags(arg)
            # Drop the flags-file option before recursing so it is not re-read forever
            remaining = [a for a in argv if a not in (opt, arg)]
            return main(remaining + flags)

    validate_inputs(credentials_file, disabled_file, yaml_directories)

    credentials = load_credentials(credentials_file)
    if not credentials:
        raise Exception("Failed to load credentials")

    username, password = extract_auth_details(credentials)
    if not username or not password:
        raise Exception("Invalid credentials format")

    custom_rules_input = '/opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters-raw'
    custom_rules_output = '/opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters'
    prepare_custom_rules(custom_rules_input, custom_rules_output)
    disabled_rules = load_disabled(disabled_file)

    total_stats = {"rule_count": 0, "new": 0, "updated": 0, "no_change": 0, "disabled": 0, "deleted": 0}

    for yaml_dir in yaml_directories:
        yaml_files = load_yaml_files(yaml_dir)
        stats = process_rules(yaml_files, disabled_rules, username, password)

        for key in total_stats:
            total_stats[key] += stats[key]

    logging.info("\nProcessing Summary")
    logging.info(f" - Total processed rules: {total_stats['rule_count']}")
    logging.info(f" - New rules: {total_stats['new']}")
    logging.info(f" - Updated rules: {total_stats['updated']}")
    logging.info(f" - Deleted rules: {total_stats['deleted']}")
    logging.info(f" - Rules with no changes: {total_stats['no_change']}")
    logging.info("Rule status Summary")
    logging.info(f" - Active rules: {total_stats['rule_count'] - total_stats['disabled']}")
    logging.info(f" - Disabled rules: {total_stats['disabled']}")


if __name__ == "__main__":
    main(sys.argv[1:])
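# Example invocation (paths illustrative of a Security Onion manager):
#   python3 so-elastic-defend-manage-filters.py \
#       -c /opt/so/conf/elasticsearch/curl.config \
#       -d /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml \
#       -i /opt/so/conf/elastic-fleet/defend-exclusions/rulesets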
salt/elasticfleet/tools/sbin/so_elastic_defend_filters_helper.py (new file, 123 lines)
@@ -0,0 +1,123 @@
import hashlib
import os
import json
import yaml
import requests
from requests.auth import HTTPBasicAuth
import shutil


# Extract only the 'entries' and 'description' fields
def extract_relevant_fields(rule):
    return {
        'entries': rule.get('entries', []),
        'description': rule.get('description', '')
    }
# Sort lists and recursively sort keys in dictionaries for consistency
def sorted_data(value):
    if isinstance(value, dict):
        return {k: sorted_data(v) for k, v in sorted(value.items())}
    elif isinstance(value, list):
        # Dicts are not orderable, so sort list items by their canonical JSON form
        return sorted((sorted_data(v) for v in value), key=lambda v: json.dumps(v, sort_keys=True))
    return value


# Generate a hash based on sorted relevant fields
def generate_hash(data):
    sorted_data_string = json.dumps(sorted_data(data), sort_keys=True)
    return hashlib.sha256(sorted_data_string.encode('utf-8')).hexdigest()
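# Example: {'b': 1, 'a': [2, 1]} and {'a': [1, 2], 'b': 1} hash identically,
# since keys and list items are canonicalized before hashing.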
# Load Elasticsearch credentials from the config file
def load_credentials(config_path):
    with open(config_path, 'r') as file:
        for line in file:
            if line.startswith("user"):
                credentials = line.split('=', 1)[1].strip().strip('"')
                return credentials
    return None
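# Expected credentials file format (curl-config style; value illustrative):
#   user = "so_elastic:password"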
# Extract username and password from credentials
def extract_auth_details(credentials):
    if ':' in credentials:
        return credentials.split(':', 1)
    return None, None
# Generalized API request function
def api_request(method, guid, username, password, json_data=None):
    headers = {
        'kbn-xsrf': 'true',
        'Content-Type': 'application/json'
    }
    auth = HTTPBasicAuth(username, password)

    if method == "POST":
        url = "http://localhost:5601/api/exception_lists/items?namespace_type=agnostic"
    else:
        url = f"http://localhost:5601/api/exception_lists/items?item_id={guid}&namespace_type=agnostic"

    response = requests.request(method, url, headers=headers, auth=auth, json=json_data)

    if response.status_code in [200, 201]:
        return response.json() if response.content else True
    elif response.status_code == 404 and method == "GET":
        return None
    else:
        print(f"Error with {method} request: {response.status_code} - {response.text}")
        return False
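# All requests target the Exception Lists API of the local Kibana instance;
# items live in the agnostic namespace, and the manage script files them under
# the endpoint_event_filters list.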
# Load YAML data for GUIDs to skip
def load_disabled(disabled_file_path):
    if os.path.exists(disabled_file_path):
        with open(disabled_file_path, 'r') as file:
            return yaml.safe_load(file) or {}
    return {}
def load_yaml_files(*dirs):
    yaml_files = []

    for dir_path in dirs:
        if os.path.isdir(dir_path):
            # Recurse through the directory and subdirectories
            for root, _subdirs, files in os.walk(dir_path):
                for file_name in files:
                    if file_name.endswith(".yaml"):
                        full_path = os.path.join(root, file_name)
                        with open(full_path, 'r') as f:
                            try:
                                yaml_content = yaml.safe_load(f)
                                if yaml_content:
                                    yaml_files.append(yaml_content)
                            except yaml.YAMLError as e:
                                print(f"Error loading {full_path}: {e}")
        else:
            print(f"Invalid directory: {dir_path}")

    return yaml_files
def prepare_custom_rules(input_file, output_dir):
    # Clear the output directory first
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir, exist_ok=True)

    try:
        # Load the YAML file (safe_load_all is lazy, so iterate inside the 'with')
        with open(input_file, 'r') as f:
            docs = yaml.safe_load_all(f)

            for doc in docs:
                if not doc or 'id' not in doc:
                    print(f"Skipping rule, no 'id' found: {doc}")
                    continue
                # Create a filename using the 'id' field
                file_name = os.path.join(output_dir, f"{doc['id']}.yaml")

                # Write the individual YAML file
                with open(file_name, 'w') as output_file:
                    yaml.dump(doc, output_file, default_flow_style=False)
                print(f"Created file: {file_name}")

    except yaml.YAMLError as e:
        print(f"Error parsing YAML: {e}")
    except Exception as e:
        print(f"Error processing file: {e}")
@@ -13,6 +13,6 @@ echo "Setting up default Space:"
{% if HIGHLANDER %}
curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X PUT "localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["enterpriseSearch"]} ' >> /opt/so/log/kibana/misc.log
{% else %}
-curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X PUT "localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["ml","enterpriseSearch","siem","logs","infrastructure","apm","uptime","monitoring","stackAlerts","actions","securitySolutionCases"]} ' >> /opt/so/log/kibana/misc.log
+curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X PUT "localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["ml","enterpriseSearch","logs","infrastructure","apm","uptime","monitoring","stackAlerts","actions","securitySolutionCases"]} ' >> /opt/so/log/kibana/misc.log
{% endif %}
echo