mirror of https://github.com/Security-Onion-Solutions/securityonion.git

Commit: Refactor
@@ -75,14 +75,22 @@ eapackageupgrade:
 {% if GLOBALS.role != "so-fleet" %}
 
 {% if not GLOBALS.airgap %}
+elasticdefendrepodir:
+  file.directory:
+    - name: /nsm/securityonion-resources
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 soresourcesrepoclone:
   git.latest:
     - name: https://github.com/Security-Onion-Solutions/securityonion-resources.git
-    - target: /opt/so/saltstack/local/salt/elasticfleet/files/securityonion-resources
-    - rev: 'dev/defend-filters'
+    - target: /nsm/securityonion-resources
+    - rev: 'dev/defend_filters'
     - depth: 1
     - force_fetch: true
     - force_clone: true
+    - user: socore # id is not allowed for this
 {% endif %}
 
 elasticdefendconfdir:
@@ -92,13 +100,6 @@ elasticdefendconfdir:
     - group: 939
     - makedirs: True
 
-elasticdefendrulesets:
-  file.recurse:
-    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets
-    - source: salt://elasticfleet/files/securityonion-resources/event-filters/
-    - user: 947
-    - group: 939
-
 elasticdefenddisabled:
   file.managed:
     - name: /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml
@@ -115,6 +116,17 @@ elasticdefendcustom:
     - group: 939
     - mode: 600
 
+cronelasticdefendfilters:
+  cron.present:
+    - name: python3 /sbin/so-elastic-defend-manage-filters.py -c /opt/so/conf/elasticsearch/curl.config -d /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml -i /nsm/securityonion-resources/event_filters/ -i /opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters/ &>> /opt/so/log/elasticfleet/elastic-defend-manage-filters.log
+    - identifier: elastic-defend-filters
+    - user: root
+    - minute: '0'
+    - hour: '3'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+
 eaintegrationsdir:
   file.directory:
     - name: /opt/so/conf/elastic-fleet/integrations
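A note on the new cron job: it runs the filter manager nightly at 03:00 (minute '0', hour '3'). Judging from the paths on the command line, -c supplies the Elasticsearch curl.config credentials, -d the disabled-filters.yaml list, and -i (passed twice) the input directories of filter YAML: the cloned securityonion-resources event filters and the locally maintained custom filters. Flag semantics are inferred here from the command itself, not from separate documentation.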
@@ -8,6 +8,8 @@ elasticfleet:
   endpoints_enrollment: ''
   es_token: ''
   grid_enrollment: ''
+  defend_filters:
+    enable_auto_configuration: False
   logging:
     zeek:
       excluded:
@@ -1,27 +1,27 @@
 title: 'Template 1'
-id: '2B160E43-3B95-4B50-AA3F-25D99F51FA69'
-description: 'Credit @ITProPaul'
-references: 'https://github.com/Neo23x0/sysmon-config'
-author: 'SOS'
-date: '09/18/24'
-event_type: 'NetworkConnect'
+id: 'This needs to be a UUIDv4 id - https://www.uuidgenerator.net/version4'
+description: 'Short description detailing what this rule is filtering and why.'
+references: 'Relevant urls, etc'
+author: '@SecurityOnion'
+date: 'MM/DD/YY'
+event_type: 'dns_query'
 filter_type: 'exclude'
 filter:
   selection_1:
-    TargetField: 'DestinationIp'
-    Condition: 'begin with'
-    Pattern: 'fe80:0:0:0'
+    TargetField: 'QueryName'
+    Condition: 'end with'
+    Pattern: '.thawte.com'
 ---
-title: 'Template2'
-id: '3B160E43-3B95-4B50-AA3F-25D99F51FA69'
-description: 'Credit @ITProPaul'
-references: 'https://github.com/Neo23x0/sysmon-config'
-author: 'SOS'
-date: '09/18/24'
-event_type: 'NetworkConnect'
+title: 'Template 2'
+id: 'This needs to be a UUIDv4 id - https://www.uuidgenerator.net/version4'
+description: 'Short description detailing what this rule is filtering and why.'
+references: 'Relevant urls, etc'
+author: '@SecurityOnion'
+date: 'MM/DD/YY'
+event_type: 'process_creation'
 filter_type: 'exclude'
 filter:
   selection_1:
-    TargetField: 'DestinationIp'
-    Condition: 'begin with'
-    Pattern: 'fe80:0:0:0'
+    TargetField: 'ParentImage'
+    Condition: 'is'
+    Pattern: 'C:\Windows\Microsoft.NET\Framework\v4.0.30319\ngentask.exe'
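To make the template change concrete, here is a minimal sketch of how Template 1's selection becomes an Elastic Security event-filter entry. The two mapping tables are copied from the manage-filters script changed later in this commit; the glue code is illustrative only, not the script's actual control flow.

TARGET_FIELD_MAPPINGS = {"QueryName": "dns.question.name"}
CONDITION_MAPPINGS = {"end with": ("included", "wildcard")}

# Template 1's single selection
selection = {"TargetField": "QueryName", "Condition": "end with", "Pattern": ".thawte.com"}

field = TARGET_FIELD_MAPPINGS[selection["TargetField"]]
operator, match_type = CONDITION_MAPPINGS[selection["Condition"]]
value = f"*{selection['Pattern']}"  # 'end with' prepends a wildcard (see modify_pattern below)

print({"field": field, "operator": operator, "type": match_type, "value": value})
# {'field': 'dns.question.name', 'operator': 'included', 'type': 'wildcard', 'value': '*.thawte.com'}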
@@ -21,7 +21,7 @@ elasticfleet:
       advanced: True
     elastic-defend-custom-filters__yaml:
       title: Custom Elastic Defend filters
-      description: Enter custom filters.
+      description: Enter custom filters separated by ---
       syntax: yaml
       file: True
       global: True
@@ -1,4 +1,4 @@
-import yaml
+from datetime import datetime
 import sys
 import getopt
 from so_elastic_defend_filters_helper import *
@@ -6,44 +6,51 @@ import logging
 
 logging.basicConfig(level=logging.INFO, format='%(message)s')
 
-# Define mappings for conditional_field, event_type, and conditions
-FIELD_MAPPINGS = {
+# Define mappings for Target Field, Event Type, Conditions
+TARGET_FIELD_MAPPINGS = {
     "Image": "process.executable",
     "ParentImage": "process.parent.executable",
     "CommandLine": "process.command_line",
     "ParentCommandLine": "process.parent.command_line",
+    "DestinationHostname": "destination.domain",
     "QueryName": "dns.question.name",
     "DestinationIp": "destination.ip",
-    "TargetObject": "registry.path"
+    "TargetObject": "registry.path",
+    "TargetFilename": "file.path"
 }
 
 DATASET_MAPPINGS = {
-    "ProcessCreate": "endpoint.events.process",
-    "NetworkConnect": "endpoint.events.network",
-    "FileCreate": "endpoint.events.file",
-    "RegistryEvent": "endpoint.events.registry",
-    "DnsQuery": "endpoint.events.network"
+    "process_create": "endpoint.events.process",
+    "network_connection": "endpoint.events.network",
+    "file_create": "endpoint.events.file",
+    "file_delete": "endpoint.events.file",
+    "registry_event": "endpoint.events.registry",
+    "dns_query": "endpoint.events.network"
 }
 
 CONDITION_MAPPINGS = {
     "is": ("included", "match"),
     "end with": ("included", "wildcard"),
     "begin with": ("included", "wildcard"),
-    "contains": ("included", "wildcard"),
-    "contains any": ("included", "match_any"),
-    "excluded": ("excluded", "match")
+    "contains": ("included", "wildcard")
 }
 
 # Extract entries for a rule
-def extract_entries(data):
+def extract_entries(data, event_type):
     entries = []
     filter_data = data.get('filter', {})
     for value in filter_data.values():
-        target_field = FIELD_MAPPINGS.get(value.get('TargetField', ''))
+        target_field = TARGET_FIELD_MAPPINGS.get(value.get('TargetField', ''))
         condition = value.get('Condition', '')
         pattern = value.get('Pattern', '')
 
-        operator, match_type = CONDITION_MAPPINGS.get(condition, ("included", "match"))
+        if condition not in CONDITION_MAPPINGS:
+            logging.error(f"Invalid condition: {condition}")
+
+        # Modify the pattern based on the condition
+        pattern = modify_pattern(condition, pattern)
+
+        operator, match_type = CONDITION_MAPPINGS[condition]
 
         entries.append({
             "field": target_field,
@@ -51,10 +58,23 @@ def extract_entries(data):
             "type": match_type,
             "value": pattern
         })
+
+    # Add the event.dataset entry from DATASET_MAPPINGS
+    dataset_value = DATASET_MAPPINGS.get(event_type, '')
+    if dataset_value:
+        entries.append({
+            "field": "event.dataset",
+            "operator": "included",
+            "type": "match",
+            "value": dataset_value
+        })
+    else:
+        logging.error(f"No dataset mapping found for event_type: {event_type}")
+
     return entries
 
-# Build the JSON entry
-def build_json_entry(data, entries, guid, event_type, dataset, context):
+# Build the JSON
+def build_json_entry(entries, guid, event_type, context):
     return {
         "comments": [],
         "entries": entries,
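Hand-deriving the result for the Template 1 example (event_type 'dns_query'), extract_entries should now return the filter entry plus the appended event.dataset entry. This is worked out from the mappings above, not captured output:

expected = [
    {"field": "dns.question.name", "operator": "included",
     "type": "wildcard", "value": "*.thawte.com"},
    # Appended from DATASET_MAPPINGS["dns_query"]
    {"field": "event.dataset", "operator": "included",
     "type": "match", "value": "endpoint.events.network"},
]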
@@ -85,6 +105,26 @@ def disable_check(guid, disabled_rules, username, password):
             return True, "NOP"
     return False, None
 
+def modify_pattern(condition, pattern):
+    """
+    Modify the pattern based on the condition.
+    - 'end with': Add '*' to the beginning of the pattern.
+    - 'begin with': Add '*' to the end of the pattern.
+    - 'contains': Add '*' to both the beginning and end of the pattern.
+    """
+    if isinstance(pattern, list):
+        # Apply modification to each pattern in the list if it's a list of patterns
+        return [modify_pattern(condition, p) for p in pattern]
+
+    if condition == "end with":
+        return f"*{pattern}"
+    elif condition == "begin with":
+        return f"{pattern}*"
+    elif condition == "contains":
+        return f"*{pattern}*"
+    return pattern
+
+
 def process_rule_update_or_create(guid, json_entry, username, password):
     existing_rule = api_request("GET", guid, username, password)
 
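A few hand-worked calls to the new modify_pattern, showing how the Sysmon-style conditions translate into Elastic wildcard patterns:

modify_pattern("end with", ".thawte.com")   # '*.thawte.com'
modify_pattern("begin with", "fe80:0:0:0")  # 'fe80:0:0:0*'
modify_pattern("contains", "ngentask")      # '*ngentask*'
modify_pattern("is", "calc.exe")            # 'calc.exe' (returned unchanged)
modify_pattern("contains", ["a", "b"])      # ['*a*', '*b*'] (lists handled per element)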
@@ -122,8 +162,8 @@ def process_rules(yaml_files, disabled_rules, username, password):
             continue
 
         # Extract entries and build JSON
-        entries = extract_entries(data)
-        json_entry = build_json_entry(data, entries, guid, event_type, dataset, context)
+        entries = extract_entries(data, event_type)
+        json_entry = build_json_entry(entries, guid, event_type, context)
 
         # Process rule creation or update
         status = process_rule_update_or_create(guid, json_entry, username, password)
@@ -167,6 +207,9 @@ def main(argv):
             flags = load_flags(arg)
             return main(argv + flags)
 
+    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    logging.info(f"\n{timestamp}")
+
     validate_inputs(credentials_file, disabled_file, yaml_directories)
 
     credentials = load_credentials(credentials_file)
@@ -200,6 +243,9 @@ def main(argv):
     logging.info(f"Rule status Summary")
     logging.info(f" - Active rules: {total_stats['rule_count'] - total_stats['disabled']}")
     logging.info(f" - Disabled rules: {total_stats['disabled']}")
+    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    logging.info(f"Execution completed at: {timestamp}")
+
 
 if __name__ == "__main__":
     main(sys.argv[1:])
@@ -7,19 +7,21 @@ import requests
 from requests.auth import HTTPBasicAuth
 import shutil
 
-# Extract only the 'entries' and 'description' fields
-def extract_relevant_fields(rule):
+# Extract 'entries', 'description' and 'os_types' fields
+def extract_relevant_fields(filter):
     return {
-        'entries': rule.get('entries', []),
-        'description': rule.get('description', '')
+        'entries': filter.get('entries', []),
+        'description': filter.get('description', '')
     }
 
-# Sort lists and recursively sort keys in dictionaries for consistency
+# Sort for consistency, so that a hash can be generated
 def sorted_data(value):
     if isinstance(value, dict):
+        # Recursively sort the dictionary by key
        return {k: sorted_data(v) for k, v in sorted(value.items())}
     elif isinstance(value, list):
-        return sorted(sorted_data(v) for v in value)
+        # Sort lists; for dictionaries, sort by a specific key
+        return sorted(value, key=lambda x: tuple(sorted(x.items())) if isinstance(x, dict) else x)
     return value
 
 # Generate a hash based on sorted relevant fields
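The reworked list branch exists so that two filters whose entries differ only in order canonicalize identically before hashing. A self-contained check (sorted_data copied from the new version above; the example data is illustrative):

def sorted_data(value):
    if isinstance(value, dict):
        # Recursively sort the dictionary by key
        return {k: sorted_data(v) for k, v in sorted(value.items())}
    elif isinstance(value, list):
        # Sort lists; for dictionaries, sort by a specific key
        return sorted(value, key=lambda x: tuple(sorted(x.items())) if isinstance(x, dict) else x)
    return value

a = {'description': 'd', 'entries': [{'field': 'z'}, {'field': 'a'}]}
b = {'description': 'd', 'entries': [{'field': 'a'}, {'field': 'z'}]}
assert sorted_data(a) == sorted_data(b)  # entry order no longer affects the generated hash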
@@ -109,6 +111,9 @@ def prepare_custom_rules(input_file, output_dir):
         if 'id' not in doc:
             print(f"Skipping rule, no 'id' found: {doc}")
             continue
+        if doc.get('title') in ["Template 1", "Template 2"]:
+            print(f"Skipping template rule with title: {doc['title']}")
+            continue
         # Create a filename using the 'id' field
         file_name = os.path.join(output_dir, f"{doc['id']}.yaml")
 
@@ -120,4 +125,4 @@ def prepare_custom_rules(input_file, output_dir):
     except yaml.YAMLError as e:
         print(f"Error parsing YAML: {e}")
     except Exception as e:
         print(f"Error processing file: {e}")