Compare commits

26 Commits

Author SHA1 Message Date
reyesj2  4780371ebe  WIP soup changes  2025-12-18 19:21:52 -06:00
reyesj2  6c879cbd13  soup changes  2025-12-17 19:08:21 -06:00
reyesj2  089b5aaf44  Merge branch 'reyesj2/elastic9' of github.com:Security-Onion-Solutions/securityonion into reyesj2/elastic9  2025-12-17 16:03:18 -06:00
reyesj2  b61885add5  Fix Kafka output policy - singular topic key  2025-12-17 16:03:12 -06:00
Jorge Reyes  5cb1e284af  Update VERSION  2025-12-17 06:54:32 -06:00
reyesj2  e3a4f0873e  update expected version for elastalert state  2025-12-17 06:53:08 -06:00
reyesj2  7977a020ac  elasticsearch 9.0.8  2025-12-16 16:03:47 -06:00
reyesj2  d518f75468  update deprecated config items  2025-12-11 20:07:06 -06:00
Josh Brower  04d6cca204  Merge pull request #15298 from Security-Onion-Solutions/idstools-refactor (Fixup logic)  2025-12-10 17:18:59 -05:00
DefensiveDepth  5ab6bda639  Fixup logic  2025-12-10 17:16:35 -05:00
Josh Brower  f433de7e12  Merge pull request #15297 from Security-Onion-Solutions/idstools-refactor (small fixes)  2025-12-10 15:23:12 -05:00
DefensiveDepth  8ef6c2f91d  small fixes  2025-12-10 15:19:44 -05:00
Mike Reeves  7575218697  Merge pull request #15293 from Security-Onion-Solutions/TOoSmOotH-patch-4 (Remove Claude Sonnet 4 model configuration)  2025-12-09 11:04:38 -05:00
Mike Reeves  dc945dad00  Remove Claude Sonnet 4 model configuration (Removed configuration for Claude Sonnet 4 model.)  2025-12-09 11:00:53 -05:00
Josh Brower  ddcd74ffd2  Merge pull request #15292 from Security-Onion-Solutions/idstools-refactor (Fix custom name)  2025-12-09 10:12:41 -05:00
DefensiveDepth  e105bd12e6  Fix custom name  2025-12-09 09:49:27 -05:00
Josh Brower  f5688175b6  Merge pull request #15290 from Security-Onion-Solutions/idstools-refactor (match correct custom ruleset name)  2025-12-08 18:25:46 -05:00
DefensiveDepth  72a4ba405f  match correct custom ruleset name  2025-12-08 16:45:40 -05:00
Josh Brower  ba49765312  Merge pull request #15287 from Security-Onion-Solutions/idstools-refactor (Rework ordering)  2025-12-08 12:42:48 -05:00
DefensiveDepth  72c8c2371e  Rework ordering  2025-12-08 12:39:30 -05:00
Josh Brower  80411ab6cf  Merge pull request #15286 from Security-Onion-Solutions/idstools-refactor (be more verbose)  2025-12-08 10:31:39 -05:00
DefensiveDepth  0ff8fa57e7  be more verbose  2025-12-08 10:29:24 -05:00
Josh Brower  411f28a049  Merge pull request #15284 from Security-Onion-Solutions/idstools-refactor (Make sure local salt dir is created)  2025-12-07 17:49:56 -05:00
DefensiveDepth  0f42233092  Make sure local salt dir is created  2025-12-07 16:13:55 -05:00
Josh Brower  2dd49f6d9b  Merge pull request #15283 from Security-Onion-Solutions/idstools-refactor (Fixup Airgap)  2025-12-06 16:06:57 -05:00
DefensiveDepth  271f545f4f  Fixup Airgap  2025-12-06 15:26:44 -05:00
22 changed files with 177 additions and 69 deletions

View File

@@ -1 +1 @@
2.4.200
2.4.0-foxtrot

View File

@@ -60,7 +60,7 @@ so-elastalert:
- watch:
- file: elastaconf
- onlyif:
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}
delete_so-elastalert_so-status.disabled:
file.uncomment:
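
The onlyif gate above now keys the elastalert state off the Elasticsearch major version. A minimal sketch of the same check, assuming so-elasticsearch-query / returns the standard Elasticsearch root document:

# Sketch: extract the ES major version the same way the onlyif check does.
es_major=$(so-elasticsearch-query / | jq -r '.version.number[0:1]')
if [ "$es_major" = "9" ]; then
  echo "Elasticsearch 9.x detected; the elastalert state will run"
fi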

View File

@@ -5,7 +5,7 @@
"package": {
"name": "endpoint",
"title": "Elastic Defend",
"version": "8.18.1",
"version": "9.0.2",
"requires_root": true
},
"enabled": true,

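This package policy pins the Elastic Defend integration at 9.0.2 to match the Elastic Stack 9 upgrade. A hedged way to confirm what version Fleet actually has installed after the upgrade (the /api/fleet/epm/packages/endpoint path and the installationInfo field are assumptions about the Fleet API response shape, consistent with how the package-list script below reads it):

# Sketch: query Kibana for the installed Elastic Defend package version.
curl -sK /opt/so/conf/elasticsearch/curl.config -L \
  "localhost:5601/api/fleet/epm/packages/endpoint" -H 'kbn-xsrf: true' |
  jq '.item | {name: .name, latest: .version, installed: .installationInfo.version}'
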
View File

@@ -21,6 +21,7 @@
'azure_application_insights.app_state': 'azure.app_state',
'azure_billing.billing': 'azure.billing',
'azure_functions.metrics': 'azure.function',
'azure_ai_foundry.metrics': 'azure.ai_foundry',
'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
'azure_metrics.compute_vm': 'azure.compute_vm',
'azure_metrics.container_instance': 'azure.container_instance',

View File

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
rm -f $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
while read -r package; do
# get package details
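
The installed-version field is now read from installationInfo.version instead of savedObject.attributes.install_version in the Fleet package-list response. A hedged sketch of the same jq transform against a tiny sample payload (the sample is trimmed to only the fields the script reads and its shape is an assumption):

# Sketch: run the new jq transform against a minimal sample of the assumed response shape.
echo '{"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"8.18.1"},"conditions":{"elastic":{"subscription":"basic"}}}]}' |
  jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription}]}'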

View File

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg KAFKACA "$KAFKACA" \
--arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg ENABLED_DISABLED "$ENABLED_DISABLED"\
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
--argjson HOSTS "$HOSTS" \
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"
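
Both the POST and PUT payloads above replace the topics array ("topics":[{"topic":"default-securityonion"}]) with a singular "topic":"default-securityonion" key. A quick, hedged way to check which form an existing grid-kafka output is using (the per-output GET and the "item" wrapper in the jq filter are assumptions about the Fleet outputs API response shape):

# Sketch: inspect the existing Kafka output and print whichever topic field is present.
curl -sK /opt/so/conf/elasticsearch/curl.config -L \
  "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' |
  jq '.item | {topic: (.topic // null), topics: (.topics // null)}'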

View File

@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 8.18.8
version: 9.0.8
index_clean: true
config:
action:

View File

@@ -15,7 +15,7 @@ set -e
if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker rm so-elasticsearchca
echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
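
This state imports the Security Onion CA into the Elasticsearch JVM truststore under the SOSCA alias and now copies the CA bundle from the extracted PEM path inside the container. A small, hedged check that the alias actually landed in the copied keystore (run wherever a JDK keytool is available, for example inside the so-elasticsearch container; the changeit storepass is taken from the state above):

# Sketch: confirm the SOSCA alias exists in the copied truststore.
keytool -list \
  -keystore /opt/so/saltstack/local/salt/elasticsearch/cacerts \
  -storepass changeit -alias SOSCA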

View File

@@ -63,7 +63,7 @@ logstash:
settings:
lsheap: 500m
config:
http_x_host: 0.0.0.0
api_x_http_x_host: 0.0.0.0
path_x_logs: /var/log/logstash
pipeline_x_workers: 1
pipeline_x_batch_x_size: 125
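
In these pillar files, _x_ stands in for a literal dot in the rendered config key, so the old http_x_host maps to http.host and the new api_x_http_x_host maps to api.http.host, the Logstash 9 name for the API bind address. A one-liner illustrating that translation (the sed expression only illustrates the naming convention, it is not part of the change set):

# Sketch: the _x_ -> . key translation used by the pillar convention.
echo "api_x_http_x_host" | sed 's/_x_/./g'   # prints: api.http.host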

View File

@@ -5,10 +5,10 @@ input {
codec => es_bulk
request_headers_target_field => client_headers
remote_host_target_field => client_host
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/filebeat.crt"
ssl_key => "/usr/share/logstash/filebeat.key"
ssl_verify_mode => "peer"
ssl_client_authentication => "required"
}
}

View File

@@ -2,11 +2,11 @@ input {
elastic_agent {
port => 5055
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
ssl_verify_mode => "force_peer"
ssl_client_authentication => "required"
ecs_compatibility => v8
}
}

View File

@@ -2,7 +2,7 @@ input {
elastic_agent {
port => 5056
tags => [ "elastic-agent", "fleet-lumberjack-input" ]
ssl => true
ssl_enabled => true
ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
ecs_compatibility => v8

View File

@@ -8,8 +8,8 @@ output {
document_id => "%{[metadata][_id]}"
index => "so-ip-mappings"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -25,8 +25,8 @@ output {
document_id => "%{[metadata][_id]}"
pipeline => "%{[metadata][pipeline]}"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -37,8 +37,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
pipeline => "%{[metadata][pipeline]}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
@@ -49,8 +49,8 @@ output {
data_stream => true
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode=> "none"
}
}
}

View File

@@ -13,8 +13,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
index => "endgame-%{+YYYY.MM.dd}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
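
Taken together, these pipeline edits are the Logstash 9 TLS option renames: ssl becomes ssl_enabled and ssl_verify_mode becomes ssl_client_authentication on the inputs, while ssl becomes ssl_enabled and ssl_certificate_verification => false becomes ssl_verification_mode => "none" on the elasticsearch outputs. A hedged migration sketch for any remaining locally customized pipeline files (the search path is an assumption and the substitutions simply mirror the hunks above; back up and review before running anything like this):

# Sketch: bulk-rename deprecated TLS options in local Logstash pipeline configs.
find /opt/so/saltstack/local/salt/logstash -name '*.conf' -print0 2>/dev/null |
  xargs -0 -r sed -i \
    -e 's/ssl => true/ssl_enabled => true/' \
    -e 's/ssl_verify_mode => "peer"/ssl_client_authentication => "required"/' \
    -e 's/ssl_verify_mode => "force_peer"/ssl_client_authentication => "required"/' \
    -e 's/ssl_certificate_verification => false/ssl_verification_mode => "none"/'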

View File

@@ -56,7 +56,7 @@ logstash:
helpLink: logstash.html
global: False
config:
http_x_host:
api_x_http_x_host:
description: Host interface to listen to connections.
helpLink: logstash.html
readonly: True

View File

@@ -214,7 +214,7 @@ git_config_set_safe_dirs:
surinsmrulesdir:
file.directory:
- name: /nsm/rules/suricata
- name: /nsm/rules/suricata/etopen
- user: 939
- group: 939
- makedirs: True

View File

@@ -427,6 +427,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
[[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true
}
@@ -459,6 +460,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
[[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
[[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
[[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
true
}
@@ -615,9 +617,6 @@ post_to_2.4.180() {
}
post_to_2.4.190() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
# Only need to update import / eval nodes
if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
update_import_fleet_output
@@ -645,6 +644,13 @@ post_to_2.4.200() {
POSTVERSION=2.4.200
}
post_to_2.4.210() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
POSTVERSION=2.4.210
}
repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -906,9 +912,7 @@ up_to_2.4.180() {
}
up_to_2.4.190() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
echo "Nothing to do for 2.4.190"
INSTALLEDVERSION=2.4.190
}
@@ -921,6 +925,13 @@ up_to_2.4.200() {
INSTALLEDVERSION=2.4.200
}
up_to_2.4.210() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
INSTALLEDVERSION=2.4.210
}
add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1113,9 +1124,13 @@ suricata_idstools_removal_pre() {
install -d -o 939 -g 939 -m 755 /opt/so/conf/soc/fingerprints
install -o 939 -g 939 -m 644 /dev/null /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
cat > /opt/so/conf/soc/fingerprints/suricataengine.syncBlock << EOF
Suricata ruleset sync is blocked until this file is removed. Make sure that you have manually added any custom Suricata rulesets via SOC config - review the documentation for more details: securityonion.net/docs
Suricata ruleset sync is blocked until this file is removed. **CRITICAL** Make sure that you have manually added any custom Suricata rulesets via SOC config before removing this file - review the documentation for more details: https://docs.securityonion.net/en/2.4/nids.html#sync-block
EOF
# Remove possible symlink & create salt local rules dir
[ -L /opt/so/saltstack/local/salt/suricata/rules ] && rm -f /opt/so/saltstack/local/salt/suricata/rules
install -d -o 939 -g 939 /opt/so/saltstack/local/salt/suricata/rules/ || echo "Failed to create Suricata local rules directory"
# Backup custom rules & overrides
mkdir -p /nsm/backup/detections-migration/2-4-200
cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
@@ -1127,6 +1142,7 @@ if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
# Verify backup by comparing counts
echo "Verifying detection overrides backup..."
es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
--retry 5 --retry-delay 10 --retry-all-errors \
-d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
echo " Error: Failed to query Elasticsearch for override count"
exit 1
@@ -1297,7 +1313,6 @@ if [ -n "$(docker ps -q -f name=^so-idstools$)" ]; then
fi
echo "Removing idstools symlink and scripts..."
rm /opt/so/saltstack/local/salt/suricata/rules
rm -rf /usr/sbin/so-idstools*
sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
@@ -1355,7 +1370,7 @@ unmount_update() {
update_airgap_rules() {
# Copy the rules over to update them for airgap.
rsync -a $UPDATE_DIR/agrules/suricata/* /nsm/rules/suricata/
rsync -a --delete $UPDATE_DIR/agrules/suricata/ /nsm/rules/suricata/etopen/
rsync -a $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -a $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
# Copy the securityonion-resorces repo over for SOC Detection Summaries and checkout the published summaries branch
@@ -1604,6 +1619,60 @@ verify_latest_update_script() {
fi
}
verify_es_version_compatibility() {
# Define supported upgrade paths for SO ES versions
declare -A es_upgrade_map=(
["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
["8.17.3"]="8.18.4 8.18.6 8.18.8"
["8.18.4"]="8.18.6 8.18.8 9.0.8"
["8.18.6"]="8.18.8 9.0.8"
["8.18.8"]="9.0.8"
)
# ES version stepping stones. Elasticsearch MUST upgrade through these versions
declare -A es_to_so_version=(
["8.18.8"]="2.4.190-20251024"
["9.0.8"]="2.4.210"
)
# Get current Elasticsearch version
if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
else
echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
exit 1
fi
# Get the target ES version from the updatedir
# DOUBLE TAP ON THIS SOMEONE FROM AN OLD VERSION UPGRADING TO ANOTHER VERSION OLDER THAN 2.4.110 WOULD HAVE AN ERROR HERE. Prior to this version there was no version defined in defaults.yaml
target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p')
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
# supported upgrade
return 0
else
compatible_versions=${es_upgrade_map[$es_version]}
next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
echo ""
echo "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION)."
echo ""
if [[ $is_airgap -eq 0 ]]; then
echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
else
echo "You can use the following soup command to upgrade to $next_step_so_version;"
echo " sudo BRANCH=$next_step_so_version soup"
echo ""
fi
echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
echo ""
exit 1
fi
}
# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1700,6 +1769,8 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script
verify_es_version_compatibility
echo "Let's see if we need to update Security Onion."
upgrade_check
upgrade_space
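
The new verify_es_version_compatibility function gates soup on a supported Elasticsearch upgrade path: the running ES version must list the target version in es_upgrade_map, otherwise the user is told to step through the intermediate Security Onion release looked up in es_to_so_version. A standalone sketch of the same membership check, with a couple of entries copied from the hunk above:

# Sketch: the upgrade-path membership check used by verify_es_version_compatibility.
declare -A es_upgrade_map=( ["8.18.6"]="8.18.8 9.0.8" ["8.18.8"]="9.0.8" )
es_version="8.18.6"
target_es_version="9.0.8"
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
  echo "supported: Elasticsearch $es_version -> $target_es_version"
else
  echo "unsupported: upgrade through ${es_upgrade_map[$es_version]##* } first"
fi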

View File

@@ -1622,12 +1622,11 @@ soc:
sourceType: directory
airgap:
- name: Emerging-Threats
description: "Emerging Threats ruleset - To enable ET Pro, enter your license key below. Leave empty for ET Open (free) rules."
description: "Emerging Threats ruleset - To enable ET Pro on Airgap, review the documentation at https://docs.securityonion.net/suricata"
licenseKey: ""
enabled: true
sourceType: url
sourcePath: 'https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz'
urlHash: "https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz.md5"
sourceType: directory
sourcePath: /nsm/rules/suricata/etopen/
license: "BSD"
excludeFiles:
- "*deleted*"
@@ -2653,12 +2652,6 @@ soc:
thresholdColorRatioMed: 0.75
thresholdColorRatioMax: 1
availableModels:
- id: sonnet-4
displayName: Claude Sonnet 4
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: sonnet-4.5
displayName: Claude Sonnet 4.5
contextLimitSmall: 200000

View File

@@ -70,7 +70,7 @@
{# Define the Detections custom ruleset that should always be present #}
{% set CUSTOM_RULESET = {
'name': 'custom',
'name': '__custom__',
'description': 'User-created custom rules created via the Detections module in the SOC UI',
'sourceType': 'elasticsearch',
'sourcePath': 'so_detection.ruleset:__custom__',
@@ -83,7 +83,7 @@
{# Always append the custom ruleset to suricataengine.rulesetSources if not already present #}
{% if SOCMERGED.config.server.modules.suricataengine is defined and SOCMERGED.config.server.modules.suricataengine.rulesetSources is defined %}
{% if SOCMERGED.config.server.modules.suricataengine.rulesetSources is not mapping %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', 'custom') | list %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', '__custom__') | list %}
{% if custom_names | length == 0 %}
{% do SOCMERGED.config.server.modules.suricataengine.rulesetSources.append(CUSTOM_RULESET) %}
{% endif %}
@@ -108,21 +108,39 @@
{% if ruleset.name == 'Emerging-Threats' %}
{% if ruleset.licenseKey and ruleset.licenseKey != '' %}
{# License key is defined - transform to ETPRO #}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': '/nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz',
'license': 'Commercial'
}) %}
{% else %}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% endif %}
{% else %}
{# No license key - explicitly set to ETOPEN #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': '/nsm/rules/suricata/etopen/',
'license': 'BSD'
}) %}
{% else %}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% endif %}
{% endif %}
{% endif %}
{% endfor %}

View File

@@ -608,6 +608,18 @@ soc:
label: Delete Unreferenced (Deletes rules that are no longer referenced by ruleset source)
forcedType: bool
required: False
- field: proxyURL
label: HTTP/HTTPS proxy URL for downloading the ruleset.
required: False
- field: proxyUsername
label: Proxy authentication username.
required: False
- field: proxyPassword
label: Proxy authentication password.
required: False
- field: proxyCACert
label: Path to CA certificate file for MITM proxy verification.
required: False
airgap: *serulesetSources
navigator:
intervalMinutes:
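
The new proxyURL, proxyUsername, proxyPassword, and proxyCACert fields annotate how a Suricata ruleset download can be routed through an HTTP/HTTPS proxy. As a rough illustration of what those four values correspond to at fetch time (this curl invocation is only an analogy with placeholder proxy details, not how the suricataengine module performs the download):

# Illustration: fetching the ET Open tarball through an authenticated MITM proxy.
curl --proxy "http://proxy.example.internal:3128" \
     --proxy-user "someuser:s3cret" \
     --cacert /etc/ssl/certs/proxy-ca.pem \
     -O "https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz"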

View File

@@ -17,14 +17,23 @@ query() {
STATS=$(query "ruleset-stats")
RELOAD=$(query "ruleset-reload-time")
[ -z "$RELOAD" ] && RELOAD='{}'
if echo "$STATS" | jq -e '.return == "OK"' > /dev/null 2>&1; then
LOADED=$(echo "$STATS" | jq -r '.message[0].rules_loaded')
FAILED=$(echo "$STATS" | jq -r '.message[0].rules_failed')
LAST_RELOAD=$(echo "$RELOAD" | jq -r '.message[0].last_reload')
# Outputs valid JSON on success, empty on failure
OUTPUT=$(jq -n \
--argjson stats "$STATS" \
--argjson reload "$RELOAD" \
'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number" and ($stats.message[0].rules_failed | type) == "number" then
{
rules_loaded: $stats.message[0].rules_loaded,
rules_failed: $stats.message[0].rules_failed,
last_reload: ($reload.message[0].last_reload // ""),
return: "OK"
}
else empty end' 2>/dev/null)
jq -n --argjson loaded "$LOADED" --argjson failed "$FAILED" --arg reload "$LAST_RELOAD" \
'{rules_loaded: $loaded, rules_failed: $failed, last_reload: $reload, return: "OK"}' > "$OUTFILE"
if [ -n "$OUTPUT" ]; then
echo "$OUTPUT" > "$OUTFILE"
else
echo '{"return":"FAIL"}' > "$OUTFILE"
fi
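
The rewritten script now builds its output with a single jq program that validates the Salt return before emitting JSON, and writes {"return":"FAIL"} otherwise. A quick way to exercise that filter against a sample ruleset-stats return (the sample payloads are placeholders whose shape is inferred from the fields referenced above):

# Sketch: feed a representative ruleset-stats return through the same jq program.
STATS='{"return":"OK","message":[{"rules_loaded":47000,"rules_failed":0}]}'
RELOAD='{"message":[{"last_reload":"2025-12-18T19:21:52"}]}'
jq -n --argjson stats "$STATS" --argjson reload "$RELOAD" \
  'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number"
   then {rules_loaded: $stats.message[0].rules_loaded,
         rules_failed: $stats.message[0].rules_failed,
         last_reload: ($reload.message[0].last_reload // ""),
         return: "OK"}
   else empty end'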

View File

@@ -18,11 +18,15 @@ if [[ ! "`pidof -x $(basename $0) -o %PPID`" ]]; then
if [ -f "$STATSFILE" ] && [ $(($(date +%s) - $(stat -c %Y "$STATSFILE"))) -lt 90 ] && jq -e '.return == "OK" and .rules_loaded != null and .rules_failed != null' "$STATSFILE" > /dev/null 2>&1; then
LOADED=$(jq -r '.rules_loaded' "$STATSFILE")
FAILED=$(jq -r '.rules_failed' "$STATSFILE")
RELOAD_TIME=$(jq -r '.last_reload // ""' "$STATSFILE")
RELOAD_TIME=$(jq -r 'if .last_reload then .last_reload else "" end' "$STATSFILE")
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
if [ -n "$RELOAD_TIME" ]; then
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
else
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,status=\"ok\""
fi
else
echo "surirules loaded=0i,failed=0i,reload_time=\"\",status=\"unknown\""
echo "surirules loaded=0i,failed=0i,status=\"unknown\""
fi
fi