Compare commits

9 Commits

Author | SHA1 | Message | Date
reyesj2 | f666ad600f | accept same version 'upgrades' | 2025-12-23 16:27:22 -06:00
reyesj2 | 9345718967 | verify pre-soup ES version is directly upgradable to post-soup ES version. | 2025-12-19 16:15:05 -06:00
reyesj2 | 6c879cbd13 | soup changes | 2025-12-17 19:08:21 -06:00
reyesj2 | 089b5aaf44 | Merge branch 'reyesj2/elastic9' of github.com:Security-Onion-Solutions/securityonion into reyesj2/elastic9 | 2025-12-17 16:03:18 -06:00
reyesj2 | b61885add5 | Fix Kafka output policy - singular topic key | 2025-12-17 16:03:12 -06:00
Jorge Reyes | 5cb1e284af | Update VERSION | 2025-12-17 06:54:32 -06:00
reyesj2 | e3a4f0873e | update expected version for elastalert state | 2025-12-17 06:53:08 -06:00
reyesj2 | 7977a020ac | elasticsearch 9.0.8 | 2025-12-16 16:03:47 -06:00
reyesj2 | d518f75468 | update deprecated config items | 2025-12-11 20:07:06 -06:00
23 changed files with 218 additions and 174 deletions

View File

@@ -1,17 +1,17 @@
### 2.4.200-20251216 ISO image released on 2025/12/16
### 2.4.190-20251024 ISO image released on 2025/10/24
### Download and Verify
2.4.200-20251216 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
2.4.190-20251024 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
MD5: 07B38499952D1F2FD7B5AF10096D0043
SHA1: 7F3A26839CA3CAEC2D90BB73D229D55E04C7D370
SHA256: 8D3AC735873A2EA8527E16A6A08C34BD5018CBC0925AC4096E15A0C99F591D5F
MD5: 25358481FB876226499C011FC0710358
SHA1: 0B26173C0CE136F2CA40A15046D1DFB78BCA1165
SHA256: 4FD9F62EDA672408828B3C0C446FE5EA9FF3C4EE8488A7AB1101544A3C487872
Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.
Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
```
Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
```
Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.4.200-20251216.iso.sig securityonion-2.4.200-20251216.iso
gpg --verify securityonion-2.4.190-20251024.iso.sig securityonion-2.4.190-20251024.iso
```
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Mon 15 Dec 2025 05:24:11 PM EST using RSA key ID FE507013
gpg: Signature made Thu 23 Oct 2025 07:21:46 AM EDT using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.

View File

@@ -1 +1 @@
2.4.200
2.4.0-foxtrot

View File

@@ -60,7 +60,7 @@ so-elastalert:
- watch:
- file: elastaconf
- onlyif:
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}
delete_so-elastalert_so-status.disabled:
file.uncomment:
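
For reference, that onlyif gate takes the first character of the version string reported by the Elasticsearch root endpoint. A minimal sketch of the jq extraction, using a hypothetical response body:

```
echo '{"version":{"number":"9.0.8"}}' | jq -r '.version.number[0:1]'
# prints 9, so `grep -q 9` succeeds and the state runs
```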

View File

@@ -5,7 +5,7 @@
"package": {
"name": "endpoint",
"title": "Elastic Defend",
"version": "8.18.1",
"version": "9.0.2",
"requires_root": true
},
"enabled": true,

View File

@@ -21,6 +21,7 @@
'azure_application_insights.app_state': 'azure.app_state',
'azure_billing.billing': 'azure.billing',
'azure_functions.metrics': 'azure.function',
'azure_ai_foundry.metrics': 'azure.ai_foundry',
'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
'azure_metrics.compute_vm': 'azure.compute_vm',
'azure_metrics.container_instance': 'azure.container_instance',

View File

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
rm -f $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
while read -r package; do
# get package details
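
The installed version is now read from installationInfo.version rather than the deprecated saved-object attributes. A minimal sketch of the new jq mapping against a hypothetical one-item package list:

```
echo '{"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"9.0.2"},"conditions":{"elastic":{"subscription":"basic"}}}]}' \
  | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription}]}'
```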

View File

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg KAFKACA "$KAFKACA" \
--arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg ENABLED_DISABLED "$ENABLED_DISABLED"\
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
--argjson HOSTS "$HOSTS" \
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"

View File

@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 8.18.8
version: 9.0.8
index_clean: true
config:
action:
@@ -299,19 +299,6 @@ elasticsearch:
hot:
actions: {}
min_age: 0ms
sos-backup:
index_sorting: false
index_template:
composed_of: []
ignore_missing_component_templates: []
index_patterns:
- sos-backup-*
priority: 501
template:
settings:
index:
number_of_replicas: 0
number_of_shards: 1
so-assistant-chat:
index_sorting: false
index_template:

View File

@@ -15,7 +15,7 @@ set -e
if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker rm so-elasticsearchca
echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem

View File

@@ -63,7 +63,7 @@ logstash:
settings:
lsheap: 500m
config:
http_x_host: 0.0.0.0
api_x_http_x_host: 0.0.0.0
path_x_logs: /var/log/logstash
pipeline_x_workers: 1
pipeline_x_batch_x_size: 125

View File

@@ -5,10 +5,10 @@ input {
codec => es_bulk
request_headers_target_field => client_headers
remote_host_target_field => client_host
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/filebeat.crt"
ssl_key => "/usr/share/logstash/filebeat.key"
ssl_verify_mode => "peer"
ssl_client_authentication => "required"
}
}

View File

@@ -2,11 +2,11 @@ input {
elastic_agent {
port => 5055
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
ssl_verify_mode => "force_peer"
ssl_client_authentication => "required"
ecs_compatibility => v8
}
}

View File

@@ -2,7 +2,7 @@ input {
elastic_agent {
port => 5056
tags => [ "elastic-agent", "fleet-lumberjack-input" ]
ssl => true
ssl_enabled => true
ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
ecs_compatibility => v8

View File

@@ -8,8 +8,8 @@ output {
document_id => "%{[metadata][_id]}"
index => "so-ip-mappings"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -25,8 +25,8 @@ output {
document_id => "%{[metadata][_id]}"
pipeline => "%{[metadata][pipeline]}"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -37,8 +37,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
pipeline => "%{[metadata][pipeline]}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
@@ -49,8 +49,8 @@ output {
data_stream => true
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}

View File

@@ -13,8 +13,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
index => "endgame-%{+YYYY.MM.dd}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
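
Taken together, the pipeline edits above track the Logstash 9 renames of removed 8.x SSL options: ssl becomes ssl_enabled, ssl_verify_mode ("peer"/"force_peer") becomes ssl_client_authentication => "required", and ssl_certificate_verification => false becomes ssl_verification_mode => "none". A hedged migration sketch for similar configs outside this changeset (the path and the exact patterns are illustrative):

```
for f in /opt/so/conf/logstash/pipelines/*.conf; do  # hypothetical path
  cp "$f" "$f.bak"                                   # keep a backup first
  sed -i \
    -e 's/ssl => /ssl_enabled => /' \
    -e 's/ssl_verify_mode => "peer"/ssl_client_authentication => "required"/' \
    -e 's/ssl_verify_mode => "force_peer"/ssl_client_authentication => "required"/' \
    -e 's/ssl_certificate_verification => false/ssl_verification_mode => "none"/' \
    "$f"
done
```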

View File

@@ -56,7 +56,7 @@ logstash:
helpLink: logstash.html
global: False
config:
http_x_host:
api_x_http_x_host:
description: Host interface on which to listen for connections.
helpLink: logstash.html
readonly: True

View File

@@ -133,7 +133,7 @@ function getinstallinfo() {
return 1
fi
export $(echo "$INSTALLVARS" | xargs)
source <(echo $INSTALLVARS)
if [ $? -ne 0 ]; then
log "ERROR" "Failed to source install variables"
return 1
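
The switch from export $(... | xargs) to source <(...) matters when a value contains spaces: xargs re-splits a quoted value into separate words, while sourcing preserves it. A small sketch with a hypothetical variable:

```
INSTALLVARS='TITLE="Security Onion" COUNT=1'
source <(echo $INSTALLVARS)
echo "$TITLE"   # prints: Security Onion
# export $(echo "$INSTALLVARS" | xargs) would instead set TITLE=Security
# and try to export a stray word "Onion"
```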

View File

@@ -87,6 +87,9 @@ check_err() {
113)
echo 'No route to host'
;;
160)
echo 'Incompatible Elasticsearch upgrade'
;;
*)
echo 'Unhandled error'
echo "$err_msg"
@@ -427,6 +430,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
[[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true
}
@@ -459,6 +463,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
[[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
[[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
[[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
true
}
@@ -615,9 +620,6 @@ post_to_2.4.180() {
}
post_to_2.4.190() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
# Only need to update import / eval nodes
if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
update_import_fleet_output
@@ -645,6 +647,13 @@ post_to_2.4.200() {
POSTVERSION=2.4.200
}
post_to_2.4.210() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
POSTVERSION=2.4.210
}
repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -906,9 +915,7 @@ up_to_2.4.180() {
}
up_to_2.4.190() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
echo "Nothing to do for 2.4.190"
INSTALLEDVERSION=2.4.190
}
@@ -921,6 +928,13 @@ up_to_2.4.200() {
INSTALLEDVERSION=2.4.200
}
up_to_2.4.210() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
INSTALLEDVERSION=2.4.210
}
add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1125,35 +1139,40 @@ mkdir -p /nsm/backup/detections-migration/2-4-200
cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
cp /opt/so/conf/idstools/etc/rulecat.conf /nsm/backup/detections-migration/2-4-200
# Backup so-detection index via reindex
echo "Creating sos-backup index template..."
template_result=$(/sbin/so-elasticsearch-query '_index_template/sos-backup' -X PUT \
--retry 5 --retry-delay 15 --retry-all-errors \
-d '{"index_patterns":["sos-backup-*"],"priority":501,"template":{"settings":{"index":{"number_of_replicas":0,"number_of_shards":1}}}}')
if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
python3 /opt/so/conf/soc/so-detections-backup.py
if [[ -z "$template_result" ]] || ! echo "$template_result" | jq -e '.acknowledged == true' > /dev/null 2>&1; then
echo "Error: Failed to create sos-backup index template"
echo "$template_result"
exit 1
fi
# Verify backup by comparing counts
echo "Verifying detection overrides backup..."
es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
--retry 5 --retry-delay 10 --retry-all-errors \
-d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
echo " Error: Failed to query Elasticsearch for override count"
exit 1
}
BACKUP_INDEX="sos-backup-detection-$(date +%Y%m%d-%H%M%S)"
echo "Backing up so-detection index to $BACKUP_INDEX..."
reindex_result=$(/sbin/so-elasticsearch-query '_reindex?wait_for_completion=true' \
--retry 5 --retry-delay 15 --retry-all-errors \
-X POST -d "{\"source\": {\"index\": \"so-detection\"}, \"dest\": {\"index\": \"$BACKUP_INDEX\"}}")
if [[ ! "$es_override_count" =~ ^[0-9]+$ ]]; then
echo " Error: Invalid override count from Elasticsearch: '$es_override_count'"
exit 1
fi
if [[ -z "$reindex_result" ]]; then
echo "Error: Backup of detections failed - no response from Elasticsearch"
exit 1
elif echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
echo "so-detection index does not exist, skipping backup"
backup_override_count=$(find /nsm/backup/detections/repo/*/overrides -type f 2>/dev/null | wc -l)
echo " Elasticsearch overrides: $es_override_count"
echo " Backed up overrides: $backup_override_count"
if [[ "$es_override_count" -gt 0 ]]; then
if [[ "$backup_override_count" -gt 0 ]]; then
echo " Override backup verified successfully"
else
echo " Error: Elasticsearch has $es_override_count overrides but backup has 0 files"
exit 1
fi
else
echo " No overrides to backup"
fi
else
echo "Error: Backup of detections failed"
echo "$reindex_result"
exit 1
echo "SOC Detections backup script not found, skipping detection backup"
fi
}
@@ -1174,12 +1193,11 @@ hash_normalized_file() {
return 1
fi
# Ensure trailing newline for consistent hashing regardless of source file
{ sed -E \
sed -E \
-e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
-e '/^$/d' \
-e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
"$file"; echo; } | sed '/^$/d' | sha256sum | awk '{print $1}'
"$file" | sha256sum | awk '{print $1}'
}
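
For reference, the simplified function hashes a file after stripping leading/trailing whitespace, dropping blank lines, and masking the manager URL, so copies that differ only in whitespace or manager IP hash identically. A sketch over hypothetical stdin input:

```
printf '  --url=http://10.1.2.3:7788  \n\n' \
  | sed -E -e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
        -e '/^$/d' \
        -e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
  | sha256sum | awk '{print $1}'
```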
# Known-default hashes for so-rule-update (ETOPEN ruleset)
@@ -1275,13 +1293,6 @@ custom_found=0
check_config_file "$SO_RULE_UPDATE" "KNOWN_SO_RULE_UPDATE_HASHES" || custom_found=1
check_config_file "$RULECAT_CONF" "KNOWN_RULECAT_CONF_HASHES" || custom_found=1
# Check for ETPRO rules on airgap systems
if [[ $is_airgap -eq 0 ]] && grep -q 'ETPRO ' /nsm/rules/suricata/emerging-all.rules 2>/dev/null; then
echo "ETPRO rules detected on airgap system - custom configuration"
echo "ETPRO rules detected on Airgap in /nsm/rules/suricata/emerging-all.rules" >> /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
custom_found=1
fi
# If no custom configs found, remove syncBlock
if [[ $custom_found -eq 0 ]]; then
echo "idstools migration completed successfully - removing Suricata engine syncBlock"
@@ -1307,7 +1318,6 @@ fi
echo "Removing idstools symlink and scripts..."
rm -rf /usr/sbin/so-idstools*
sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
crontab -l | grep -v 'so-rule-update' | crontab -
# Backup the salt master config & manager pillar before editing it
cp /opt/so/saltstack/local/pillar/minions/$MINIONID.sls /nsm/backup/detections-migration/2-4-200/
@@ -1612,6 +1622,69 @@ verify_latest_update_script() {
fi
}
verify_es_version_compatibility() {
# supported upgrade paths for SO-ES versions
declare -A es_upgrade_map=(
["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
["8.17.3"]="8.18.4 8.18.6 8.18.8"
["8.18.4"]="8.18.6 8.18.8 9.0.8"
["8.18.6"]="8.18.8 9.0.8"
["8.18.8"]="9.0.8"
)
# Elasticsearch MUST upgrade through these versions
declare -A es_to_so_version=(
["8.18.8"]="2.4.190-20251024"
)
# Get current Elasticsearch version
if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
else
echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
exit 160
fi
if ! target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p'); then
# so-yaml.py failed to get the ES version from the upgrade's elasticsearch/defaults.yaml file. They are likely upgrading to an SO version older than 2.4.110, which predates ES version pinning, so it is OK to continue with the upgrade.
# if so-yaml.py failed to get the ES version AND the version we are upgrading to is newer than 2.4.110, then we should bail
if [[ $(cat $UPDATE_DIR/VERSION | cut -d'.' -f3) -gt 110 ]]; then
echo "Couldn't determine the target Elasticsearch version (post soup version) to ensure compatibility with current Elasticsearch version. Exiting"
exit 160
fi
# allow upgrade to version < 2.4.110 without checking ES version compatibility
return 0
fi
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
# supported upgrade
return 0
else
compatible_versions=${es_upgrade_map[$es_version]}
next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
echo -e "\n##############################################################################################################################\n"
echo -e "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION).\n"
if [[ $is_airgap -eq 0 ]]; then
echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
else
echo "You can use the following soup command to upgrade to $next_step_so_version;"
echo -e " sudo BRANCH=$next_step_so_version soup\n"
fi
echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
echo -e "\n###############################################################################################################################\n"
exit 160
fi
}
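
A standalone sketch of the check above with the map trimmed to a single entry, showing how a supported hop passes and anything else exits with the new 160 code:

```
declare -A es_upgrade_map=( ["8.18.8"]="9.0.8" )
es_version="8.18.8"
target_es_version="9.0.8"
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
  echo "supported: $es_version -> $target_es_version"
else
  echo "unsupported: update to an intermediate Security Onion release first" >&2
  exit 160
fi
```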
# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1708,6 +1781,8 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script
verify_es_version_compatibility
echo "Let's see if we need to update Security Onion."
upgrade_check
upgrade_space
@@ -1873,7 +1948,7 @@ main() {
if [[ $is_airgap -eq 0 ]]; then
echo ""
echo "Cleaning repos on remote Security Onion nodes."
salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' cmd.run "dnf clean all"
salt -C 'not *_eval and not *_manager and not *_managersearch and not *_standalone and G@os:CentOS' cmd.run "yum clean all"
echo ""
fi
fi

View File

@@ -2653,15 +2653,19 @@ soc:
thresholdColorRatioMax: 1
availableModels:
- id: sonnet-4.5
displayName: Claude Sonnet 4.5 ($$$)
origin: USA
displayName: Claude Sonnet 4.5
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: gptoss-120b
displayName: GPT-OSS 120B
contextLimitSmall: 128000
contextLimitLarge: 128000
lowBalanceColorAlert: 500000
enabled: true
- id: qwen-235b
displayName: QWEN 235B ($)
origin: China
displayName: QWEN 235B
contextLimitSmall: 256000
contextLimitLarge: 256000
lowBalanceColorAlert: 500000

View File

@@ -6,7 +6,6 @@
# This script queries Elasticsearch for Custom Detections and all Overrides,
# and git commits them to disk at $OUTPUT_DIR
import argparse
import os
import subprocess
import json
@@ -19,10 +18,10 @@ from datetime import datetime
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Constants
DEFAULT_INDEX = "so-detection"
DEFAULT_OUTPUT_DIR = "/nsm/backup/detections/repo"
ES_URL = "https://localhost:9200/so-detection/_search"
QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
OUTPUT_DIR = "/nsm/backup/detections/repo"
AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"
def get_auth_credentials(auth_file):
@@ -31,10 +30,9 @@ def get_auth_credentials(auth_file):
if line.startswith('user ='):
return line.split('=', 1)[1].strip().replace('"', '')
def query_elasticsearch(query, auth, index):
url = f"https://localhost:9200/{index}/_search"
def query_elasticsearch(query, auth):
headers = {"Content-Type": "application/json"}
response = requests.get(url, headers=headers, data=query, auth=auth, verify=False)
response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
response.raise_for_status()
return response.json()
@@ -49,12 +47,12 @@ def save_content(hit, base_folder, subfolder="", extension="txt"):
f.write(content)
return file_path
def save_overrides(hit, output_dir):
def save_overrides(hit):
so_detection = hit["_source"]["so_detection"]
public_id = so_detection["publicId"]
overrides = so_detection["overrides"]
language = so_detection["language"]
folder = os.path.join(output_dir, language, "overrides")
folder = os.path.join(OUTPUT_DIR, language, "overrides")
os.makedirs(folder, exist_ok=True)
extension = "yaml" if language == "sigma" else "txt"
file_path = os.path.join(folder, f"{public_id}.{extension}")
@@ -62,20 +60,20 @@ def save_overrides(hit, output_dir):
f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
return file_path
def ensure_git_repo(output_dir):
if not os.path.isdir(os.path.join(output_dir, '.git')):
def ensure_git_repo():
if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
subprocess.run(["git", "-C", output_dir, "init"], check=True)
subprocess.run(["git", "-C", output_dir, "remote", "add", "origin", "default"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)
def commit_changes(output_dir):
ensure_git_repo(output_dir)
subprocess.run(["git", "-C", output_dir, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", output_dir, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", output_dir, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", output_dir, "status"], capture_output=True, text=True)
def commit_changes():
ensure_git_repo()
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
print(status_result.stdout)
commit_result = subprocess.run(["git", "-C", output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
if commit_result.returncode == 1:
print("No changes to commit.")
elif commit_result.returncode == 0:
@@ -83,41 +81,29 @@ def commit_changes(output_dir):
else:
commit_result.check_returncode()
def parse_args():
parser = argparse.ArgumentParser(description="Backup custom detections and overrides from Elasticsearch")
parser.add_argument("--output", "-o", default=DEFAULT_OUTPUT_DIR,
help=f"Output directory for backups (default: {DEFAULT_OUTPUT_DIR})")
parser.add_argument("--index", "-i", default=DEFAULT_INDEX,
help=f"Elasticsearch index to query (default: {DEFAULT_INDEX})")
return parser.parse_args()
def main():
args = parse_args()
output_dir = args.output
index = args.index
try:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backing up Custom Detections and all Overrides to {output_dir} - {timestamp}\n")
os.makedirs(output_dir, exist_ok=True)
print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
os.makedirs(OUTPUT_DIR, exist_ok=True)
auth_credentials = get_auth_credentials(AUTH_FILE)
username, password = auth_credentials.split(':', 1)
auth = HTTPBasicAuth(username, password)
# Query and save custom detections
detections = query_elasticsearch(QUERY_DETECTIONS, auth, index)["hits"]["hits"]
detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
for hit in detections:
save_content(hit, output_dir, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
# Query and save overrides
overrides = query_elasticsearch(QUERY_OVERRIDES, auth, index)["hits"]["hits"]
overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
for hit in overrides:
save_overrides(hit, output_dir)
commit_changes(output_dir)
save_overrides(hit)
commit_changes()
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backup Completed - {timestamp}")
except Exception as e:

View File

@@ -57,12 +57,12 @@ class TestBackupScript(unittest.TestCase):
mock_response.json.return_value = {'hits': {'hits': []}}
mock_response.raise_for_status = MagicMock()
mock_get.return_value = mock_response
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX)
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
self.assertEqual(response, {'hits': {'hits': []}})
mock_get.assert_called_once_with(
f"https://localhost:9200/{ds.DEFAULT_INDEX}/_search",
ds.ES_URL,
headers={"Content-Type": "application/json"},
data=ds.QUERY_DETECTIONS,
auth=self.auth,
@@ -81,7 +81,7 @@ class TestBackupScript(unittest.TestCase):
@patch('os.makedirs')
@patch('builtins.open', new_callable=mock_open)
def test_save_overrides(self, mock_file, mock_makedirs):
file_path = ds.save_overrides(self.mock_override_hit, self.output_dir)
file_path = ds.save_overrides(self.mock_override_hit)
expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
self.assertEqual(file_path, expected_path)
mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)
@@ -90,9 +90,9 @@ class TestBackupScript(unittest.TestCase):
@patch('subprocess.run')
def test_ensure_git_repo(self, mock_run):
mock_run.return_value = MagicMock(returncode=0)
ds.ensure_git_repo(self.output_dir)
ds.ensure_git_repo()
mock_run.assert_has_calls([
call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
call(["git", "-C", self.output_dir, "init"], check=True),
@@ -106,9 +106,9 @@ class TestBackupScript(unittest.TestCase):
mock_commit_result = MagicMock(returncode=1)
# Ensure sufficient number of MagicMock instances for each subprocess.run call
mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]
print("Running test_commit_changes...")
ds.commit_changes(self.output_dir)
ds.commit_changes()
print("Finished test_commit_changes.")
mock_run.assert_has_calls([
@@ -120,45 +120,39 @@ class TestBackupScript(unittest.TestCase):
])
@patch('builtins.print')
@patch.object(ds, 'commit_changes')
@patch.object(ds, 'save_overrides')
@patch.object(ds, 'save_content')
@patch.object(ds, 'query_elasticsearch')
@patch.object(ds, 'get_auth_credentials')
@patch('so-detections-backup.commit_changes')
@patch('so-detections-backup.save_overrides')
@patch('so-detections-backup.save_content')
@patch('so-detections-backup.query_elasticsearch')
@patch('so-detections-backup.get_auth_credentials')
@patch('os.makedirs')
@patch.object(ds, 'parse_args')
def test_main(self, mock_parse_args, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
mock_args = MagicMock()
mock_args.output = self.output_dir
mock_args.index = ds.DEFAULT_INDEX
mock_parse_args.return_value = mock_args
def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
mock_get_auth.return_value = self.auth_credentials
mock_query.side_effect = [
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
]
with patch('datetime.datetime') as mock_datetime:
mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
ds.main()
mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
mock_query.assert_has_calls([
call(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX),
call(ds.QUERY_OVERRIDES, self.auth, ds.DEFAULT_INDEX)
call(ds.QUERY_DETECTIONS, self.auth),
call(ds.QUERY_OVERRIDES, self.auth)
])
mock_save_content.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
"yaml"
)
mock_save_overrides.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}},
self.output_dir
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
)
mock_commit.assert_called_once_with(self.output_dir)
mock_commit.assert_called_once()
mock_print.assert_called()
if __name__ == '__main__':

View File

@@ -708,9 +708,6 @@ soc:
- field: displayName
label: Display Name
required: True
- field: origin
label: Country of Origin for the Model Training
required: false
- field: contextLimitSmall
label: Context Limit (Small)
forcedType: int