Mirror of https://github.com/Security-Onion-Solutions/securityonion.git
Synced 2026-01-12 03:03:09 +01:00

Compare commits: reyesj2/el ... reyesj2/el (33 commits)
Commits: 452d864b88, cb1e59fa49, 588aa435ec, 752c764066, af604c2ea8,
893aaafa1b, 33c34cdeca, fd1596b3a0, b05de22f58, 1d63269883, dd8027480b,
c45bd77e44, 032e0abd61, 8509d1e454, ddd6935e50, 5588a56b24, 12aed6e280,
b2a469e08c, 285b0e4af9, f9edfd6391, f6301bc3e5, 6c5c176b7d, c6d52b5eb1,
7cac528389, 6fe817ca4a, cb9a6fac25, a945768251, c6646e3821, 99dc72cece,
94694d394e, 03dd746601, eec3373ae7, db45ce07ed
.github/DISCUSSION_TEMPLATE/2-4.yml (vendored, 1 change)
@@ -33,6 +33,7 @@ body:
         - 2.4.180
         - 2.4.190
         - 2.4.200
+        - 2.4.210
         - Other (please provide detail below)
   validations:
     required: true
@@ -1,17 +1,17 @@
-### 2.4.190-20251024 ISO image released on 2025/10/24
+### 2.4.200-20251216 ISO image released on 2025/12/16

 ### Download and Verify

-2.4.190-20251024 ISO image:
-https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
+2.4.200-20251216 ISO image:
+https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso

-MD5: 25358481FB876226499C011FC0710358
-SHA1: 0B26173C0CE136F2CA40A15046D1DFB78BCA1165
-SHA256: 4FD9F62EDA672408828B3C0C446FE5EA9FF3C4EE8488A7AB1101544A3C487872
+MD5: 07B38499952D1F2FD7B5AF10096D0043
+SHA1: 7F3A26839CA3CAEC2D90BB73D229D55E04C7D370
+SHA256: 8D3AC735873A2EA8527E16A6A08C34BD5018CBC0925AC4096E15A0C99F591D5F

 Signature for ISO image:
-https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
+https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig

 Signing key:
 https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.

 Download the signature file for the ISO:
 ```
-wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
+wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
 ```

 Download the ISO image:
 ```
-wget https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
+wget https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
 ```

 Verify the downloaded ISO image using the signature file:
 ```
-gpg --verify securityonion-2.4.190-20251024.iso.sig securityonion-2.4.190-20251024.iso
+gpg --verify securityonion-2.4.200-20251216.iso.sig securityonion-2.4.200-20251216.iso
 ```

 The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
 ```
-gpg: Signature made Thu 23 Oct 2025 07:21:46 AM EDT using RSA key ID FE507013
+gpg: Signature made Mon 15 Dec 2025 05:24:11 PM EST using RSA key ID FE507013
 gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
 gpg: WARNING: This key is not certified with a trusted signature!
 gpg: There is no indication that the signature belongs to the owner.
 ```
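Taken together, the steps in the release notes form a short pipeline. A minimal end-to-end sketch, using the URLs quoted above (the `gpg --import` step is implied by the notes rather than shown in them):

```
# End-to-end verification sketch built from the release-notes commands.
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
gpg --import KEYS   # import the Security Onion Solutions signing key
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
gpg --verify securityonion-2.4.200-20251216.iso.sig securityonion-2.4.200-20251216.iso
```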
@@ -129,6 +129,7 @@ if [[ $EXCLUDE_STARTUP_ERRORS == 'Y' ]]; then
     EXCLUDED_ERRORS="$EXCLUDED_ERRORS|responded with status-code 503" # telegraf getting 503 from ES during startup
     EXCLUDED_ERRORS="$EXCLUDED_ERRORS|process_cluster_event_timeout_exception" # logstash waiting for elasticsearch to start
     EXCLUDED_ERRORS="$EXCLUDED_ERRORS|not configured for GeoIP" # SO does not bundle the maxminddb with Zeek
+    EXCLUDED_ERRORS="$EXCLUDED_ERRORS|HTTP 404: Not Found" # Salt loops until Kratos returns 200, during startup Kratos may not be ready
 fi

 if [[ $EXCLUDE_FALSE_POSITIVE_ERRORS == 'Y' ]]; then
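EXCLUDED_ERRORS accumulates a grep alternation, one pattern per known-benign startup error. The consuming grep sits outside this hunk; a minimal sketch of how such a pattern chain filters a log stream, assuming it ultimately feeds grep -vE:

```
# Sketch (assumption: the full script applies the alternation with grep -vE).
EXCLUDED_ERRORS="responded with status-code 503"
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|HTTP 404: Not Found"

printf '%s\n' \
  'salt: HTTP 404: Not Found' \
  'zeek: disk is full' |
  grep -vE "$EXCLUDED_ERRORS"   # only 'zeek: disk is full' survives the filter
```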
@@ -299,6 +299,19 @@ elasticsearch:
           hot:
             actions: {}
             min_age: 0ms
+  sos-backup:
+    index_sorting: false
+    index_template:
+      composed_of: []
+      ignore_missing_component_templates: []
+      index_patterns:
+        - sos-backup-*
+      priority: 501
+      template:
+        settings:
+          index:
+            number_of_replicas: 0
+            number_of_shards: 1
   so-assistant-chat:
     index_sorting: false
     index_template:
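Once this pillar renders, the template can be checked directly in Elasticsearch. A quick sanity check, assuming so-elasticsearch-query wraps curl against the local cluster (its curl-style flags elsewhere in this changeset suggest as much):

```
# Confirm the sos-backup template exists with the expected shard settings.
/sbin/so-elasticsearch-query '_index_template/sos-backup' |
  jq '.index_templates[0].index_template.template.settings'
```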
@@ -844,53 +857,11 @@ elasticsearch:
      composed_of:
        - agent-mappings
        - dtc-agent-mappings
        - base-mappings
        - dtc-base-mappings
        - client-mappings
        - dtc-client-mappings
        - container-mappings
        - destination-mappings
        - dtc-destination-mappings
        - pb-override-destination-mappings
        - dll-mappings
        - dns-mappings
        - dtc-dns-mappings
        - ecs-mappings
        - dtc-ecs-mappings
        - error-mappings
        - event-mappings
        - dtc-event-mappings
        - file-mappings
        - dtc-file-mappings
        - group-mappings
        - host-mappings
        - dtc-host-mappings
        - http-mappings
        - dtc-http-mappings
        - log-mappings
        - metadata-mappings
        - network-mappings
        - dtc-network-mappings
        - observer-mappings
        - dtc-observer-mappings
        - organization-mappings
        - package-mappings
        - process-mappings
        - dtc-process-mappings
        - related-mappings
        - rule-mappings
        - dtc-rule-mappings
        - server-mappings
        - service-mappings
        - dtc-service-mappings
        - source-mappings
        - dtc-source-mappings
        - pb-override-source-mappings
        - threat-mappings
        - tls-mappings
        - url-mappings
        - user_agent-mappings
        - dtc-user_agent-mappings
        - common-settings
        - common-dynamic-mappings
      data_stream:
@@ -1,9 +1,90 @@
 {
-  "description" : "kratos",
-  "processors" : [
-    {"set":{"field":"audience","value":"access","override":false,"ignore_failure":true}},
-    {"set":{"field":"event.dataset","ignore_empty_value":true,"ignore_failure":true,"value":"kratos.{{{audience}}}","media_type":"text/plain"}},
-    {"set":{"field":"event.action","ignore_failure":true,"copy_from":"msg" }},
-    { "pipeline": { "name": "common" } }
-  ]
-}
+  "description": "kratos",
+  "processors": [
+    {
+      "set": {
+        "field": "audience",
+        "value": "access",
+        "override": false,
+        "ignore_failure": true
+      }
+    },
+    {
+      "set": {
+        "field": "event.dataset",
+        "ignore_empty_value": true,
+        "ignore_failure": true,
+        "value": "kratos.{{{audience}}}",
+        "media_type": "text/plain"
+      }
+    },
+    {
+      "set": {
+        "field": "event.action",
+        "ignore_failure": true,
+        "copy_from": "msg"
+      }
+    },
+    {
+      "rename": {
+        "field": "http_request",
+        "target_field": "http.request",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http_response",
+        "target_field": "http.response",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http.request.path",
+        "target_field": "http.uri",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http.request.method",
+        "target_field": "http.method",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http.request.method",
+        "target_field": "http.method",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http.request.query",
+        "target_field": "http.query",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "rename": {
+        "field": "http.request.headers.user-agent",
+        "target_field": "http.useragent",
+        "ignore_failure": true,
+        "ignore_missing": true
+      }
+    },
+    {
+      "pipeline": {
+        "name": "common"
+      }
+    }
+  ]
+}
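Elasticsearch's _simulate API is a convenient way to watch the new rename processors act on a document without indexing anything. A sketch, assuming the pipeline is loaded under the id kratos (and that the referenced common pipeline exists on the cluster):

```
# Feed a fake Kratos audit doc through the pipeline and inspect the result.
/sbin/so-elasticsearch-query '_ingest/pipeline/kratos/_simulate' -X POST \
  -d '{"docs":[{"_source":{"msg":"authenticated","http_request":{"path":"/login","method":"GET"}}}]}' |
  jq '.docs[0].doc._source'
```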
@@ -75,6 +75,7 @@ kratosconfig:
     - group: 928
     - mode: 600
     - template: jinja
+    - show_changes: False
     - defaults:
         KRATOSMERGED: {{ KRATOSMERGED }}
@@ -46,6 +46,7 @@ kratos:
       ui_url: https://URL_BASE/
     login:
       ui_url: https://URL_BASE/login/
+      lifespan: 60m
     error:
       ui_url: https://URL_BASE/login/
     registration:
@@ -182,6 +182,10 @@ kratos:
           global: True
           advanced: True
           helpLink: kratos.html
+        lifespan:
+          description: Defines the duration that a login form will remain valid.
+          global: True
+          helpLink: kratos.html
         error:
           ui_url:
             description: User accessible URL containing the Security Onion login page. Leave as default to ensure proper operation.
@@ -133,7 +133,7 @@ function getinstallinfo() {
         return 1
     fi

-    source <(echo $INSTALLVARS)
+    export $(echo "$INSTALLVARS" | xargs)
     if [ $? -ne 0 ]; then
         log "ERROR" "Failed to source install variables"
         return 1
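Beyond the mechanics, the replacement changes scoping: source sets the variables only in the current shell, while export $(... | xargs) also passes them to child processes. An illustrative comparison (the INSTALLVARS content here is invented; the real value comes from the installer):

```
INSTALLVARS='FOO=1 BAR=2'   # invented example content

source <(echo $INSTALLVARS)                  # old: current shell only
bash -c 'echo "child sees: ${FOO:-unset}"'   # prints: child sees: unset

export $(echo "$INSTALLVARS" | xargs)        # new: exported to children too
bash -c 'echo "child sees: ${FOO:-unset}"'   # prints: child sees: 1
```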
@@ -90,6 +90,9 @@ check_err() {
         160)
             echo 'Incompatiable Elasticsearch upgrade'
             ;;
+        161)
+            echo 'Required intermediate Elasticsearch upgrade not complete'
+            ;;
         *)
             echo 'Unhandled error'
             echo "$err_msg"
@@ -648,6 +651,10 @@ post_to_2.4.200() {
 }

+post_to_2.4.210() {
+    echo "Rolling over Kratos index to apply new index template"
+
+    rollover_index "logs-kratos-so"
+
+    echo "Regenerating Elastic Agent Installers"
+    /sbin/so-elastic-agent-gen-installers
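rollover_index is a soup helper defined elsewhere in the script; against the cluster it amounts to the standard _rollover call, which cuts a new backing index so the updated template applies to newly written documents. A sketch:

```
# Assumption: logs-kratos-so resolves to an alias/data stream on this cluster.
/sbin/so-elasticsearch-query 'logs-kratos-so/_rollover' -X POST | jq .
```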
@@ -1139,40 +1146,35 @@ mkdir -p /nsm/backup/detections-migration/2-4-200
 cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
 cp /opt/so/conf/idstools/etc/rulecat.conf /nsm/backup/detections-migration/2-4-200

-if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
-    python3 /opt/so/conf/soc/so-detections-backup.py
+# Backup so-detection index via reindex
+echo "Creating sos-backup index template..."
+template_result=$(/sbin/so-elasticsearch-query '_index_template/sos-backup' -X PUT \
+    --retry 5 --retry-delay 15 --retry-all-errors \
+    -d '{"index_patterns":["sos-backup-*"],"priority":501,"template":{"settings":{"index":{"number_of_replicas":0,"number_of_shards":1}}}}')

-    # Verify backup by comparing counts
-    echo "Verifying detection overrides backup..."
-    es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
-        --retry 5 --retry-delay 10 --retry-all-errors \
-        -d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
-        echo " Error: Failed to query Elasticsearch for override count"
-        exit 1
-    }
+if [[ -z "$template_result" ]] || ! echo "$template_result" | jq -e '.acknowledged == true' > /dev/null 2>&1; then
+    echo "Error: Failed to create sos-backup index template"
+    echo "$template_result"
+    exit 1
+fi

-    if [[ ! "$es_override_count" =~ ^[0-9]+$ ]]; then
-        echo " Error: Invalid override count from Elasticsearch: '$es_override_count'"
-        exit 1
-    fi
+BACKUP_INDEX="sos-backup-detection-$(date +%Y%m%d-%H%M%S)"
+echo "Backing up so-detection index to $BACKUP_INDEX..."
+reindex_result=$(/sbin/so-elasticsearch-query '_reindex?wait_for_completion=true' \
+    --retry 5 --retry-delay 15 --retry-all-errors \
+    -X POST -d "{\"source\": {\"index\": \"so-detection\"}, \"dest\": {\"index\": \"$BACKUP_INDEX\"}}")

-    backup_override_count=$(find /nsm/backup/detections/repo/*/overrides -type f 2>/dev/null | wc -l)
-    echo " Elasticsearch overrides: $es_override_count"
-    echo " Backed up overrides: $backup_override_count"
-    if [[ "$es_override_count" -gt 0 ]]; then
-        if [[ "$backup_override_count" -gt 0 ]]; then
-            echo " Override backup verified successfully"
-        else
-            echo " Error: Elasticsearch has $es_override_count overrides but backup has 0 files"
-            exit 1
-        fi
-    else
-        echo " No overrides to backup"
-    fi
+if [[ -z "$reindex_result" ]]; then
+    echo "Error: Backup of detections failed - no response from Elasticsearch"
+    exit 1
+elif echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
+    echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
+elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
+    echo "so-detection index does not exist, skipping backup"
 else
-    echo "SOC Detections backup script not found, skipping detection backup"
+    echo "Error: Backup of detections failed"
+    echo "$reindex_result"
+    exit 1
 fi

 }
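Not part of the commit: once the reindex returns, the copy can be double-checked by comparing document counts between the source index and the backup index it just created:

```
# Run on the manager, where so-elasticsearch-query is available.
src=$(/sbin/so-elasticsearch-query 'so-detection/_count' | jq -r '.count')
dst=$(/sbin/so-elasticsearch-query "$BACKUP_INDEX/_count" | jq -r '.count')
if [[ "$src" == "$dst" ]]; then
  echo "backup verified: $dst documents"
else
  echo "count mismatch: source=$src backup=$dst"
fi
```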
@@ -1193,11 +1195,12 @@ hash_normalized_file() {
         return 1
     fi

-    sed -E \
+    # Ensure trailing newline for consistent hashing regardless of source file
+    { sed -E \
         -e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
         -e '/^$/d' \
         -e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
-        "$file" | sha256sum | awk '{print $1}'
+        "$file"; echo; } | sed '/^$/d' | sha256sum | awk '{print $1}'
 }

 # Known-default hashes for so-rule-update (ETOPEN ruleset)
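The { ...; echo; } | sed '/^$/d' wrapper exists because sha256sum is byte-exact: a file missing its final newline would otherwise hash differently from an identical file that has one. A quick demonstration:

```
printf 'hello'   | sha256sum   # no trailing newline
printf 'hello\n' | sha256sum   # trailing newline: different digest

# Appending echo and stripping empty lines normalizes both to the same bytes:
{ printf 'hello'; echo; }   | sed '/^$/d' | sha256sum
{ printf 'hello\n'; echo; } | sed '/^$/d' | sha256sum   # identical digests
```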
@@ -1293,6 +1296,13 @@ custom_found=0
 check_config_file "$SO_RULE_UPDATE" "KNOWN_SO_RULE_UPDATE_HASHES" || custom_found=1
 check_config_file "$RULECAT_CONF" "KNOWN_RULECAT_CONF_HASHES" || custom_found=1

+# Check for ETPRO rules on airgap systems
+if [[ $is_airgap -eq 0 ]] && grep -q 'ETPRO ' /nsm/rules/suricata/emerging-all.rules 2>/dev/null; then
+    echo "ETPRO rules detected on airgap system - custom configuration"
+    echo "ETPRO rules detected on Airgap in /nsm/rules/suricata/emerging-all.rules" >> /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
+    custom_found=1
+fi
+
 # If no custom configs found, remove syncBlock
 if [[ $custom_found -eq 0 ]]; then
     echo "idstools migration completed successfully - removing Suricata engine syncBlock"
@@ -1318,6 +1328,7 @@ fi
 echo "Removing idstools symlink and scripts..."
 rm -rf /usr/sbin/so-idstools*
 sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
+crontab -l | grep -v 'so-rule-update' | crontab -

 # Backup the salt master config & manager pillar before editing it
 cp /opt/so/saltstack/local/pillar/minions/$MINIONID.sls /nsm/backup/detections-migration/2-4-200/
@@ -1625,6 +1636,8 @@ verify_latest_update_script() {

 verify_es_version_compatibility() {

+    local es_required_version_statefile="/opt/so/state/so_es_required_upgrade_version.txt"
+    local es_verification_script="/tmp/so_intermediate_upgrade_verification.sh"
     # supported upgrade paths for SO-ES versions
     declare -A es_upgrade_map=(
         ["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
@@ -1661,6 +1674,26 @@ verify_es_version_compatibility() {
     fi

+    # if this statefile exists then we have done an intermediate upgrade and we need to ensure that ALL ES nodes have been upgraded to the version in the statefile before allowing soup to continue
+    if [[ -f "$es_required_version_statefile" ]]; then
+        # required so verification script should have already been created
+        if [[ ! -f "$es_verification_script" ]]; then
+            create_intermediate_upgrade_verification_script $es_verification_script
+        fi
+
+        # create script using version in statefile
+        local es_required_version_statefile_value=$(cat $es_required_version_statefile)
+        timeout --foreground 3600 bash "$es_verification_script" "$es_required_version_statefile_value" "$es_required_version_statefile"
+        if [[ $? -ne 0 ]]; then
+            echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+            echo "A previous required intermediate Elasticsearch upgrade to $es_required_version_statefile_value has yet to successfully complete across the grid. Please allow time for all Searchnodes/Heavynodes to have upgraded Elasticsearch to $es_required_version_statefile_value before running soup again to avoid potential data loss!"
+            echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+            exit 161
+        fi
+    fi
+
     if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
         # supported upgrade
@@ -1671,20 +1704,171 @@ verify_es_version_compatibility() {
        echo -e "\n##############################################################################################################################\n"
        echo -e "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION).\n"

        echo "${compatible_versions##* }" > "$es_required_version_statefile"

        # We expect to upgrade to the latest compatiable minor version of ES
        create_intermediate_upgrade_verification_script $es_verification_script

        if [[ $is_airgap -eq 0 ]]; then
            echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
            echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
            echo -e "\n##############################################################################################################################\n"
            exit 160
        else
            echo "You can use the following soup command to upgrade to $next_step_so_version;"
            echo -e "  sudo BRANCH=$next_step_so_version soup\n"
            # preserve BRANCH value if set originally
            if [[ -n "$BRANCH" ]]; then
                local originally_requested_so_version="$BRANCH"
            else
                local originally_requested_so_version="2.4/main"
            fi

            echo "Starting automated intermediate upgrade to $next_step_so_version."
            echo "After completion, the system will automatically attempt to upgrade to the latest version."
            echo -e "\n##############################################################################################################################\n"
            exec bash -c "BRANCH=$next_step_so_version soup -y && BRANCH=$next_step_so_version soup -y && \
                echo -e \"\n##############################################################################################################################\n\" && \
                echo -e \"Verifying Elasticsearch was successfully upgraded to ${compatible_versions##* } across the grid. This part can take a while as Searchnodes/Heavynodes sync up with the Manager! \n\nOnce verification completes the next soup will begin automatically. If verification takes longer than 1 hour it will stop waiting and your grid will remain at $next_step_so_version. Allowing for all Searchnodes/Heavynodes to upgrade Elasticsearch to the required version on their own time.\n\" \
                && timeout --foreground 3600 bash /tmp/so_intermediate_upgrade_verification.sh ${compatible_versions##* } $es_required_version_statefile && \
                echo -e \"\n##############################################################################################################################\n\" \
                && BRANCH=$originally_requested_so_version soup -y && BRANCH=$originally_requested_so_version soup -y"
        fi
        echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
        echo -e "\n###############################################################################################################################\n"
        exit 160
    fi
}

create_intermediate_upgrade_verification_script() {
    # After an intermediate upgrade, verify that ALL nodes running Elasticsearch are at the expected version BEFORE proceeding to the next upgrade step. This is a CRITICAL step
    local verification_script="$1"

    cat << 'EOF' > "$verification_script"
#!/bin/bash

SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE="/root/so_intermediate_upgrade_verification_failures.log"
CURRENT_TIME=$(date +%Y%m%d.%H%M%S)
EXPECTED_ES_VERSION="$1"

if [[ -z "$EXPECTED_ES_VERSION" ]]; then
    echo -e "\nExpected Elasticsearch version not provided. Usage: $0 <expected_es_version>"
    exit 1
fi

if [[ -f "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE" ]]; then
    mv "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE" "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE.$CURRENT_TIME"
fi

check_heavynodes_es_version() {
    # Check if heavynodes are in this grid
    if ! salt-key -l accepted | grep -q 'heavynode$'; then
        # No heavynodes, skip version check
        echo "No heavynodes detected in this Security Onion deployment. Skipping heavynode Elasticsearch version verification."
        return 0
    fi

    echo -e "\nOne or more heavynodes detected. Verifying their Elasticsearch versions."

    local retries=20
    local retry_count=0
    local delay=180
    local success=1
    while [[ $retry_count -lt $retries ]]; do
        # keep stderr with variable for logging
        heavynode_versions=$(salt -C 'G@role:so-heavynode' cmd.run 'so-elasticsearch-query / --retry 3 --retry-delay 10 | jq ".version.number"' shell=/bin/bash --out=json 2>&1)
        local exit_status=$?

        # Check that all heavynodes returned good data
        if [[ $exit_status -ne 0 ]]; then
            echo "Failed to retrieve Elasticsearch version from one or more heavynodes... Retrying in $delay seconds. Attempt $((retry_count + 1)) of $retries."
            ((retry_count++))
            sleep $delay
            continue
        else
            if echo "$heavynode_versions" | jq -s --arg expected "\"$EXPECTED_ES_VERSION\"" --exit-status 'all(.[]; . | to_entries | all(.[]; .value == $expected))' > /dev/null; then
                echo -e "\nAll heavynodes are at the expected Elasticsearch version $EXPECTED_ES_VERSION."
                return 0
            else
                echo "One or more heavynodes is not at the expected Elasticsearch version $EXPECTED_ES_VERSION. Rechecking in $delay seconds. Attempt $((retry_count + 1)) of $retries."
                ((retry_count++))
                sleep $delay
                continue
            fi
        fi
    done

    echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
    echo "One or more heavynodes is not at the expected Elasticsearch version $EXPECTED_ES_VERSION."
    echo "Current versions:"
    echo "$heavynode_versions" | jq -s 'add'
    echo "$heavynode_versions" | jq -s 'add' >> "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE"
    echo -e "\n Stopping automatic upgrade to latest Security Onion version. Heavynodes must ALL be at Elasticsearch version $EXPECTED_ES_VERSION before proceeding with the next upgrade step to avoid potential data loss!"
    echo -e "\n Heavynodes will upgrade themselves to Elasticsearch $EXPECTED_ES_VERSION on their own, but this process can take a long time depending on network link between Manager and Heavynodes."
    echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"

    return 1
}

check_searchnodes_es_version() {
    local retries=20
    local retry_count=0
    local delay=180
    local success=1

    while [[ $retry_count -lt $retries ]]; do
        # keep stderr with variable for logging
        cluster_versions=$(so-elasticsearch-query _nodes/_all/version --retry 5 --retry-delay 10 2>&1)
        local exit_status=$?

        if [[ $exit_status -ne 0 ]]; then
            echo "Failed to retrieve Elasticsearch versions from searchnodes... Retrying in $delay seconds. Attempt $((retry_count + 1)) of $retries."
            ((retry_count++))
            sleep $delay
            continue
        else
            if echo "$cluster_versions" | jq --arg expected "$EXPECTED_ES_VERSION" --exit-status '.nodes | to_entries | all(.[].value.version; . == $expected)' > /dev/null; then
                echo "All Searchnodes are at the expected Elasticsearch version $EXPECTED_ES_VERSION."
                return 0
            else
                echo "One or more Searchnodes is not at the expected Elasticsearch version $EXPECTED_ES_VERSION. Rechecking in $delay seconds. Attempt $((retry_count + 1)) of $retries."
                ((retry_count++))
                sleep $delay
                continue
            fi
        fi
    done

    echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
    echo "One or more Searchnodes is not at the expected Elasticsearch version $EXPECTED_ES_VERSION."
    echo "Current versions:"
    echo "$cluster_versions" | jq '.nodes | to_entries | map({(.value.name): .value.version}) | sort | add'
    echo "$cluster_versions" >> "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE"
    echo -e "\nStopping automatic upgrade to latest version. Searchnodes must ALL be at Elasticsearch version $EXPECTED_ES_VERSION before proceeding with the next upgrade step to avoid potential data loss!"
    echo -e "\nSearchnodes will upgrade themselves to Elasticsearch $EXPECTED_ES_VERSION on their own, but this process can take a while depending on cluster size / network link between Manager and Searchnodes."
    echo -e "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"

    echo "$cluster_versions" > "$SOUP_INTERMEDIATE_UPGRADE_FAILURES_LOG_FILE"

    return 1
}

# Need to add a check for heavynodes and ensure all heavynodes get their own "cluster" upgraded before moving on to final upgrade.
check_searchnodes_es_version || exit 1
check_heavynodes_es_version || exit 1

# Remove required version state file after successful verification
rm -f "$2"

exit 0
EOF
}
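The jq predicates in the generated script are dense; here is how the searchnode check behaves against a trimmed _nodes/_all/version response (sample data invented for illustration):

```
# Invented two-node sample; the real JSON comes from _nodes/_all/version.
cluster_versions='{"nodes":{"a1":{"name":"sn1","version":"8.18.8"},"b2":{"name":"sn2","version":"8.17.3"}}}'

echo "$cluster_versions" |
  jq --arg expected "8.18.8" --exit-status \
     '.nodes | to_entries | all(.[].value.version; . == $expected)'
# prints false and exits 1, because sn2 still lags behind
```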

# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
    if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1948,7 +2132,7 @@ main() {
         if [[ $is_airgap -eq 0 ]]; then
             echo ""
             echo "Cleaning repos on remote Security Onion nodes."
-            salt -C 'not *_eval and not *_manager and not *_managersearch and not *_standalone and G@os:CentOS' cmd.run "yum clean all"
+            salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' cmd.run "dnf clean all"
             echo ""
         fi
     fi
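The new compound target collapses the two manager patterns into one glob and switches the OS grain from CentOS to OEL. To preview which minions a compound matcher selects before running anything destructive, a harmless test.ping works (sketch, assuming a reachable salt master):

```
salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' test.ping --out=txt
```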
@@ -0,0 +1,91 @@ (new file)
Onion AI Session Report
==========================

## Session Details

**Session ID:** {{.Session.SessionId}}

**Title:** {{.Session.Title}}

**Created:** {{formatDateTime "Mon Jan 02 15:04:05 -0700 2006" .Session.CreateTime}}

**Updated:** {{formatDateTime "Mon Jan 02 15:04:05 -0700 2006" .Session.UpdateTime}}

{{ if .Session.DeleteTime }}
**Deleted:** {{ formatDateTime "Mon Jan 02 15:04:05 -0700 2006" .Session.DeleteTime}}
{{ end }}

**User ID:** {{getUserDetail "email" .Session.UserId}}

## Session Usage

**Total Input Tokens** {{.Session.Usage.TotalInputTokens}}

**Total Output Tokens** {{.Session.Usage.TotalOutputTokens}}

**Total Credits:** {{.Session.Usage.TotalCredits}}

**Total Messages:** {{.Session.Usage.TotalMessages}}

## Messages

{{ range $index, $msg := sortAssistantMessages "CreateTime" "asc" .History }}
#### Message {{ add $index 1 }}

**Created:** {{formatDateTime "Mon Jan 02 15:04:05 -0700 2006" $msg.CreateTime}}

**User ID:** {{getUserDetail "email" $msg.UserId}}

**Role:** {{$msg.Message.Role}}

{{ range $i, $block := $msg.Message.ContentBlocks }}

---

{{ if eq $block.Type "text" }}
**Text:** {{ stripEmoji $block.Text }}
{{ else if eq $block.Type "tool_use" }}
**Tool:** {{ $block.Name }}
{{ if $block.Input }}
**Parameters:**
{{ range $key, $value := parseJSON $block.Input }}
{{ if eq $key "limit" }}- {{ $key }}: {{ $value }}
{{ else }}- {{ $key }}: "{{ $value }}"
{{ end }}{{ end }}{{ end }}
{{ else if $block.ToolResult }}
**Tool Result:**
{{ if $block.ToolResult.Content }}
{{ range $j, $contentBlock := $block.ToolResult.Content }}
{{ if gt $j 0 }}

---

{{ end }}
{{ if $contentBlock.Text }}
{{ if $block.ToolResult.IsError }}
**Error:** {{ $contentBlock.Text }}
{{ else }}
{{ $contentBlock.Text }}
{{ end }}
{{ else if $contentBlock.Json }}
```json
{{ toJSON $contentBlock.Json }}
```
{{ end }}{{ end }}
{{ end }}{{ end }}{{ end }}

{{ if eq $msg.Message.Role "assistant" }}{{ if $msg.Message.Usage }}

---

**Message Usage:**

- Input Tokens: {{$msg.Message.Usage.InputTokens}}
- Output Tokens: {{$msg.Message.Usage.OutputTokens}}
- Credits: {{$msg.Message.Usage.Credits}}

{{end}}{{end}}

---

{{end}}
@@ -357,7 +357,7 @@ sensoroni:
   reports:
     standard:
       case_report__md:
-        title: Case report Template
+        title: Case Report Template
         description: The template used when generating a case report. Supports markdown format.
         file: True
         global: True
@@ -370,6 +370,13 @@ sensoroni:
         global: True
         syntax: md
         helpLink: reports.html
+      assistant_session_report__md:
+        title: Assistant Session Report Template
+        description: The template used when generating an assistant session report. Supports markdown format.
+        file: True
+        global: True
+        syntax: md
+        helplink: reports.html
     custom:
       generic_report1__md:
         title: Custom Report 1
@@ -115,16 +115,16 @@ soc:
       ':kratos:':
         - soc_timestamp
         - event.dataset
-        - http_request.headers.x-real-ip
+        - http.request.headers.x-real-ip
         - user.name
-        - http_request.headers.user-agent
+        - http.useragent
         - msg
       ':hydra:':
         - soc_timestamp
         - event.dataset
-        - http_request.headers.x-real-ip
+        - http.request.headers.x-real-ip
         - user.name
-        - http_request.headers.user-agent
+        - http.useragent
         - msg
       '::conn':
         - soc_timestamp
@@ -1747,7 +1747,7 @@ soc:
         showSubtitle: true
       - name: SOC - Auth
         description: Users authenticated to SOC grouped by IP address and identity
-        query: 'event.dataset:kratos.audit AND msg:*authenticated* | groupby http_request.headers.x-real-ip user.name'
+        query: 'event.dataset:kratos.audit AND msg:*authenticated* | groupby http.request.headers.x-real-ip user.name'
         showSubtitle: true
       - name: SOC - App
         description: Logs generated by the Security Onion Console (SOC) server and modules
@@ -2027,10 +2027,10 @@ soc:
         query: '* | groupby event.category | groupby -sankey event.category event.module | groupby event.module | groupby -sankey event.module event.dataset | groupby event.dataset | groupby observer.name | groupby host.name | groupby source.ip | groupby destination.ip | groupby destination.port'
       - name: SOC Logins
         description: SOC (Security Onion Console) logins
-        query: 'event.dataset:kratos.audit AND msg:*authenticated* | groupby http_request.headers.x-real-ip | groupby -sankey http_request.headers.x-real-ip user.name | groupby user.name | groupby http_request.headers.user-agent'
+        query: 'event.dataset:kratos.audit AND msg:*authenticated* | groupby http.request.headers.x-real-ip | groupby -sankey http.request.headers.x-real-ip user.name | groupby user.name | groupby http.useragent'
       - name: SOC Login Failures
         description: SOC (Security Onion Console) login failures
-        query: 'event.dataset:kratos.audit AND msg:*Encountered*self-service*login*error* | groupby user.name | groupby http_request.headers.x-real-ip | groupby -sankey http_request.headers.x-real-ip http_request.headers.user-agent | groupby http_request.headers.user-agent'
+        query: 'event.dataset:kratos.audit AND msg:*Encountered*self-service*login*error* | groupby user.name | groupby http.request.headers.x-real-ip | groupby -sankey http.request.headers.x-real-ip http.useragent | groupby http.useragent'
       - name: Alerts
         description: Overview of all alerts
         query: 'tags:alert | groupby event.module* | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby rule.name | groupby event.severity | groupby destination.as.organization.name'
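These query updates track the rename processors added to the kratos ingest pipeline earlier in this changeset; documents indexed before the rollover keep the flat http_request.* fields. A sketch for counting events under each spelling during the transition (index name assumed to be the rolled-over logs-kratos-so):

```
for f in http_request.headers.x-real-ip http.request.headers.x-real-ip; do
  /sbin/so-elasticsearch-query 'logs-kratos-so/_count' \
    -d "{\"query\":{\"exists\":{\"field\":\"$f\"}}}" |
    jq -r --arg f "$f" '$f + ": " + (.count|tostring)'
done
```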
@@ -2653,19 +2653,15 @@ soc:
       thresholdColorRatioMax: 1
       availableModels:
         - id: sonnet-4.5
-          displayName: Claude Sonnet 4.5
+          displayName: Claude Sonnet 4.5 ($$$)
+          origin: USA
           contextLimitSmall: 200000
           contextLimitLarge: 1000000
           lowBalanceColorAlert: 500000
           enabled: true
-        - id: gptoss-120b
-          displayName: GPT-OSS 120B
-          contextLimitSmall: 128000
-          contextLimitLarge: 128000
-          lowBalanceColorAlert: 500000
-          enabled: true
         - id: qwen-235b
-          displayName: QWEN 235B
+          displayName: QWEN 235B ($)
+          origin: China
           contextLimitSmall: 256000
           contextLimitLarge: 256000
           lowBalanceColorAlert: 500000
@@ -6,6 +6,7 @@
 # This script queries Elasticsearch for Custom Detections and all Overrides,
 # and git commits them to disk at $OUTPUT_DIR

+import argparse
 import os
 import subprocess
 import json
@@ -18,10 +19,10 @@ from datetime import datetime
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

 # Constants
-ES_URL = "https://localhost:9200/so-detection/_search"
+DEFAULT_INDEX = "so-detection"
+DEFAULT_OUTPUT_DIR = "/nsm/backup/detections/repo"
 QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
 QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
-OUTPUT_DIR = "/nsm/backup/detections/repo"
 AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"

 def get_auth_credentials(auth_file):
@@ -30,9 +31,10 @@ def get_auth_credentials(auth_file):
         if line.startswith('user ='):
             return line.split('=', 1)[1].strip().replace('"', '')

-def query_elasticsearch(query, auth):
+def query_elasticsearch(query, auth, index):
+    url = f"https://localhost:9200/{index}/_search"
     headers = {"Content-Type": "application/json"}
-    response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
+    response = requests.get(url, headers=headers, data=query, auth=auth, verify=False)
     response.raise_for_status()
     return response.json()
@@ -47,12 +49,12 @@ def save_content(hit, base_folder, subfolder="", extension="txt"):
         f.write(content)
     return file_path

-def save_overrides(hit):
+def save_overrides(hit, output_dir):
     so_detection = hit["_source"]["so_detection"]
     public_id = so_detection["publicId"]
     overrides = so_detection["overrides"]
     language = so_detection["language"]
-    folder = os.path.join(OUTPUT_DIR, language, "overrides")
+    folder = os.path.join(output_dir, language, "overrides")
     os.makedirs(folder, exist_ok=True)
     extension = "yaml" if language == "sigma" else "txt"
     file_path = os.path.join(folder, f"{public_id}.{extension}")
@@ -60,20 +62,20 @@ def save_overrides(hit):
         f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
     return file_path

-def ensure_git_repo():
-    if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
+def ensure_git_repo(output_dir):
+    if not os.path.isdir(os.path.join(output_dir, '.git')):
         subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
-        subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
-        subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)
+        subprocess.run(["git", "-C", output_dir, "init"], check=True)
+        subprocess.run(["git", "-C", output_dir, "remote", "add", "origin", "default"], check=True)

-def commit_changes():
-    ensure_git_repo()
-    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
-    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
-    subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
-    status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
+def commit_changes(output_dir):
+    ensure_git_repo(output_dir)
+    subprocess.run(["git", "-C", output_dir, "config", "user.email", "securityonion@local.invalid"], check=True)
+    subprocess.run(["git", "-C", output_dir, "config", "user.name", "securityonion"], check=True)
+    subprocess.run(["git", "-C", output_dir, "add", "."], check=True)
+    status_result = subprocess.run(["git", "-C", output_dir, "status"], capture_output=True, text=True)
     print(status_result.stdout)
-    commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
+    commit_result = subprocess.run(["git", "-C", output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
     if commit_result.returncode == 1:
         print("No changes to commit.")
     elif commit_result.returncode == 0:
@@ -81,29 +83,41 @@ def commit_changes():
     else:
         commit_result.check_returncode()

+def parse_args():
+    parser = argparse.ArgumentParser(description="Backup custom detections and overrides from Elasticsearch")
+    parser.add_argument("--output", "-o", default=DEFAULT_OUTPUT_DIR,
+                        help=f"Output directory for backups (default: {DEFAULT_OUTPUT_DIR})")
+    parser.add_argument("--index", "-i", default=DEFAULT_INDEX,
+                        help=f"Elasticsearch index to query (default: {DEFAULT_INDEX})")
+    return parser.parse_args()
+
 def main():
+    args = parse_args()
+    output_dir = args.output
+    index = args.index
+
     try:
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
-
-        os.makedirs(OUTPUT_DIR, exist_ok=True)
+        print(f"Backing up Custom Detections and all Overrides to {output_dir} - {timestamp}\n")
+
+        os.makedirs(output_dir, exist_ok=True)

         auth_credentials = get_auth_credentials(AUTH_FILE)
         username, password = auth_credentials.split(':', 1)
         auth = HTTPBasicAuth(username, password)

         # Query and save custom detections
-        detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
+        detections = query_elasticsearch(QUERY_DETECTIONS, auth, index)["hits"]["hits"]
         for hit in detections:
-            save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
+            save_content(hit, output_dir, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")

         # Query and save overrides
-        overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
+        overrides = query_elasticsearch(QUERY_OVERRIDES, auth, index)["hits"]["hits"]
         for hit in overrides:
-            save_overrides(hit)
-
-        commit_changes()
+            save_overrides(hit, output_dir)
+
+        commit_changes(output_dir)

         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         print(f"Backup Completed - {timestamp}")
     except Exception as e:
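With parse_args in place the backup script gains a small CLI while keeping its old default behavior. An invocation sketch (the alternate index name is invented for illustration):

```
# Default run: same index and output directory as before the change.
python3 /opt/so/conf/soc/so-detections-backup.py

# Back up a different index to a scratch directory (index name invented):
python3 /opt/so/conf/soc/so-detections-backup.py \
  --index sos-backup-detection-20251216-120000 \
  --output /tmp/detections-verify
```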
@@ -57,12 +57,12 @@ class TestBackupScript(unittest.TestCase):
         mock_response.json.return_value = {'hits': {'hits': []}}
         mock_response.raise_for_status = MagicMock()
         mock_get.return_value = mock_response

-        response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
+        response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX)

         self.assertEqual(response, {'hits': {'hits': []}})
         mock_get.assert_called_once_with(
-            ds.ES_URL,
+            f"https://localhost:9200/{ds.DEFAULT_INDEX}/_search",
             headers={"Content-Type": "application/json"},
             data=ds.QUERY_DETECTIONS,
             auth=self.auth,
@@ -81,7 +81,7 @@ class TestBackupScript(unittest.TestCase):
     @patch('os.makedirs')
     @patch('builtins.open', new_callable=mock_open)
     def test_save_overrides(self, mock_file, mock_makedirs):
-        file_path = ds.save_overrides(self.mock_override_hit)
+        file_path = ds.save_overrides(self.mock_override_hit, self.output_dir)
         expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
         self.assertEqual(file_path, expected_path)
         mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)
@@ -90,9 +90,9 @@ class TestBackupScript(unittest.TestCase):
     @patch('subprocess.run')
     def test_ensure_git_repo(self, mock_run):
         mock_run.return_value = MagicMock(returncode=0)

-        ds.ensure_git_repo()
+        ds.ensure_git_repo(self.output_dir)

         mock_run.assert_has_calls([
             call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
             call(["git", "-C", self.output_dir, "init"], check=True),
@@ -106,9 +106,9 @@ class TestBackupScript(unittest.TestCase):
         mock_commit_result = MagicMock(returncode=1)
         # Ensure sufficient number of MagicMock instances for each subprocess.run call
         mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]

         print("Running test_commit_changes...")
-        ds.commit_changes()
+        ds.commit_changes(self.output_dir)
         print("Finished test_commit_changes.")

         mock_run.assert_has_calls([
@@ -120,39 +120,45 @@ class TestBackupScript(unittest.TestCase):
         ])

     @patch('builtins.print')
-    @patch('so-detections-backup.commit_changes')
-    @patch('so-detections-backup.save_overrides')
-    @patch('so-detections-backup.save_content')
-    @patch('so-detections-backup.query_elasticsearch')
-    @patch('so-detections-backup.get_auth_credentials')
+    @patch.object(ds, 'commit_changes')
+    @patch.object(ds, 'save_overrides')
+    @patch.object(ds, 'save_content')
+    @patch.object(ds, 'query_elasticsearch')
+    @patch.object(ds, 'get_auth_credentials')
     @patch('os.makedirs')
-    def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
+    @patch.object(ds, 'parse_args')
+    def test_main(self, mock_parse_args, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
+        mock_args = MagicMock()
+        mock_args.output = self.output_dir
+        mock_args.index = ds.DEFAULT_INDEX
+        mock_parse_args.return_value = mock_args
         mock_get_auth.return_value = self.auth_credentials
         mock_query.side_effect = [
             {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
             {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
         ]

         with patch('datetime.datetime') as mock_datetime:
             mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
             ds.main()

         mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
         mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
         mock_query.assert_has_calls([
-            call(ds.QUERY_DETECTIONS, self.auth),
-            call(ds.QUERY_OVERRIDES, self.auth)
+            call(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX),
+            call(ds.QUERY_OVERRIDES, self.auth, ds.DEFAULT_INDEX)
         ])
         mock_save_content.assert_called_once_with(
             {"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
             self.output_dir,
             "sigma",
             "yaml"
         )
         mock_save_overrides.assert_called_once_with(
-            {"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
+            {"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}},
+            self.output_dir
         )
-        mock_commit.assert_called_once()
+        mock_commit.assert_called_once_with(self.output_dir)
         mock_print.assert_called()

 if __name__ == '__main__':
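Since the script's hyphenated filename cannot be imported by the usual means, the tests patch through the ds alias; they run like any other unittest module (test filename assumed for illustration):

```
# Test module name is an assumption; adjust to the repo's actual layout.
python3 -m unittest discover -s . -p 'test*detections*backup*.py' -v
```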
|
||||
@@ -652,7 +652,6 @@ soc:
|
||||
assistant:
|
||||
apiUrl:
|
||||
description: The URL of the AI gateway.
|
||||
advanced: True
|
||||
global: True
|
||||
healthTimeoutSeconds:
|
||||
description: Timeout in seconds for the Onion AI health check.
|
||||
@@ -708,6 +707,9 @@ soc:
|
||||
- field: displayName
|
||||
label: Display Name
|
||||
required: True
|
||||
- field: origin
|
||||
label: Country of Origin for the Model Training
|
||||
required: false
|
||||
- field: contextLimitSmall
|
||||
label: Context Limit (Small)
|
||||
forcedType: int
|
||||
|
||||
BIN sigs/securityonion-2.4.200-20251216.iso.sig (new file; binary file not shown)