Mirror of https://github.com/Security-Onion-Solutions/securityonion.git
Synced 2025-12-19 23:43:07 +01:00

Compare commits: c4a70b540e ... reyesj2/el

26 commits:
9345718967
6c879cbd13
089b5aaf44
b61885add5
5cb1e284af
e3a4f0873e
7977a020ac
d518f75468
04d6cca204
5ab6bda639
f433de7e12
8ef6c2f91d
7575218697
dc945dad00
ddcd74ffd2
e105bd12e6
f5688175b6
72a4ba405f
ba49765312
72c8c2371e
80411ab6cf
0ff8fa57e7
411f28a049
0f42233092
2dd49f6d9b
271f545f4f

@@ -60,7 +60,7 @@ so-elastalert:
- watch:
- file: elastaconf
- onlyif:
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}

delete_so-elastalert_so-status.disabled:
file.uncomment:
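
Note on the gate above: the onlyif condition keys off the first character of version.number from the Elasticsearch root endpoint, so moving the grep from 8 to 9 makes the state fire only on Elasticsearch 9.x. A minimal sketch of the same check, run against a canned root-endpoint response rather than a live cluster:

# Hedged sketch: emulate the onlyif check against a canned root-endpoint response
resp='{"name":"so-node","version":{"number":"9.0.8","build_flavor":"default"}}'
major=$(echo "$resp" | jq -r '.version.number[0:1]')
if [ "$major" = "9" ]; then
  echo "Elasticsearch 9.x detected - state would run"
else
  echo "Elasticsearch ${major}.x detected - state would be skipped"
fi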

@@ -5,7 +5,7 @@
"package": {
"name": "endpoint",
"title": "Elastic Defend",
"version": "8.18.1",
"version": "9.0.2",
"requires_root": true
},
"enabled": true,

@@ -21,6 +21,7 @@
'azure_application_insights.app_state': 'azure.app_state',
'azure_billing.billing': 'azure.billing',
'azure_functions.metrics': 'azure.function',
'azure_ai_foundry.metrics': 'azure.ai_foundry',
'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
'azure_metrics.compute_vm': 'azure.compute_vm',
'azure_metrics.container_instance': 'azure.container_instance',

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
rm -f $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST

while read -r package; do
# get package details
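
The installed version is now read from installationInfo.version rather than the legacy savedObject.attributes.install_version path. A self-contained sketch of the new jq filter against a canned Fleet item (sample values invented for illustration):

# Hedged sketch: canned Fleet package item showing where the new jq path reads from
item='{"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"8.18.1"},"conditions":{"elastic":{"subscription":"basic"}}}]}'
echo "$item" | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription}]}'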

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg KAFKACA "$KAFKACA" \
--arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"

@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg ENABLED_DISABLED "$ENABLED_DISABLED"\
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
--argjson HOSTS "$HOSTS" \
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"

@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 8.18.8
version: 9.0.8
index_clean: true
config:
action:

@@ -15,7 +15,7 @@ set -e
if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker rm so-elasticsearchca
echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem

@@ -63,7 +63,7 @@ logstash:
settings:
lsheap: 500m
config:
http_x_host: 0.0.0.0
api_x_http_x_host: 0.0.0.0
path_x_logs: /var/log/logstash
pipeline_x_workers: 1
pipeline_x_batch_x_size: 125

@@ -5,10 +5,10 @@ input {
codec => es_bulk
request_headers_target_field => client_headers
remote_host_target_field => client_host
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/filebeat.crt"
ssl_key => "/usr/share/logstash/filebeat.key"
ssl_verify_mode => "peer"
ssl_client_authentication => "required"
}
}

@@ -2,11 +2,11 @@ input {
elastic_agent {
port => 5055
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
ssl_verify_mode => "force_peer"
ssl_client_authentication => "required"
ecs_compatibility => v8
}
}

@@ -2,7 +2,7 @@ input {
elastic_agent {
port => 5056
tags => [ "elastic-agent", "fleet-lumberjack-input" ]
ssl => true
ssl_enabled => true
ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
ecs_compatibility => v8

@@ -8,8 +8,8 @@ output {
document_id => "%{[metadata][_id]}"
index => "so-ip-mappings"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {

@@ -25,8 +25,8 @@ output {
document_id => "%{[metadata][_id]}"
pipeline => "%{[metadata][pipeline]}"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {

@@ -37,8 +37,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
pipeline => "%{[metadata][pipeline]}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}

@@ -49,8 +49,8 @@ output {
data_stream => true
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode=> "none"
}
}
}

@@ -13,8 +13,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
index => "endgame-%{+YYYY.MM.dd}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
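
Across these pipeline inputs and outputs the SSL options move to the names current Logstash plugins expect: ssl becomes ssl_enabled, ssl_verify_mode becomes ssl_client_authentication, and ssl_certificate_verification becomes ssl_verification_mode. A quick audit sketch for leftover legacy names (the search path is an assumption; adjust to wherever the pipeline configs live):

# Hedged sketch: flag any pipeline configs still using the pre-rename SSL options
grep -rnE 'ssl =>|ssl_verify_mode =>|ssl_certificate_verification =>' \
  /opt/so/saltstack/default/salt/logstash/pipelines/ 2>/dev/null \
  || echo "no legacy ssl options found"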

@@ -56,7 +56,7 @@ logstash:
helpLink: logstash.html
global: False
config:
http_x_host:
api_x_http_x_host:
description: Host interface to listen to connections.
helpLink: logstash.html
readonly: True

@@ -214,7 +214,7 @@ git_config_set_safe_dirs:

surinsmrulesdir:
file.directory:
- name: /nsm/rules/suricata
- name: /nsm/rules/suricata/etopen
- user: 939
- group: 939
- makedirs: True

@@ -87,6 +87,9 @@ check_err() {
113)
echo 'No route to host'
;;
160)
echo 'Incompatiable Elasticsearch upgrade'
;;
*)
echo 'Unhandled error'
echo "$err_msg"

@@ -427,6 +430,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
[[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true
}

@@ -459,6 +463,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
[[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
[[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
[[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
true
}

@@ -615,9 +620,6 @@ post_to_2.4.180() {
}

post_to_2.4.190() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers

# Only need to update import / eval nodes
if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
update_import_fleet_output

@@ -645,6 +647,13 @@ post_to_2.4.200() {
POSTVERSION=2.4.200
}

post_to_2.4.210() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers

POSTVERSION=2.4.210
}

repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."

@@ -906,9 +915,7 @@ up_to_2.4.180() {
}

up_to_2.4.190() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade

echo "Nothing to do for 2.4.190"
INSTALLEDVERSION=2.4.190
}

@@ -921,6 +928,13 @@ up_to_2.4.200() {
INSTALLEDVERSION=2.4.200
}

up_to_2.4.210() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade

INSTALLEDVERSION=2.4.210
}

add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls

@@ -1113,9 +1127,13 @@ suricata_idstools_removal_pre() {
install -d -o 939 -g 939 -m 755 /opt/so/conf/soc/fingerprints
install -o 939 -g 939 -m 644 /dev/null /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
cat > /opt/so/conf/soc/fingerprints/suricataengine.syncBlock << EOF
Suricata ruleset sync is blocked until this file is removed. Make sure that you have manually added any custom Suricata rulesets via SOC config - review the documentation for more details: securityonion.net/docs
Suricata ruleset sync is blocked until this file is removed. **CRITICAL** Make sure that you have manually added any custom Suricata rulesets via SOC config before removing this file - review the documentation for more details: https://docs.securityonion.net/en/2.4/nids.html#sync-block
EOF

# Remove possible symlink & create salt local rules dir
[ -L /opt/so/saltstack/local/salt/suricata/rules ] && rm -f /opt/so/saltstack/local/salt/suricata/rules
install -d -o 939 -g 939 /opt/so/saltstack/local/salt/suricata/rules/ || echo "Failed to create Suricata local rules directory"

# Backup custom rules & overrides
mkdir -p /nsm/backup/detections-migration/2-4-200
cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200

@@ -1127,6 +1145,7 @@ if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
# Verify backup by comparing counts
echo "Verifying detection overrides backup..."
es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
--retry 5 --retry-delay 10 --retry-all-errors \
-d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
echo " Error: Failed to query Elasticsearch for override count"
exit 1

@@ -1297,7 +1316,6 @@ if [ -n "$(docker ps -q -f name=^so-idstools$)" ]; then
fi

echo "Removing idstools symlink and scripts..."
rm /opt/so/saltstack/local/salt/suricata/rules
rm -rf /usr/sbin/so-idstools*
sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf

@@ -1355,7 +1373,7 @@ unmount_update() {

update_airgap_rules() {
# Copy the rules over to update them for airgap.
rsync -a $UPDATE_DIR/agrules/suricata/* /nsm/rules/suricata/
rsync -a --delete $UPDATE_DIR/agrules/suricata/ /nsm/rules/suricata/etopen/
rsync -a $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -a $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
# Copy the securityonion-resorces repo over for SOC Detection Summaries and checkout the published summaries branch

@@ -1604,6 +1622,69 @@ verify_latest_update_script() {
fi

}

verify_es_version_compatibility() {

# supported upgrade paths for SO-ES versions
declare -A es_upgrade_map=(
["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
["8.17.3"]="8.18.4 8.18.6 8.18.8"
["8.18.4"]="8.18.6 8.18.8 9.0.8"
["8.18.6"]="8.18.8 9.0.8"
["8.18.8"]="9.0.8"
)

# Elasticsearch MUST upgrade through these versions
declare -A es_to_so_version=(
["8.18.8"]="2.4.190-20251024"
)

# Get current Elasticsearch version
if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
else
echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
exit 160
fi

if ! target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p'); then
# so-yaml.py failed to get the ES version from upgrade versions elasticsearch/defaults.yaml file. Likely they are upgrading to an SO version older than 2.4.110 prior to the ES version pinning and should be OKAY to continue with the upgrade.

# if so-yaml.py failed to get the ES version AND the version we are upgrading to is newer than 2.4.110 then we should bail
if [[ $(cat $UPDATE_DIR/VERSION | cut -d'.' -f3) > 110 ]]; then
echo "Couldn't determine the target Elasticsearch version (post soup version) to ensure compatibility with current Elasticsearch version. Exiting"
exit 160
fi

# allow upgrade to version < 2.4.110 without checking ES version compatibility
return 0

fi

if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
# supported upgrade
return 0
else
compatible_versions=${es_upgrade_map[$es_version]}
next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
echo -e "\n##############################################################################################################################\n"
echo -e "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION).\n"

if [[ $is_airgap -eq 0 ]]; then
echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
else
echo "You can use the following soup command to upgrade to $next_step_so_version;"
echo -e " sudo BRANCH=$next_step_so_version soup\n"

fi
echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
echo -e "\n###############################################################################################################################\n"
exit 160
fi

}

# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then

@@ -1700,6 +1781,8 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script

verify_es_version_compatibility

echo "Let's see if we need to update Security Onion."
upgrade_check
upgrade_space

@@ -1622,12 +1622,11 @@ soc:
sourceType: directory
airgap:
- name: Emerging-Threats
description: "Emerging Threats ruleset - To enable ET Pro, enter your license key below. Leave empty for ET Open (free) rules."
description: "Emerging Threats ruleset - To enable ET Pro on Airgap, review the documentation at https://docs.securityonion.net/suricata"
licenseKey: ""
enabled: true
sourceType: url
sourcePath: 'https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz'
urlHash: "https://rules.emergingthreats.net/open/suricata/emerging.rules.tar.gz.md5"
sourceType: directory
sourcePath: /nsm/rules/suricata/etopen/
license: "BSD"
excludeFiles:
- "*deleted*"

@@ -2653,12 +2652,6 @@ soc:
thresholdColorRatioMed: 0.75
thresholdColorRatioMax: 1
availableModels:
- id: sonnet-4
displayName: Claude Sonnet 4
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: sonnet-4.5
displayName: Claude Sonnet 4.5
contextLimitSmall: 200000

@@ -70,7 +70,7 @@

{# Define the Detections custom ruleset that should always be present #}
{% set CUSTOM_RULESET = {
'name': 'custom',
'name': '__custom__',
'description': 'User-created custom rules created via the Detections module in the SOC UI',
'sourceType': 'elasticsearch',
'sourcePath': 'so_detection.ruleset:__custom__',

@@ -83,7 +83,7 @@
{# Always append the custom ruleset to suricataengine.rulesetSources if not already present #}
{% if SOCMERGED.config.server.modules.suricataengine is defined and SOCMERGED.config.server.modules.suricataengine.rulesetSources is defined %}
{% if SOCMERGED.config.server.modules.suricataengine.rulesetSources is not mapping %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', 'custom') | list %}
{% set custom_names = SOCMERGED.config.server.modules.suricataengine.rulesetSources | selectattr('name', 'equalto', '__custom__') | list %}
{% if custom_names | length == 0 %}
{% do SOCMERGED.config.server.modules.suricataengine.rulesetSources.append(CUSTOM_RULESET) %}
{% endif %}

@@ -108,21 +108,39 @@
{% if ruleset.name == 'Emerging-Threats' %}
{% if ruleset.licenseKey and ruleset.licenseKey != '' %}
{# License key is defined - transform to ETPRO #}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': '/nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz',
'license': 'Commercial'
}) %}
{% else %}
{# Engine Version is hardcoded in the URL - this does not change often: https://community.emergingthreats.net/t/supported-engines/71 #}
{% do ruleset.update({
'name': 'ETPRO',
'sourcePath': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz',
'urlHash': 'https://rules.emergingthreatspro.com/' ~ ruleset.licenseKey ~ '/suricata-7.0.3/etpro.rules.tar.gz.md5',
'license': 'Commercial'
}) %}
{% endif %}
{% else %}
{# No license key - explicitly set to ETOPEN #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% if ruleset.sourceType == 'directory' %}
{# Airgap mode - update directory path #}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': '/nsm/rules/suricata/etopen/',
'license': 'BSD'
}) %}
{% else %}
{% do ruleset.update({
'name': 'ETOPEN',
'sourcePath': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz',
'urlHash': 'https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz.md5',
'license': 'BSD'
}) %}
{% endif %}
{% endif %}
{% endif %}
{% endfor %}
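
The reworked block above keeps the ETPRO/ETOPEN split but now honors airgap deployments: when the ruleset's sourceType is directory, the source is pointed at on-disk locations instead of the Emerging Threats download URLs. A small sketch to check that those airgap paths (taken from the diff) are actually present on a manager:

# Hedged sketch: verify the airgap rule locations referenced above exist
for p in /nsm/rules/suricata/etopen/ \
         /nsm/rules/custom-local-repos/local-etpro-suricata/etpro.rules.tar.gz; do
  [ -e "$p" ] && echo "found:   $p" || echo "missing: $p"
done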

@@ -608,6 +608,18 @@ soc:
label: Delete Unreferenced (Deletes rules that are no longer referenced by ruleset source)
forcedType: bool
required: False
- field: proxyURL
label: HTTP/HTTPS proxy URL for downloading the ruleset.
required: False
- field: proxyUsername
label: Proxy authentication username.
required: False
- field: proxyPassword
label: Proxy authentication password.
required: False
- field: proxyCACert
label: Path to CA certificate file for MITM proxy verification.
required: False
airgap: *serulesetSources
navigator:
intervalMinutes:
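
The four new proxy fields describe how the Suricata ruleset download can traverse an HTTP/HTTPS proxy. Purely to illustrate what each value corresponds to, here is a hedged manual-download equivalent with curl (the values and the flag mapping are illustrative, not SOC's internal implementation):

# Hedged sketch: what the proxy fields map to in a manual curl download (illustrative only)
proxyURL="http://proxy.example.internal:3128"        # proxyURL
proxyUsername="svc-rules"                            # proxyUsername
proxyPassword="changeme"                             # proxyPassword
proxyCACert="/etc/pki/tls/certs/mitm-proxy-ca.pem"   # proxyCACert
curl --proxy "$proxyURL" --proxy-user "$proxyUsername:$proxyPassword" \
  --cacert "$proxyCACert" -fSL -o /tmp/emerging.rules.tar.gz \
  "https://rules.emergingthreats.net/open/suricata-7.0.3/emerging.rules.tar.gz"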

@@ -17,14 +17,23 @@ query() {

STATS=$(query "ruleset-stats")
RELOAD=$(query "ruleset-reload-time")
[ -z "$RELOAD" ] && RELOAD='{}'

if echo "$STATS" | jq -e '.return == "OK"' > /dev/null 2>&1; then
LOADED=$(echo "$STATS" | jq -r '.message[0].rules_loaded')
FAILED=$(echo "$STATS" | jq -r '.message[0].rules_failed')
LAST_RELOAD=$(echo "$RELOAD" | jq -r '.message[0].last_reload')
# Outputs valid JSON on success, empty on failure
OUTPUT=$(jq -n \
--argjson stats "$STATS" \
--argjson reload "$RELOAD" \
'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number" and ($stats.message[0].rules_failed | type) == "number" then
{
rules_loaded: $stats.message[0].rules_loaded,
rules_failed: $stats.message[0].rules_failed,
last_reload: ($reload.message[0].last_reload // ""),
return: "OK"
}
else empty end' 2>/dev/null)

jq -n --argjson loaded "$LOADED" --argjson failed "$FAILED" --arg reload "$LAST_RELOAD" \
'{rules_loaded: $loaded, rules_failed: $failed, last_reload: $reload, return: "OK"}' > "$OUTFILE"
if [ -n "$OUTPUT" ]; then
echo "$OUTPUT" > "$OUTFILE"
else
echo '{"return":"FAIL"}' > "$OUTFILE"
fi
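
The stats collector now lets a single jq program validate the suricatasc responses: it only emits JSON when rules_loaded and rules_failed are numeric, and the script writes a FAIL record otherwise. The same filter can be exercised standalone against canned responses (sample values invented):

# Hedged sketch: the jq validation run against canned suricatasc-style responses
STATS='{"return":"OK","message":[{"rules_loaded":43210,"rules_failed":0}]}'
RELOAD='{"return":"OK","message":[{"last_reload":"2025-11-01T00:00:00"}]}'
jq -n --argjson stats "$STATS" --argjson reload "$RELOAD" \
  'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number" and ($stats.message[0].rules_failed | type) == "number" then
     {rules_loaded: $stats.message[0].rules_loaded,
      rules_failed: $stats.message[0].rules_failed,
      last_reload: ($reload.message[0].last_reload // ""),
      return: "OK"}
   else empty end'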

@@ -18,11 +18,15 @@ if [[ ! "`pidof -x $(basename $0) -o %PPID`" ]]; then
if [ -f "$STATSFILE" ] && [ $(($(date +%s) - $(stat -c %Y "$STATSFILE"))) -lt 90 ] && jq -e '.return == "OK" and .rules_loaded != null and .rules_failed != null' "$STATSFILE" > /dev/null 2>&1; then
LOADED=$(jq -r '.rules_loaded' "$STATSFILE")
FAILED=$(jq -r '.rules_failed' "$STATSFILE")
RELOAD_TIME=$(jq -r '.last_reload // ""' "$STATSFILE")
RELOAD_TIME=$(jq -r 'if .last_reload then .last_reload else "" end' "$STATSFILE")

echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
if [ -n "$RELOAD_TIME" ]; then
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
else
echo "surirules loaded=${LOADED}i,failed=${FAILED}i,status=\"ok\""
fi
else
echo "surirules loaded=0i,failed=0i,reload_time=\"\",status=\"unknown\""
echo "surirules loaded=0i,failed=0i,status=\"unknown\""
fi

fi
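
With the change above, the Telegraf check only includes the reload_time field when a reload timestamp is actually present, so the emitted InfluxDB line protocol takes one of two shapes (values illustrative):

# Hedged sketch: the two possible output lines, values invented for illustration
surirules loaded=43210i,failed=12i,reload_time="2025-11-01T00:00:00.000000+0000",status="ok"
surirules loaded=43210i,failed=12i,status="ok"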