Compare commits


8 Commits

Author        SHA1        Message                                                                                                     Date
reyesj2       4780371ebe  WIP soup changes                                                                                            2025-12-18 19:21:52 -06:00
reyesj2       6c879cbd13  soup changes                                                                                                2025-12-17 19:08:21 -06:00
reyesj2       089b5aaf44  Merge branch 'reyesj2/elastic9' of github.com:Security-Onion-Solutions/securityonion into reyesj2/elastic9  2025-12-17 16:03:18 -06:00
reyesj2       b61885add5  Fix Kafka output policy - singular topic key                                                                2025-12-17 16:03:12 -06:00
Jorge Reyes   5cb1e284af  Update VERSION                                                                                              2025-12-17 06:54:32 -06:00
reyesj2       e3a4f0873e  update expected version for elastalert state                                                                2025-12-17 06:53:08 -06:00
reyesj2       7977a020ac  elasticsearch 9.0.8                                                                                         2025-12-16 16:03:47 -06:00
reyesj2       d518f75468  update deprecated config items                                                                              2025-12-11 20:07:06 -06:00
16 changed files with 99 additions and 31 deletions

View File

@@ -1 +1 @@
-2.4.200
+2.4.0-foxtrot

View File

@@ -60,7 +60,7 @@ so-elastalert:
     - watch:
       - file: elastaconf
     - onlyif:
-      - "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
+      - "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}
 delete_so-elastalert_so-status.disabled:
   file.uncomment:

View File

@@ -5,7 +5,7 @@
   "package": {
     "name": "endpoint",
     "title": "Elastic Defend",
-    "version": "8.18.1",
+    "version": "9.0.2",
     "requires_root": true
   },
   "enabled": true,

View File

@@ -21,6 +21,7 @@
   'azure_application_insights.app_state': 'azure.app_state',
   'azure_billing.billing': 'azure.billing',
   'azure_functions.metrics': 'azure.function',
+  'azure_ai_foundry.metrics': 'azure.ai_foundry',
   'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
   'azure_metrics.compute_vm': 'azure.compute_vm',
   'azure_metrics.container_instance': 'azure.container_instance',

View File

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
   latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
   echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
   rm -f $INSTALLED_PACKAGE_LIST
-  echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
+  echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
   while read -r package; do
     # get package details
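
Note: the jq change above tracks the newer Fleet packages API, where the installed version is reported under installationInfo rather than the old savedObject attributes. A minimal sketch of the same filter against a hypothetical response body (field values invented for illustration):

    # Hypothetical sample of the Fleet package list shape assumed by the new filter
    cat <<'EOF' > /tmp/fleet-packages.json
    {"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"8.18.1"},"conditions":{"elastic":{"subscription":"basic"}}}]}
    EOF
    # Same jq expression as the script; prints latest vs. installed version per package
    jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' /tmp/fleet-packages.json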

View File

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
     --arg KAFKACA "$KAFKACA" \
     --arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
     --arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
-    '{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
+    '{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
   )
   if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
     echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
     --arg ENABLED_DISABLED "$ENABLED_DISABLED"\
     --arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
     --argjson HOSTS "$HOSTS" \
-    '{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
+    '{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
   )
   if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
     echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"

View File

@@ -1,6 +1,6 @@
 elasticsearch:
   enabled: false
-  version: 8.18.8
+  version: 9.0.8
   index_clean: true
   config:
     action:

View File

@@ -15,7 +15,7 @@ set -e
 if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
   docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
   docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
-  docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
+  docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
   docker rm so-elasticsearchca
   echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
   echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem

View File

@@ -63,7 +63,7 @@ logstash:
   settings:
     lsheap: 500m
   config:
-    http_x_host: 0.0.0.0
+    api_x_http_x_host: 0.0.0.0
     path_x_logs: /var/log/logstash
     pipeline_x_workers: 1
     pipeline_x_batch_x_size: 125
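
Note: assuming this pillar follows the usual Security Onion convention of _x_ standing in for a dot in the rendered config, this renames the Logstash API bind setting from http.host to its Logstash 9 name, api.http.host. A hedged way to confirm the rendered setting on a running node (the rendered logstash.yml path is an assumption):

    # Path is an assumption; adjust to wherever the rendered logstash.yml lives on the node
    grep -n 'api.http.host' /opt/so/conf/logstash/etc/logstash.yml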

View File

@@ -5,10 +5,10 @@ input {
     codec => es_bulk
     request_headers_target_field => client_headers
     remote_host_target_field => client_host
-    ssl => true
+    ssl_enabled => true
     ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
     ssl_certificate => "/usr/share/logstash/filebeat.crt"
     ssl_key => "/usr/share/logstash/filebeat.key"
-    ssl_verify_mode => "peer"
+    ssl_client_authentication => "required"
   }
 }

View File

@@ -2,11 +2,11 @@ input {
   elastic_agent {
     port => 5055
     tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
-    ssl => true
+    ssl_enabled => true
     ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
     ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
     ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
-    ssl_verify_mode => "force_peer"
+    ssl_client_authentication => "required"
     ecs_compatibility => v8
   }
 }

View File

@@ -2,7 +2,7 @@ input {
   elastic_agent {
     port => 5056
     tags => [ "elastic-agent", "fleet-lumberjack-input" ]
-    ssl => true
+    ssl_enabled => true
     ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
     ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
     ecs_compatibility => v8

View File

@@ -8,8 +8,8 @@ output {
       document_id => "%{[metadata][_id]}"
       index => "so-ip-mappings"
       silence_errors_in_log => ["version_conflict_engine_exception"]
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
   else {
@@ -25,8 +25,8 @@ output {
       document_id => "%{[metadata][_id]}"
       pipeline => "%{[metadata][pipeline]}"
       silence_errors_in_log => ["version_conflict_engine_exception"]
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
   else {
@@ -37,8 +37,8 @@ output {
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
       pipeline => "%{[metadata][pipeline]}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
 }
@@ -49,8 +49,8 @@ output {
       data_stream => true
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode=> "none"
     }
   }
 }

View File

@@ -13,8 +13,8 @@ output {
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
       index => "endgame-%{+YYYY.MM.dd}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
 }
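
Note: the pipeline hunks above track the Logstash 9 SSL option renames: ssl becomes ssl_enabled, ssl_verify_mode becomes ssl_client_authentication on inputs, and ssl_certificate_verification becomes ssl_verification_mode on the elasticsearch output. A hedged helper to spot any custom pipeline files still using the removed names (the search path is an assumption based on the default Security Onion layout):

    # Flag pipeline configs still using pre-9 SSL option names; adjust the path to your local pipelines
    grep -RnE 'ssl => |ssl_verify_mode|ssl_certificate_verification' \
      /opt/so/saltstack/*/salt/logstash/pipelines/ 2>/dev/null || echo "No deprecated SSL options found."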

View File

@@ -56,7 +56,7 @@ logstash:
       helpLink: logstash.html
     global: False
   config:
-    http_x_host:
+    api_x_http_x_host:
       description: Host interface to listen to connections.
       helpLink: logstash.html
       readonly: True

View File

@@ -427,6 +427,7 @@ preupgrade_changes() {
     [[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
     [[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
     [[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
+    [[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
     true
 }
@@ -459,6 +460,7 @@ postupgrade_changes() {
     [[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
     [[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
     [[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
+    [[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
     true
 }
@@ -615,9 +617,6 @@ post_to_2.4.180() {
 }
 post_to_2.4.190() {
-    echo "Regenerating Elastic Agent Installers"
-    /sbin/so-elastic-agent-gen-installers
     # Only need to update import / eval nodes
     if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
         update_import_fleet_output
@@ -645,6 +644,13 @@ post_to_2.4.200() {
     POSTVERSION=2.4.200
 }
+post_to_2.4.210() {
+    echo "Regenerating Elastic Agent Installers"
+    /sbin/so-elastic-agent-gen-installers
+    POSTVERSION=2.4.210
+}
 repo_sync() {
     echo "Sync the local repo."
     su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -906,9 +912,7 @@ up_to_2.4.180() {
 }
 up_to_2.4.190() {
-    # Elastic Update for this release, so download Elastic Agent files
-    determine_elastic_agent_upgrade
+    echo "Nothing to do for 2.4.190"
     INSTALLEDVERSION=2.4.190
 }
@@ -921,6 +925,13 @@ up_to_2.4.200() {
     INSTALLEDVERSION=2.4.200
 }
+up_to_2.4.210() {
+    # Elastic Update for this release, so download Elastic Agent files
+    determine_elastic_agent_upgrade
+    INSTALLEDVERSION=2.4.210
+}
 add_hydra_pillars() {
     mkdir -p /opt/so/saltstack/local/pillar/hydra
     touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1608,6 +1619,60 @@ verify_latest_update_script() {
     fi
 }
+verify_es_version_compatibility() {
+    # Define supported upgrade paths for SO ES versions
+    declare -A es_upgrade_map=(
+        ["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
+        ["8.17.3"]="8.18.4 8.18.6 8.18.8"
+        ["8.18.4"]="8.18.6 8.18.8 9.0.8"
+        ["8.18.6"]="8.18.8 9.0.8"
+        ["8.18.8"]="9.0.8"
+    )
+    # ES version stepping stones. Elasticsearch MUST upgrade through these versions
+    declare -A es_to_so_version=(
+        ["8.18.8"]="2.4.190-20251024"
+        ["9.0.8"]="2.4.210"
+    )
+    # Get current Elasticsearch version
+    if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
+        es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
+    else
+        echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
+        exit 1
+    fi
+    # Get the target ES version from the updatedir
+    # DOUBLE TAP ON THIS SOMEONE FROM AN OLD VERSION UPGRADING TO ANOTHER VERSION OLDER THAN 2.4.110 WOULD HAVE AN ERROR HERE. Prior to this version there was no version defined in defaults.yaml
+    target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p')
+    if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
+        # supported upgrade
+        return 0
+    else
+        compatible_versions=${es_upgrade_map[$es_version]}
+        next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
+        echo ""
+        echo "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION)."
+        echo ""
+        if [[ $is_airgap -eq 0 ]]; then
+            echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
+        else
+            echo "You can use the following soup command to upgrade to $next_step_so_version;"
+            echo " sudo BRANCH=$next_step_so_version soup"
+            echo ""
+        fi
+        echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
+        echo ""
+        exit 1
+    fi
+}
 # Keeping this block in case we need to do a hotfix that requires salt update
 apply_hotfix() {
     if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1704,6 +1769,8 @@ main() {
     echo "Verifying we have the latest soup script."
     verify_latest_update_script
+    verify_es_version_compatibility
     echo "Let's see if we need to update Security Onion."
     upgrade_check
     upgrade_space
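
Note: the new verify_es_version_compatibility gate keys off whitespace-delimited version lists in a bash associative array, so the membership test is a padded-string match rather than a true version comparison. A standalone sketch of that lookup pattern (map entries copied from the function above; the surrounding soup plumbing is omitted):

    #!/bin/bash
    # Minimal reproduction of the upgrade-path check used above
    declare -A es_upgrade_map=(
        ["8.18.6"]="8.18.8 9.0.8"
        ["8.18.8"]="9.0.8"
    )
    es_version="8.18.6"
    target_es_version="9.0.8"
    # Padding both sides with spaces keeps "9.0.8" from matching inside a longer version string
    if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
        echo "Direct upgrade from $es_version to $target_es_version is supported."
    else
        # ${list##* } keeps only the last entry in the list, i.e. the newest supported stepping stone
        compatible_versions=${es_upgrade_map[$es_version]}
        echo "Upgrade Elasticsearch to ${compatible_versions##* } first."
    fi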