Compare commits

...

15 Commits

Author SHA1 Message Date
reyesj2
63f7a87c27 WIP: autosoup test 2025-12-30 10:33:53 -06:00
reyesj2
893aaafa1b foxtrot 2025-12-29 15:54:06 -06:00
reyesj2
33c34cdeca Merge branch '2.4/dev' of github.com:Security-Onion-Solutions/securityonion into reyesj2/elastic9-autosoup 2025-12-29 15:49:49 -06:00
reyesj2
9b411867df update version 2025-12-29 10:27:38 -06:00
Jason Ertel
fd1596b3a0 Merge pull request #15347 from Security-Onion-Solutions/jertel/wip: expose login form lifespan in config scr 2025-12-24 15:09:36 -05:00
Jason Ertel
b05de22f58 expose login form lifespan in config scr 2025-12-24 14:39:55 -05:00
reyesj2
f666ad600f accept same version 'upgrades' 2025-12-23 16:27:22 -06:00
reyesj2
9345718967 verify pre-soup ES version is directly upgradable to post-soup ES version. 2025-12-19 16:15:05 -06:00
reyesj2
6c879cbd13 soup changes 2025-12-17 19:08:21 -06:00
reyesj2
089b5aaf44 Merge branch 'reyesj2/elastic9' of github.com:Security-Onion-Solutions/securityonion into reyesj2/elastic9 2025-12-17 16:03:18 -06:00
reyesj2
b61885add5 Fix Kafka output policy - singular topic key 2025-12-17 16:03:12 -06:00
Jorge Reyes
5cb1e284af Update VERSION 2025-12-17 06:54:32 -06:00
reyesj2
e3a4f0873e update expected version for elastalert state 2025-12-17 06:53:08 -06:00
reyesj2
7977a020ac elasticsearch 9.0.8 2025-12-16 16:03:47 -06:00
reyesj2
d518f75468 update deprecated config items 2025-12-11 20:07:06 -06:00
18 changed files with 117 additions and 31 deletions

View File

@@ -1 +1 @@
2.4.210
2.4.0-foxtrot

View File

@@ -60,7 +60,7 @@ so-elastalert:
- watch:
- file: elastaconf
- onlyif:
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}
delete_so-elastalert_so-status.disabled:
file.uncomment:
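
The onlyif guard above gates the elastalert state on the Elasticsearch major version. A minimal standalone sketch of the same check, assuming so-elasticsearch-query against / returns the cluster root JSON (e.g. {"version":{"number":"9.0.8"}}):

    # Note: [0:1] keeps only the first character of the version string, so this works for single-digit majors.
    major=$(so-elasticsearch-query / | jq -r '.version.number[0:1]')
    if echo "$major" | grep -q 9; then
      echo "Elasticsearch 9 detected - elastalert state will run"
    fi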

View File

@@ -5,7 +5,7 @@
"package": {
"name": "endpoint",
"title": "Elastic Defend",
"version": "8.18.1",
"version": "9.0.2",
"requires_root": true
},
"enabled": true,

View File

@@ -21,6 +21,7 @@
'azure_application_insights.app_state': 'azure.app_state',
'azure_billing.billing': 'azure.billing',
'azure_functions.metrics': 'azure.function',
'azure_ai_foundry.metrics': 'azure.ai_foundry',
'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
'azure_metrics.compute_vm': 'azure.compute_vm',
'azure_metrics.container_instance': 'azure.container_instance',

View File

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
rm -f $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
while read -r package; do
# get package details
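
The jq change above swaps the installed-version source from .savedObject.attributes.install_version to .installationInfo.version. A runnable sketch of the reshaping step against a stubbed package-list response (the real response comes from the Kibana Fleet API and carries more fields):

    latest_package_list='{"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"8.18.1"},"conditions":{"elastic":{"subscription":"basic"}}}]}'
    echo "$latest_package_list" | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription}]}'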

View File

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg KAFKACA "$KAFKACA" \
--arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg ENABLED_DISABLED "$ENABLED_DISABLED"\
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
--argjson HOSTS "$HOSTS" \
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"

View File

@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 8.18.8
version: 9.0.8
index_clean: true
config:
action:
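
This is the Elasticsearch version pin (8.18.8 to 9.0.8) that the new soup compatibility check reads. A hedged sketch of reading it back the same way soup does further down; the installed defaults path is an assumption, the key path comes from the soup diff:

    so-yaml.py get /opt/so/saltstack/default/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p'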

View File

@@ -15,7 +15,7 @@ set -e
if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker rm so-elasticsearchca
echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem

View File

@@ -46,6 +46,7 @@ kratos:
ui_url: https://URL_BASE/
login:
ui_url: https://URL_BASE/login/
lifespan: 60m
error:
ui_url: https://URL_BASE/login/
registration:
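
The new lifespan key bounds how long a rendered login form stays valid (60m here). A hedged sketch of reading the value back with so-yaml.py, which this changeset uses elsewhere; the defaults file path and the full key path are assumptions:

    so-yaml.py get /opt/so/saltstack/default/salt/kratos/defaults.yaml kratos.config.selfservice.flows.login.lifespan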

View File

@@ -182,6 +182,10 @@ kratos:
global: True
advanced: True
helpLink: kratos.html
lifespan:
description: Defines the duration that a login form will remain valid.
global: True
helpLink: kratos.html
error:
ui_url:
description: User accessible URL containing the Security Onion login page. Leave as default to ensure proper operation.

View File

@@ -63,7 +63,7 @@ logstash:
settings:
lsheap: 500m
config:
http_x_host: 0.0.0.0
api_x_http_x_host: 0.0.0.0
path_x_logs: /var/log/logstash
pipeline_x_workers: 1
pipeline_x_batch_x_size: 125
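
The rename from http_x_host to api_x_http_x_host tracks Logstash moving its HTTP API bind setting under the api.* namespace; that the _x_ delimiter expands to a dot in the rendered logstash.yml is an assumption based on the key names. A hypothetical check of the rendered setting (file path assumed):

    grep 'api.http.host' /opt/so/conf/logstash/etc/logstash.yml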

View File

@@ -5,10 +5,10 @@ input {
codec => es_bulk
request_headers_target_field => client_headers
remote_host_target_field => client_host
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/filebeat.crt"
ssl_key => "/usr/share/logstash/filebeat.key"
ssl_verify_mode => "peer"
ssl_client_authentication => "required"
}
}
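
This and the following pipeline files replace the Logstash 8.x SSL option names (ssl, ssl_verify_mode, ssl_certificate_verification) with their 9.x equivalents (ssl_enabled, ssl_client_authentication, ssl_verification_mode). A hedged pre-upgrade sweep for leftovers in any locally customized pipelines; the conf directory path is an assumption:

    grep -RnE 'ssl =>|ssl_verify_mode|ssl_certificate_verification' /opt/so/conf/logstash/ || echo "no deprecated SSL options found"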

View File

@@ -2,11 +2,11 @@ input {
elastic_agent {
port => 5055
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
ssl => true
ssl_enabled => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
ssl_verify_mode => "force_peer"
ssl_client_authentication => "required"
ecs_compatibility => v8
}
}

View File

@@ -2,7 +2,7 @@ input {
elastic_agent {
port => 5056
tags => [ "elastic-agent", "fleet-lumberjack-input" ]
ssl => true
ssl_enabled => true
ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
ecs_compatibility => v8

View File

@@ -8,8 +8,8 @@ output {
document_id => "%{[metadata][_id]}"
index => "so-ip-mappings"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -25,8 +25,8 @@ output {
document_id => "%{[metadata][_id]}"
pipeline => "%{[metadata][pipeline]}"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
else {
@@ -37,8 +37,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
pipeline => "%{[metadata][pipeline]}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}
@@ -49,8 +49,8 @@ output {
data_stream => true
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}

View File

@@ -13,8 +13,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
index => "endgame-%{+YYYY.MM.dd}"
ssl => true
ssl_certificate_verification => false
ssl_enabled => true
ssl_verification_mode => "none"
}
}
}

View File

@@ -56,7 +56,7 @@ logstash:
helpLink: logstash.html
global: False
config:
http_x_host:
api_x_http_x_host:
description: Host interface to listen to connections.
helpLink: logstash.html
readonly: True

View File

@@ -87,6 +87,9 @@ check_err() {
113)
echo 'No route to host'
;;
160)
echo 'Incompatible Elasticsearch upgrade'
;;
*)
echo 'Unhandled error'
echo "$err_msg"
@@ -427,6 +430,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
[[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true
}
@@ -459,6 +463,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
[[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
[[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
[[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
true
}
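
preupgrade_changes and postupgrade_changes chain one function per release, so a grid that is several versions behind steps forward one release at a time. A reduced, runnable sketch of the pattern with illustrative version numbers:

    # bash allows dots in function names declared with the name() form, which the chain relies on.
    INSTALLEDVERSION=2.4.200
    up_to_2.4.210() { echo "pre-upgrade changes for 2.4.210"; INSTALLEDVERSION=2.4.210; }
    [[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
    echo "now at $INSTALLEDVERSION"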
@@ -615,9 +620,6 @@ post_to_2.4.180() {
}
post_to_2.4.190() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
# Only need to update import / eval nodes
if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
update_import_fleet_output
@@ -645,6 +647,13 @@ post_to_2.4.200() {
POSTVERSION=2.4.200
}
post_to_2.4.210() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
POSTVERSION=2.4.210
}
repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -906,9 +915,7 @@ up_to_2.4.180() {
}
up_to_2.4.190() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
echo "Nothing to do for 2.4.190"
INSTALLEDVERSION=2.4.190
}
@@ -921,6 +928,13 @@ up_to_2.4.200() {
INSTALLEDVERSION=2.4.200
}
up_to_2.4.210() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
INSTALLEDVERSION=2.4.210
}
add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1612,6 +1626,70 @@ verify_latest_update_script() {
fi
}
verify_es_version_compatibility() {
# supported upgrade paths for SO-ES versions
declare -A es_upgrade_map=(
["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
["8.17.3"]="8.18.4 8.18.6 8.18.8"
["8.18.4"]="8.18.6 8.18.8 9.0.8"
["8.18.6"]="8.18.8 9.0.8"
["8.18.8"]="9.0.8"
)
# Elasticsearch MUST upgrade through these versions
declare -A es_to_so_version=(
["8.18.8"]="2.4.190-20251024"
)
# Get current Elasticsearch version
if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
else
echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
exit 160
fi
if ! target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p'); then
# so-yaml.py failed to get the ES version from upgrade versions elasticsearch/defaults.yaml file. Likely they are upgrading to an SO version older than 2.4.110 prior to the ES version pinning and should be OKAY to continue with the upgrade.
# if so-yaml.py failed to get the ES version AND the version we are upgrading to is newer than 2.4.110 then we should bail
if [[ $(cat $UPDATE_DIR/VERSION | cut -d'.' -f3) > 110 ]]; then
echo "Couldn't determine the target Elasticsearch version (post soup version) to ensure compatibility with current Elasticsearch version. Exiting"
exit 160
fi
# allow upgrade to version < 2.4.110 without checking ES version compatibility
return 0
fi
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
# supported upgrade
return 0
else
compatible_versions=${es_upgrade_map[$es_version]}
next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
echo -e "\n##############################################################################################################################\n"
echo -e "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION).\n"
if [[ $is_airgap -eq 0 ]]; then
echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
echo -e "\n##############################################################################################################################\n"
exit 160
else
echo "Starting automated intermediate upgrade to $next_step_so_version."
echo "After completion, the system will automatically attempt to upgrade to the latest version."
echo -e "\n##############################################################################################################################\n"
exec bash -c "BRANCH=$next_step_so_version soup -y; BRANCH=$next_step_so_version soup -y; BRANCH=reyesj2/elastic9-autosoup soup -y; BRANCH=reyesj2/elastic9-autosoup soup -y"
fi
fi
}
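
The core of verify_es_version_compatibility is a whitespace-delimited substring match against the es_upgrade_map associative array; exit code 160 feeds the new 'Incompatible Elasticsearch upgrade' branch in check_err. A reduced, runnable sketch of just that lookup, with versions taken from the map above:

    declare -A es_upgrade_map=( ["8.18.6"]="8.18.8 9.0.8" )
    es_version="8.18.6"; target_es_version="9.0.8"
    # Padding both sides with spaces makes the quoted pattern match whole version tokens only.
    if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
      echo "supported upgrade path"
    else
      echo "unsupported path - soup would exit 160"
    fi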
# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1708,6 +1786,8 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script
verify_es_version_compatibility
echo "Let's see if we need to update Security Onion."
upgrade_check
upgrade_space