Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-19 07:23:06 +01:00)

Compare commits: 8 commits, 2.4.200-20 ... reyesj2/el
| Author | SHA1 | Date |
|---|---|---|
|  | 4780371ebe |  |
|  | 6c879cbd13 |  |
|  | 089b5aaf44 |  |
|  | b61885add5 |  |
|  | 5cb1e284af |  |
|  | e3a4f0873e |  |
|  | 7977a020ac |  |
|  | d518f75468 |  |
@@ -60,7 +60,7 @@ so-elastalert:
     - watch:
       - file: elastaconf
     - onlyif:
-      - "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
+      - "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}

 delete_so-elastalert_so-status.disabled:
   file.uncomment:
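The `onlyif` gate above keys the state off the running Elasticsearch major version. A minimal, self-contained sketch of that check, where the canned response is a stand-in for what `so-elasticsearch-query /` would return on a live grid:

```bash
#!/bin/bash
# Stand-in for `so-elasticsearch-query /`; on a grid this is the cluster's root endpoint response.
sample_response='{"version":{"number":"9.0.8"}}'

# Take the first character of version.number and gate on the major version,
# mirroring the onlyif check in the state above.
if echo "$sample_response" | jq -r '.version.number[0:1]' | grep -q 9; then
  echo "Elasticsearch 9.x detected - the so-elastalert state would run"
fi
```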
@@ -5,7 +5,7 @@
   "package": {
     "name": "endpoint",
     "title": "Elastic Defend",
-    "version": "8.18.1",
+    "version": "9.0.2",
     "requires_root": true
   },
   "enabled": true,
@@ -21,6 +21,7 @@
   'azure_application_insights.app_state': 'azure.app_state',
   'azure_billing.billing': 'azure.billing',
   'azure_functions.metrics': 'azure.function',
+  'azure_ai_foundry.metrics': 'azure.ai_foundry',
   'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
   'azure_metrics.compute_vm': 'azure.compute_vm',
   'azure_metrics.container_instance': 'azure.container_instance',
@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
   latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
   echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
   rm -f $INSTALLED_PACKAGE_LIST
-  echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
+  echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST

   while read -r package; do
     # get package details
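The only change above is where the installed version is read from in the Fleet package listing (`.savedObject.attributes.install_version` becomes `.installationInfo.version`). A minimal sketch of what the updated filter produces, using a hypothetical single-package response trimmed to just the fields the filter reads (the real list comes from `/usr/sbin/so-elastic-fleet-package-list`):

```bash
#!/bin/bash
# Hypothetical, trimmed-down package entry; real responses contain many more fields.
latest_package_list='{"items":[{"name":"endpoint","version":"9.0.2","installationInfo":{"version":"8.18.1"},"conditions":{"elastic":{"subscription":"basic"}}}]}'

# Same filter as the updated script line above; prints a packages array with
# name, latest_version, installed_version, and subscription for each item.
echo "$latest_package_list" | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}'
```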
@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
         --arg KAFKACA "$KAFKACA" \
         --arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
         --arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
-        '{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
+        '{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
     )
     if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
         echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
         --arg ENABLED_DISABLED "$ENABLED_DISABLED"\
         --arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
         --argjson HOSTS "$HOSTS" \
-        '{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
+        '{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
     )
     if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
         echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"
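In both hunks the Fleet Kafka output body is built with `jq -n --arg`, and the functional change is replacing the `topics` array with a single `topic` string. A stripped-down sketch of that construction pattern, with placeholder values in place of the real grid hosts and certificates:

```bash
#!/bin/bash
# Placeholder values; on a grid these come from pillar data and the Kafka cert files.
MANAGER_IP="192.0.2.10:9092"
KAFKA_OUTPUT_VERSION="9.0.8"

# jq -n builds a JSON document from scratch; --arg injects shell values as JSON strings.
JSON_STRING=$(jq -n \
  --arg MANAGER_IP "$MANAGER_IP" \
  --arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
  '{"name":"grid-kafka","type":"kafka","hosts":[ $MANAGER_IP ],"version": $KAFKA_OUTPUT_VERSION ,"topic":"default-securityonion"}')

echo "$JSON_STRING"
```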
@@ -1,6 +1,6 @@
 elasticsearch:
   enabled: false
-  version: 8.18.8
+  version: 9.0.8
   index_clean: true
   config:
     action:
@@ -15,7 +15,7 @@ set -e
 if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
   docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
   docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
-  docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
+  docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
   docker rm so-elasticsearchca
   echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
   echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
@@ -63,7 +63,7 @@ logstash:
   settings:
     lsheap: 500m
   config:
-    http_x_host: 0.0.0.0
+    api_x_http_x_host: 0.0.0.0
    path_x_logs: /var/log/logstash
    pipeline_x_workers: 1
    pipeline_x_batch_x_size: 125
@@ -5,10 +5,10 @@ input {
     codec => es_bulk
     request_headers_target_field => client_headers
     remote_host_target_field => client_host
-    ssl => true
+    ssl_enabled => true
     ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
     ssl_certificate => "/usr/share/logstash/filebeat.crt"
     ssl_key => "/usr/share/logstash/filebeat.key"
-    ssl_verify_mode => "peer"
+    ssl_client_authentication => "required"
   }
 }
@@ -2,11 +2,11 @@ input {
   elastic_agent {
     port => 5055
     tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
-    ssl => true
+    ssl_enabled => true
     ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
     ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
     ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
-    ssl_verify_mode => "force_peer"
+    ssl_client_authentication => "required"
     ecs_compatibility => v8
   }
 }
@@ -2,7 +2,7 @@ input {
   elastic_agent {
     port => 5056
     tags => [ "elastic-agent", "fleet-lumberjack-input" ]
-    ssl => true
+    ssl_enabled => true
     ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
     ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
     ecs_compatibility => v8
@@ -8,8 +8,8 @@ output {
       document_id => "%{[metadata][_id]}"
       index => "so-ip-mappings"
       silence_errors_in_log => ["version_conflict_engine_exception"]
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
   else {
@@ -25,8 +25,8 @@ output {
       document_id => "%{[metadata][_id]}"
       pipeline => "%{[metadata][pipeline]}"
      silence_errors_in_log => ["version_conflict_engine_exception"]
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
   else {
@@ -37,8 +37,8 @@ output {
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
       pipeline => "%{[metadata][pipeline]}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
 }
@@ -49,8 +49,8 @@ output {
       data_stream => true
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
 }
@@ -13,8 +13,8 @@ output {
       user => "{{ ES_USER }}"
       password => "{{ ES_PASS }}"
       index => "endgame-%{+YYYY.MM.dd}"
-      ssl => true
-      ssl_certificate_verification => false
+      ssl_enabled => true
+      ssl_verification_mode => "none"
     }
   }
 }
@@ -56,7 +56,7 @@ logstash:
       helpLink: logstash.html
       global: False
   config:
-    http_x_host:
+    api_x_http_x_host:
       description: Host interface to listen to connections.
       helpLink: logstash.html
       readonly: True
@@ -427,6 +427,7 @@ preupgrade_changes() {
   [[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
   [[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
   [[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
+  [[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
   true
 }

@@ -459,6 +460,7 @@ postupgrade_changes() {
   [[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
   [[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
   [[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
+  [[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
   true
 }

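The new 2.4.200 guards extend the chained stepping pattern already used for earlier releases: each `up_to_*` / `post_to_*` function advances the version variable, so a single soup run walks an older install through every intermediate step in order. An illustrative-only sketch of the mechanism (toy functions; the real ones perform the actual migration work):

```bash
#!/bin/bash
# Toy stand-ins for the real upgrade functions: each applies its changes and then
# bumps INSTALLEDVERSION so the next guard in the chain can fire.
up_to_2.4.200() { echo "applying 2.4.200 changes"; INSTALLEDVERSION=2.4.200; }
up_to_2.4.210() { echo "applying 2.4.210 changes"; INSTALLEDVERSION=2.4.210; }

INSTALLEDVERSION=2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true   # keep the chain's overall exit status clean, as the real functions do
```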
@@ -615,9 +617,6 @@ post_to_2.4.180() {
 }

 post_to_2.4.190() {
-  echo "Regenerating Elastic Agent Installers"
-  /sbin/so-elastic-agent-gen-installers
-
   # Only need to update import / eval nodes
   if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
     update_import_fleet_output
@@ -645,6 +644,13 @@ post_to_2.4.200() {
   POSTVERSION=2.4.200
 }

+post_to_2.4.210() {
+  echo "Regenerating Elastic Agent Installers"
+  /sbin/so-elastic-agent-gen-installers
+
+  POSTVERSION=2.4.210
+}
+
 repo_sync() {
   echo "Sync the local repo."
   su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -906,9 +912,7 @@ up_to_2.4.180() {
 }

 up_to_2.4.190() {
-  # Elastic Update for this release, so download Elastic Agent files
-  determine_elastic_agent_upgrade
-
+  echo "Nothing to do for 2.4.190"
   INSTALLEDVERSION=2.4.190
 }

@@ -921,6 +925,13 @@ up_to_2.4.200() {
   INSTALLEDVERSION=2.4.200
 }

+up_to_2.4.210() {
+  # Elastic Update for this release, so download Elastic Agent files
+  determine_elastic_agent_upgrade
+
+  INSTALLEDVERSION=2.4.210
+}
+
 add_hydra_pillars() {
   mkdir -p /opt/so/saltstack/local/pillar/hydra
   touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1608,6 +1619,60 @@ verify_latest_update_script() {
   fi

 }
+
+verify_es_version_compatibility() {
+
+  # Define supported upgrade paths for SO ES versions
+  declare -A es_upgrade_map=(
+    ["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
+    ["8.17.3"]="8.18.4 8.18.6 8.18.8"
+    ["8.18.4"]="8.18.6 8.18.8 9.0.8"
+    ["8.18.6"]="8.18.8 9.0.8"
+    ["8.18.8"]="9.0.8"
+  )
+
+  # ES version stepping stones. Elasticsearch MUST upgrade through these versions
+  declare -A es_to_so_version=(
+    ["8.18.8"]="2.4.190-20251024"
+    ["9.0.8"]="2.4.210"
+  )
+
+  # Get current Elasticsearch version
+  if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
+    es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
+  else
+    echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
+    exit 1
+  fi
+
+  # Get the target ES version from the updatedir
+  # DOUBLE TAP ON THIS SOMEONE FROM AN OLD VERSION UPGRADING TO ANOTHER VERSION OLDER THAN 2.4.110 WOULD HAVE AN ERROR HERE. Prior to this version there was no version defined in defaults.yaml
+  target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p')
+
+  if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
+    # supported upgrade
+    return 0
+  else
+    compatible_versions=${es_upgrade_map[$es_version]}
+    next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
+    echo ""
+    echo "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION)."
+    echo ""
+
+    if [[ $is_airgap -eq 0 ]]; then
+      echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
+    else
+      echo "You can use the following soup command to upgrade to $next_step_so_version;"
+      echo " sudo BRANCH=$next_step_so_version soup"
+      echo ""
+    fi
+    echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
+    echo ""
+    exit 1
+  fi
+
+}
+
 # Keeping this block in case we need to do a hotfix that requires salt update
 apply_hotfix() {
   if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
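At its core, `verify_es_version_compatibility()` is an associative-array lookup followed by a whitespace-delimited containment test: the running Elasticsearch version is the key, and the target version must appear in its space-separated list of allowed destinations. A minimal standalone sketch of just that check (versions hard-coded here; the real function queries the cluster and reads the target from defaults.yaml in the update directory):

```bash
#!/bin/bash
# A single entry from the upgrade map is enough to show the mechanics.
declare -A es_upgrade_map=( ["8.18.8"]="9.0.8" )

es_version="8.18.8"        # in soup: parsed from the so-elasticsearch-query / response
target_es_version="9.0.8"  # in soup: read from $UPDATE_DIR/salt/elasticsearch/defaults.yaml

# Pad both sides with spaces so a version can only match as a whole token.
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " ]]; then
  echo "supported upgrade path: $es_version -> $target_es_version"
else
  echo "unsupported upgrade path; step through an intermediate release first"
fi
```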
@@ -1704,6 +1769,8 @@ main() {
   echo "Verifying we have the latest soup script."
   verify_latest_update_script

+  verify_es_version_compatibility
+
   echo "Let's see if we need to update Security Onion."
   upgrade_check
   upgrade_space