Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)

Merge pull request #14439 from Security-Onion-Solutions/2.4/dev

2.4.140
.github/DISCUSSION_TEMPLATE/2-4.yml
@@ -25,6 +25,7 @@ body:
         - 2.4.111
         - 2.4.120
         - 2.4.130
+        - 2.4.140
         - Other (please provide detail below)
     validations:
       required: true
@@ -1,17 +1,17 @@
-### 2.4.130-20250311 ISO image released on 2025/03/11
+### 2.4.140-20250324 ISO image released on 2025/03/24


 ### Download and Verify

-2.4.130-20250311 ISO image:
-https://download.securityonion.net/file/securityonion/securityonion-2.4.130-20250311.iso
+2.4.140-20250324 ISO image:
+https://download.securityonion.net/file/securityonion/securityonion-2.4.140-20250324.iso

-MD5: 4641CA710570CCE18CD7D50653373DC0
-SHA1: 786EF73F7945FDD80126C9AE00BDD29E58743715
-SHA256: 48C7A042F20C46B8087BAE0F971696DADE9F9364D52F416718245C16E7CCB977
+MD5: 36393200A5CEEC5B58277691DDAFF247
+SHA1: 48655378C732CF47A6B3290F6F07F4F3162BE054
+SHA256: 470E00245EBAD83C045743CFB27885CEC3E1F057D91081906B240A38B6D3759A

 Signature for ISO image:
-https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.130-20250311.iso.sig
+https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.140-20250324.iso.sig

 Signing key:
 https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.

 Download the signature file for the ISO:
 ```
-wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.130-20250311.iso.sig
+wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.140-20250324.iso.sig
 ```

 Download the ISO image:
 ```
-wget https://download.securityonion.net/file/securityonion/securityonion-2.4.130-20250311.iso
+wget https://download.securityonion.net/file/securityonion/securityonion-2.4.140-20250324.iso
 ```

 Verify the downloaded ISO image using the signature file:
 ```
-gpg --verify securityonion-2.4.130-20250311.iso.sig securityonion-2.4.130-20250311.iso
+gpg --verify securityonion-2.4.140-20250324.iso.sig securityonion-2.4.140-20250324.iso
 ```

 The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
 ```
-gpg: Signature made Mon 10 Mar 2025 06:30:49 PM EDT using RSA key ID FE507013
+gpg: Signature made Sun 23 Mar 2025 08:37:47 PM EDT using RSA key ID FE507013
 gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
 gpg: WARNING: This key is not certified with a trusted signature!
 gpg: There is no indication that the signature belongs to the owner.
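The verification step assumes the Security Onion signing key is already in the local GPG keyring. The truncated hunk header above shows the start of that download line; a minimal sketch of the import, using the KEYS URL shown earlier (the exact command in the full document may differ slightly):
```
# Download the signing key and import it into the local GPG keyring (requires wget and gpg).
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS -O - | gpg --import -
```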
@@ -64,6 +64,12 @@ copy_so-repo-sync_manager_tools_sbin:
     - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-repo-sync
     - preserve: True

+copy_bootstrap-salt_manager_tools_sbin:
+  file.copy:
+    - name: /opt/so/saltstack/default/salt/salt/scripts/bootstrap-salt.sh
+    - source: {{UPDATE_DIR}}/salt/salt/scripts/bootstrap-salt.sh
+    - preserve: True
+
 # This section is used to put the new script in place so that it can be called during soup.
 # It is faster than calling the states that normally manage them to put them in place.
 copy_so-common_sbin:
@@ -108,6 +114,13 @@ copy_so-repo-sync_sbin:
     - force: True
     - preserve: True

+copy_bootstrap-salt_sbin:
+  file.copy:
+    - name: /usr/sbin/bootstrap-salt.sh
+    - source: {{UPDATE_DIR}}/salt/salt/scripts/bootstrap-salt.sh
+    - force: True
+    - preserve: True
+
 {# this is added in 2.4.120 to remove salt repo files pointing to saltproject.io to accomodate the move to broadcom and new bootstrap-salt script #}
 {% if salt['pkg.version_cmp'](SOVERSION, '2.4.120') == -1 %}
 {% set saltrepofile = '/etc/yum.repos.d/salt.repo' %}
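For orientation, each `file.copy` state above behaves roughly like the shell copy below, where `force` overwrites an existing file and `preserve` keeps ownership, mode, and timestamps (a sketch of the effect only; soup applies these states through Salt, and `UPDATE_DIR` is the variable from the diff above):
```
# Rough shell equivalent of copy_bootstrap-salt_sbin (illustrative only).
cp --preserve=all -f "${UPDATE_DIR}/salt/salt/scripts/bootstrap-salt.sh" /usr/sbin/bootstrap-salt.sh
```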
@@ -127,6 +127,7 @@ if [[ $EXCLUDE_STARTUP_ERRORS == 'Y' ]]; then
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|process already finished" # Telegraf script finished just as the auto kill timeout kicked in
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|No shard available" # Typical error when making a query before ES has finished loading all indices
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|responded with status-code 503" # telegraf getting 503 from ES during startup
+  EXCLUDED_ERRORS="$EXCLUDED_ERRORS|process_cluster_event_timeout_exception" # logstash waiting for elasticsearch to start
 fi

 if [[ $EXCLUDE_FALSE_POSITIVE_ERRORS == 'Y' ]]; then
@@ -155,6 +156,7 @@ if [[ $EXCLUDE_FALSE_POSITIVE_ERRORS == 'Y' ]]; then
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|request_unauthorized" # false positive (login failures to Hydra result in an 'error' log)
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|adding index lifecycle policy" # false positive (elasticsearch policy names contain 'error')
   EXCLUDED_ERRORS="$EXCLUDED_ERRORS|adding ingest pipeline" # false positive (elasticsearch ingest pipeline names contain 'error')
+  EXCLUDED_ERRORS="$EXCLUDED_ERRORS|updating index template" # false positive (elasticsearch index or template names contain 'error')
 fi

 if [[ $EXCLUDE_KNOWN_ERRORS == 'Y' ]]; then
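Each of these lines appends one more alternative to a single extended-regex alternation that the script later uses to drop known-benign matches. A hedged sketch of how such a pattern is typically consumed (the grep pipeline and log path are illustrative, not the script's actual filtering logic):
```
# Build a small exclusion alternation and filter it out of a candidate error list (illustrative).
EXCLUDED_ERRORS="process already finished"
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|No shard available"
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|process_cluster_event_timeout_exception"
grep -iE "error" /tmp/example.log | grep -vE "$EXCLUDED_ERRORS"
```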
@@ -108,7 +108,7 @@ elastic_fleet_package_is_installed() {
 }

 elastic_fleet_installed_packages() {
-  curl -s -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X GET -H 'kbn-xsrf: true' -H 'Content-Type: application/json' "localhost:5601/api/fleet/epm/packages/installed?perPage=300"
+  curl -s -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X GET -H 'kbn-xsrf: true' -H 'Content-Type: application/json' "localhost:5601/api/fleet/epm/packages/installed?perPage=500"
 }

 elastic_fleet_agent_policy_ids() {
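Raising `perPage` from 300 to 500 lifts the per-request cap so larger sets of installed packages come back in a single call. A hedged way to sanity-check the count on a manager (the `jq` filter assumes the Fleet response wraps results in an `items` array, which is an assumption here, not something shown in the diff):
```
# Count installed Fleet packages returned by the endpoint above (illustrative).
curl -s -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L \
  -H 'kbn-xsrf: true' -H 'Content-Type: application/json' \
  "localhost:5601/api/fleet/epm/packages/installed?perPage=500" | jq '.items | length'
```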
@@ -1,9 +1,25 @@
{
  "description":"zeek.ldap_search",
  "processors":[
    {"pipeline": {"name": "zeek.ldap", "ignore_missing_pipeline":true,"ignore_failure":true}},
    {"set": {"field": "event.dataset", "value":"ldap_search"}},
    {"remove": {"field": "tags", "ignore_missing":true}},
    {"json": {"field": "message", "target_field": "message2", "ignore_failure": true}},
    {"rename": {"field": "message2.message_id", "target_field": "ldap.message_id", "ignore_missing": true}},
    {"rename": {"field": "message2.opcode", "target_field": "ldap.opcode", "ignore_missing": true}},
    {"rename": {"field": "message2.result", "target_field": "ldap.result", "ignore_missing": true}},
    {"rename": {"field": "message2.diagnostic_message", "target_field": "ldap.diagnostic_message", "ignore_missing": true}},
    {"rename": {"field": "message2.version", "target_field": "ldap.version", "ignore_missing": true}},
    {"rename": {"field": "message2.object", "target_field": "ldap.object", "ignore_missing": true}},
    {"rename": {"field": "message2.argument", "target_field": "ldap.argument", "ignore_missing": true}},
    {"rename": {"field": "message2.scope", "target_field": "ldap_search.scope", "ignore_missing":true}},
    {"rename": {"field": "message2.deref_aliases", "target_field": "ldap_search.deref_aliases", "ignore_missing":true}},
    {"rename": {"field": "message2.base_object", "target_field": "ldap.object", "ignore_missing":true}},
    {"rename": {"field": "message2.result_count", "target_field": "ldap_search.result_count", "ignore_missing":true}},
    {"rename": {"field": "message2.filter", "target_field": "ldap_search.filter", "ignore_missing":true}},
    {"rename": {"field": "message2.attributes", "target_field": "ldap_search.attributes", "ignore_missing":true}},
    {"script": {"source": "if (ctx.containsKey('ldap') && ctx.ldap.containsKey('diagnostic_message') && ctx.ldap.diagnostic_message != null) {\n String message = ctx.ldap.diagnostic_message;\n\n // get user and property from SASL success\n if (message.toLowerCase().contains(\"sasl(0): successful result\")) {\n Pattern pattern = /user:\s*([^ ]+)\s*property:\s*([^ ]+)/i;\n Matcher matcher = pattern.matcher(message);\n if (matcher.find()) {\n ctx.ldap.user_email = matcher.group(1); // Extract user email\n ctx.ldap.property = matcher.group(2); // Extract property\n }\n }\n if (message.toLowerCase().contains(\"ldaperr:\")) {\n Pattern pattern = /comment:\s*([^,]+)/i;\n Matcher matcher = pattern.matcher(message);\n\n if (matcher.find()) {\n ctx.ldap.comment = matcher.group(1);\n }\n }\n }","ignore_failure": true}},
    {"script": {"source": "if (ctx.containsKey('ldap') && ctx.ldap.containsKey('object') && ctx.ldap.object != null) {\n String message = ctx.ldap.object;\n\n // parse common name from ldap object\n if (message.toLowerCase().contains(\"cn=\")) {\n Pattern pattern = /cn=([^,]+)/i;\n Matcher matcher = pattern.matcher(message);\n if (matcher.find()) {\n ctx.ldap.common_name = matcher.group(1); // Extract CN\n }\n }\n // build domain from ldap object\n if (message.toLowerCase().contains(\"dc=\")) {\n Pattern dcPattern = /dc=([^,]+)/i;\n Matcher dcMatcher = dcPattern.matcher(message);\n\n StringBuilder domainBuilder = new StringBuilder();\n while (dcMatcher.find()) {\n if (domainBuilder.length() > 0 ){\n domainBuilder.append(\".\");\n }\n domainBuilder.append(dcMatcher.group(1));\n }\n if (domainBuilder.length() > 0) {\n ctx.ldap.domain = domainBuilder.toString();\n }\n }\n // create list of any organizational units from ldap object\n if (message.toLowerCase().contains(\"ou=\")) {\n Pattern ouPattern = /ou=([^,]+)/i;\n Matcher ouMatcher = ouPattern.matcher(message);\n ctx.ldap.organizational_unit = [];\n\n while (ouMatcher.find()) {\n ctx.ldap.organizational_unit.add(ouMatcher.group(1));\n }\n if(ctx.ldap.organizational_unit.isEmpty()) {\n ctx.remove(\"ldap.organizational_unit\");\n }\n }\n}\n","ignore_failure": true}},
    {"remove": {"field": "message2.tags", "ignore_failure": true}},
    {"remove": {"field": ["host"], "ignore_failure": true}},
    {"pipeline": {"name": "zeek.common"}}
  ]
}
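One way to exercise a pipeline definition like this is Elasticsearch's `_simulate` API. A hedged sketch (the pipeline name is taken from its description, and the endpoint, authentication handling, and sample fields are illustrative assumptions; the referenced `zeek.*` sub-pipelines must already be loaded for the final processor to resolve):
```
# Run one synthetic ldap_search document through the pipeline and inspect the result.
curl -s -X POST "localhost:9200/_ingest/pipeline/zeek.ldap_search/_simulate" \
  -H 'Content-Type: application/json' \
  -d '{"docs":[{"_source":{"message":"{\"message_id\":1,\"opcode\":\"search\",\"base_object\":\"CN=Users,DC=example,DC=com\",\"result_count\":2}"}}]}'
```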
@@ -43,7 +43,7 @@ global:
     global: True
     advanced: True
   pipeline:
-    description: Sets which pipeline technology for events to use. Currently only Redis is fully supported. Kafka is experimental and requires a Security Onion Pro license.
+    description: Sets which pipeline technology for events to use. The use of Kafka requires a Security Onion Pro license.
     regex: ^(REDIS|KAFKA)$
     options:
       - REDIS
@@ -407,6 +407,7 @@ preupgrade_changes() {
   [[ "$INSTALLEDVERSION" == 2.4.110 ]] && up_to_2.4.111
   [[ "$INSTALLEDVERSION" == 2.4.111 ]] && up_to_2.4.120
   [[ "$INSTALLEDVERSION" == 2.4.120 ]] && up_to_2.4.130
+  [[ "$INSTALLEDVERSION" == 2.4.130 ]] && up_to_2.4.140
   true
 }

@@ -431,6 +432,7 @@ postupgrade_changes() {
   [[ "$POSTVERSION" == 2.4.110 ]] && post_to_2.4.111
   [[ "$POSTVERSION" == 2.4.111 ]] && post_to_2.4.120
   [[ "$POSTVERSION" == 2.4.120 ]] && post_to_2.4.130
+  [[ "$POSTVERSION" == 2.4.130 ]] && post_to_2.4.140
   true
 }
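Each guard above advances one release: every `up_to_X`/`post_to_X` helper ends by setting `INSTALLEDVERSION`/`POSTVERSION` to its own version, so the next guard in the list matches and the chain walks forward one step per release. A minimal standalone sketch of the same cascade pattern (version numbers and echo messages are illustrative, not soup's actual migration steps):
```
#!/bin/bash
# Each step applies one release's changes and advances the marker so the next guard fires.
INSTALLEDVERSION=2.4.120

up_to_2.4.130() { echo "applying 2.4.130 changes"; INSTALLEDVERSION=2.4.130; }
up_to_2.4.140() { echo "applying 2.4.140 changes"; INSTALLEDVERSION=2.4.140; }

[[ "$INSTALLEDVERSION" == 2.4.120 ]] && up_to_2.4.130
[[ "$INSTALLEDVERSION" == 2.4.130 ]] && up_to_2.4.140
true
```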
@@ -553,6 +555,11 @@ post_to_2.4.130() {
   POSTVERSION=2.4.130
 }

+post_to_2.4.140() {
+  echo "Nothing to apply"
+  POSTVERSION=2.4.140
+}
+
 repo_sync() {
   echo "Sync the local repo."
   su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -773,6 +780,12 @@ up_to_2.4.130() {
   INSTALLEDVERSION=2.4.130
 }

+up_to_2.4.140() {
+  echo "Nothing to do for 2.4.140"
+
+  INSTALLEDVERSION=2.4.140
+}
+
 add_hydra_pillars() {
   mkdir -p /opt/so/saltstack/local/pillar/hydra
   touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1085,7 +1098,7 @@ upgrade_check() {
 }

 upgrade_check_salt() {
-  NEWSALTVERSION=$(grep version: $UPDATE_DIR/salt/salt/master.defaults.yaml | awk '{print $2}')
+  NEWSALTVERSION=$(grep "version:" $UPDATE_DIR/salt/salt/master.defaults.yaml | grep -o "[0-9]\+\.[0-9]\+")
   if [ "$INSTALLEDSALTVERSION" == "$NEWSALTVERSION" ]; then
     echo "You are already running the correct version of Salt for Security Onion."
   else
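The new extraction pulls out only the numeric version, so it keeps working now that the Salt defaults files quote the value (see the `version: '3006.9'` changes further down). A quick hedged illustration of the difference (the echoed line mimics the pillar file):
```
# Old extraction: once the pillar value is quoted, awk keeps the surrounding quotes.
echo "    version: '3006.9'" | grep version: | awk '{print $2}'              # -> '3006.9'
# New extraction: return just the numeric version regardless of quoting.
echo "    version: '3006.9'" | grep "version:" | grep -o "[0-9]\+\.[0-9]\+"  # -> 3006.9
```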
@@ -1231,26 +1244,6 @@ failed_soup_restore_items() {
   masterunlock
 }

-#upgrade salt to 3004.1
-#2_3_10_hotfix_1() {
-# systemctl_func "stop" "$cron_service_name"
-# # update mine items prior to stopping salt-minion and salt-master
-# update_salt_mine
-# stop_salt_minion
-# stop_salt_master
-# update_repo
-# # Does salt need upgraded. If so update it.
-# if [[ $UPGRADESALT -eq 1 ]]; then
-# echo "Upgrading Salt"
-# # Update the repo files so it can actually upgrade
-# upgrade_salt
-# fi
-# systemctl_func "start" "salt-master"
-# systemctl_func "start" "salt-minion"
-# systemctl_func "start" "$cron_service_name"
-
-#}
-
 main() {
   trap 'check_err $?' EXIT

@@ -1446,6 +1439,9 @@ main() {
   # Stop long-running scripts to allow potentially updated scripts to load on the next execution.
   killall salt-relay.sh

+  # ensure the mine is updated and populated before highstates run, following the salt-master restart
+  update_salt_mine
+
   highstate
   postupgrade_changes
   [[ $is_airgap -eq 0 ]] && unmount_update
@@ -79,13 +79,6 @@ pcaptmpdir:
     - group: 941
     - makedirs: True

-pcapoutdir:
-  file.directory:
-    - name: /nsm/pcapout
-    - user: 939
-    - group: 939
-    - makedirs: True
-
 pcapindexdir:
   file.directory:
     - name: /nsm/pcapindex
@@ -24,3 +24,10 @@ pcapdir:
     - user: 941
     - group: 941
     - makedirs: True
+
+pcapoutdir:
+  file.directory:
+    - name: /nsm/pcapout
+    - user: 939
+    - group: 939
+    - makedirs: True
@@ -1,5 +1,11 @@
+{# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
+or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
+https://securityonion.net/license; you may not use this file except in compliance with the
+Elastic License 2.0. #}
+
 {% import_yaml 'salt/minion.defaults.yaml' as saltminion %}
-{% set SALTVERSION = saltminion.salt.minion.version %}
+{% set SALTVERSION = saltminion.salt.minion.version | string %}
+{% set INSTALLEDSALTVERSION = grains.saltversion | string %}

 {% if grains.os_family == 'Debian' %}
   {% set SPLITCHAR = '+' %}
@@ -11,9 +17,7 @@
   {% set SYSTEMD_UNIT_FILE = '/usr/lib/systemd/system/salt-minion.service' %}
 {% endif %}

-{% set INSTALLEDSALTVERSION = grains.saltversion %}
-
-{% if grains.saltversion|string != SALTVERSION|string %}
+{% if INSTALLEDSALTVERSION != SALTVERSION %}
   {% if grains.os_family|lower == 'redhat' %}
     {% set UPGRADECOMMAND = 'yum clean all ; /usr/sbin/bootstrap-salt.sh -s 120 -r -F stable ' ~ SALTVERSION %}
   {% elif grains.os_family|lower == 'debian' %}
@@ -1,4 +1,4 @@
 # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
 salt:
   master:
-    version: 3006.9
+    version: '3006.9'
@@ -1,6 +1,6 @@
 # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
 salt:
   minion:
-    version: 3006.9
+    version: '3006.9'
     check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default
     service_start_delay: 30 # in seconds.
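Quoting the version keeps YAML from loading it as a number, which matters for the string comparisons against `grains.saltversion` shown earlier (and a value like 3006.10 would otherwise lose its trailing zero). A hedged illustration of the parsing difference, using python3 with PyYAML purely to show the behavior, not anything soup itself runs:
```
# Unquoted: YAML parses the value as a float.
python3 -c "import yaml; print(yaml.safe_load(\"version: 3006.10\"))"    # {'version': 3006.1}
# Quoted: the value stays a string.
python3 -c "import yaml; print(yaml.safe_load(\"version: '3006.10'\"))"  # {'version': '3006.10'}
```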
File diff suppressed because it is too large
@@ -89,7 +89,7 @@ function manage_user() {
     add)
       email=$(echo "$request" | jq -r .email)
       password=$(echo "$request" | jq -r .password)
-      perm=$(echo "$request" | jq -r .role)
+      role=$(echo "$request" | jq -r .role)
      firstName=$(echo "$request" | jq -r .firstName)
      lastName=$(echo "$request" | jq -r .lastName)
      note=$(echo "$request" | jq -r .note)
@@ -283,7 +283,7 @@ function send_file() {

   log "encrypting..."
   password=$(lookup_pillar_secret import_pass)
-  response=$(gpg --passphrase "$password" --batch --symmetric --cipher-algo AES256 "$from")
+  response=$(gpg --passphrase "$password" --batch --yes --symmetric --cipher-algo AES256 "$from")
   log Response:$'\n'"$response"

   fromgpg="$from.gpg"
@@ -329,12 +329,11 @@ function import_file() {

   log "decrypting..."
   password=$(lookup_pillar_secret import_pass)
-  decrypt_cmd="gpg --passphrase $password -o $file.tmp --batch --decrypt $filegpg"
+  decrypt_cmd="gpg --passphrase $password -o $file --batch --yes --decrypt $filegpg"
   salt "$node" cmd.run "\"$decrypt_cmd\""
   decrypt_code=$?

   if [[ $decrypt_code -eq 0 ]]; then
-    mv "$file.tmp" "$file"
     log "importing..."
     case $importer in
       pcap)
@@ -357,7 +356,7 @@ function import_file() {
     exit_code=$decrypt_code
   fi

-  rm -f "$file" "$filegpg"
+  salt "$node" cmd.run "rm -f \"$file\" \"$filegpg\""

   log Response:$'\n'"$response"
   log "Exit Code: $exit_code"
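Both gpg calls now pass `--batch --yes`, so an existing output file is overwritten instead of triggering an interactive prompt that would stall a non-interactive run, and decryption now writes directly to the final path on the target node. A hedged round-trip using the same flags (file names and passphrase are placeholders; depending on the local GnuPG configuration, loopback pinentry may also be required for `--passphrase` to take effect):
```
echo "sample" > /tmp/so-import-demo.txt
# Encrypt: writes /tmp/so-import-demo.txt.gpg, silently replacing any earlier copy.
gpg --passphrase demo-pass --batch --yes --symmetric --cipher-algo AES256 /tmp/so-import-demo.txt
# Decrypt straight to the output path, again overwriting without prompting.
gpg --passphrase demo-pass -o /tmp/so-import-demo.out --batch --yes --decrypt /tmp/so-import-demo.txt.gpg
```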
@@ -63,4 +63,5 @@ zeek:
       duplicates: True
     file_extraction:
       description: Contains a list of file or MIME types Zeek will extract from the network streams. Values must adhere to the following format - {"MIME_TYPE":"FILE_EXTENSION"}
+      forcedType: "[]{}"
       helpLink: zeek.html
@@ -95,7 +95,7 @@ analyze_system() {

 desktop_salt_local() {

-  SALTVERSION=$(egrep 'version: [0-9]{4}' ../salt/salt/master.defaults.yaml | sed 's/^.*version: //')
+  SALTVERSION=$(grep "version:" ../salt/salt/master.defaults.yaml | grep -o "[0-9]\+\.[0-9]\+")
   # Install everything using local salt
   # Set the repo
   securityonion_repo
@@ -1817,7 +1817,7 @@ securityonion_repo() {
 }

 repo_sync_local() {
-  SALTVERSION=$(egrep 'version: [0-9]{4}' ../salt/salt/master.defaults.yaml | sed 's/^.*version: //')
+  SALTVERSION=$(grep "version:" ../salt/salt/master.defaults.yaml | grep -o "[0-9]\+\.[0-9]\+")
   info "Repo Sync"
   if [[ $is_supported ]]; then
     # Sync the repo from the the SO repo locally.
@@ -1878,7 +1878,7 @@ repo_sync_local() {

 saltify() {
   info "Installing Salt"
-  SALTVERSION=$(egrep 'version: [0-9]{4}' ../salt/salt/master.defaults.yaml | sed 's/^.*version: //')
+  SALTVERSION=$(grep "version:" ../salt/salt/master.defaults.yaml | grep -o "[0-9]\+\.[0-9]\+")
   if [[ $is_deb ]]; then

     DEBIAN_FRONTEND=noninteractive retry 150 20 "apt-get -y -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" upgrade" >> "$setup_log" 2>&1 || fail_setup
BIN sigs/securityonion-2.4.140-20250324.iso.sig (new file; binary file not shown)