Merge branch 'dev' into kilo
@@ -663,6 +663,15 @@
 
+# # Read metrics from one or more commands that can output to stdout
+[[inputs.exec]]
+  commands = [
+    "/scripts/sostatus.sh"
+  ]
+  data_format = "influx"
+  timeout = "15s"
+  interval = "180s"
+
 
 # ## Commands array
 {% if grains['role'] in ['so-manager', 'so-managersearch'] %}
 [[inputs.exec]]
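
With data_format = "influx", the exec input parses whatever the command prints on stdout as InfluxDB line protocol. Below is a minimal sketch of such a script; the measurement and field names are hypothetical and the real output of /scripts/sostatus.sh may differ:

    #!/bin/bash
    # Hypothetical exec-input script emitting InfluxDB line protocol on stdout.
    # Measurement "sostatus" and its fields are illustrative only.
    host=$(hostname)
    failed_units=$(systemctl --failed --no-legend 2>/dev/null | wc -l)
    load1=$(awk '{print $1}' /proc/loadavg)
    echo "sostatus,host=${host} failed_units=${failed_units}i,load1=${load1}"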
@@ -563,7 +563,7 @@ collect_patch_schedule_name_import() {
 
 collect_proxy() {
   [[ -n $TESTING ]] && return
-  collect_proxy_details
+  collect_proxy_details || return
   while ! proxy_validate; do
     if whiptail_invalid_proxy; then
       collect_proxy_details no_ask
@@ -608,6 +608,8 @@ collect_proxy_details() {
       so_proxy="$proxy_addr"
     fi
     export so_proxy
+  else
+    return 1
   fi
 }
 
@@ -1870,12 +1872,13 @@ print_salt_state_apply() {
 }
 
 proxy_validate() {
   echo "Testing proxy..."
   local test_url="https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS"
-  proxy_test_err=$(curl -sS "$test_url" --proxy "$so_proxy" 2>&1)
+  proxy_test_err=$(curl -sS "$test_url" --proxy "$so_proxy" --connect-timeout 5 2>&1) # set short connection timeout so user doesn't sit waiting for proxy test to timeout
   local ret=$?
 
   if [[ $ret != 0 ]]; then
-    error "Could not reach $test_url using proxy $so_proxy"
+    error "Could not reach $test_url using proxy provided"
+    error "Received error: $proxy_test_err"
     if [[ -n $TESTING ]]; then
       error "Exiting setup"
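
proxy_validate relies only on curl's exit status, and the 5-second connect timeout makes an unreachable proxy fail fast instead of hanging. A standalone sketch of the same check, using a placeholder proxy URL:

    #!/bin/bash
    # Minimal sketch of the proxy-reachability pattern above.
    test_url="https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS"
    proxy="http://proxy.example.com:3128"   # placeholder proxy for illustration
    if err=$(curl -sS "$test_url" --proxy "$proxy" --connect-timeout 5 -o /dev/null 2>&1); then
      echo "Proxy OK"
    else
      echo "Proxy test failed: $err" >&2
    fi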
@@ -558,7 +558,6 @@ if [[ $is_node && ! $is_eval ]]; then
     LSPIPELINEWORKERS=$num_cpu_cores
     LSPIPELINEBATCH=125
     LSINPUTTHREADS=1
-    LSPIPELINEBATCH=125
   fi
 fi
 
@@ -572,14 +571,14 @@ fi
 
 if [[ $is_manager || $is_import ]]; then collect_so_allow; fi
 
-whiptail_make_changes
+# This block sets REDIRECTIT which is used by a function outside the below subshell
+set_redirect >> $setup_log 2>&1
+
+whiptail_end_settings
 
 # From here on changes will be made.
 echo "1" > /root/accept_changes
 
-# This block sets REDIRECTIT which is used by a function outside the below subshell
-set_redirect >> $setup_log 2>&1
-
 
 # Begin install
 {
@@ -962,6 +961,7 @@ else
   } | whiptail_gauge_post_setup "Running post-installation steps..."
 
   whiptail_setup_complete
+  [[ $setup_type != 'iso' ]] && whitpail_ssh_warning
   echo "Post-installation steps have completed." >> $setup_log 2>&1
 fi
 
@@ -391,6 +391,7 @@ whiptail_dockernet_net() {
   whiptail_check_exitstatus $exitstatus
 
 }
+
 whiptail_enable_components() {
 
   [ -n "$TESTING" ] && return
@@ -423,6 +424,211 @@ whiptail_enable_components() {
   done
 }
 
+whiptail_end_settings() {
+  [ -n "$TESTING" ] && return
+
+  # BASIC INFO (NETWORK, HOSTNAME, DESCRIPTION, ETC)
+
+  read -r -d '' end_msg <<- EOM
+Node Type: $install_type
+Hostname: $HOSTNAME
+EOM
+
+  [[ -n $NODE_DESCRIPTION ]] && __append_end_msg "Description: $NODE_DESCRIPTION"
+
+  [[ $is_airgap ]] && __append_end_msg "Airgap: True"
+
+  if [[ $is_minion ]]; then
+    __append_end_msg "Manager Hostname: $MSRV"
+    __append_end_msg "Manager IP: $MSRVIP"
+  fi
+
+  [[ $is_iso ]] && __append_end_msg "Network: $address_type"
+
+  __append_end_msg "Management NIC: $MNIC"
+  __append_end_msg "Management IP: $MAINIP"
+
+  if [[ $address_type == 'STATIC' ]]; then
+    __append_end_msg "Gateway: $MGATEWAY"
+    __append_end_msg "DNS: $MDNS"
+    __append_end_msg "DNS Domain: $MSEARCH"
+  fi
+
+  if [[ -n $so_proxy ]]; then
+    __append_end_msg "Proxy:"
+    __append_end_msg " Server URL: $proxy_addr"
+    [[ -n $proxy_user ]] && __append_end_msg " User: $proxy_user"
+  else
+    __append_end_msg "Proxy: N/A"
+  fi
+
+  if [[ $is_sensor ]]; then
+    __append_end_msg "Bond NIC(s):"
+    for nic in "${BNICS[@]}"; do
+      __append_end_msg " - $nic"
+    done
+    [[ -n $MTU ]] && __append_end_msg "MTU: $MTU"
+  fi
+
+  local homenet_arr
+  if [[ -n $HNMANAGER ]]; then
+    __append_end_msg "Home Network(s):"
+    IFS="," read -r -a homenet_arr <<< "$HNMANAGER"
+    for net in "${homenet_arr[@]}"; do
+      __append_end_msg " - $net"
+    done
+  elif [[ -n $HNSENSOR ]]; then
+    __append_end_msg "Home Network(s):"
+    IFS="," read -r -a homenet_arr <<< "$HNSENSOR"
+    for net in "${homenet_arr[@]}"; do
+      __append_end_msg " - $net"
+    done
+  fi
+
+  [[ -n $REDIRECTIT ]] && __append_end_msg "Access URL: https://${REDIRECTIT}"
+
+  [[ -n $ALLOW_CIDR ]] && __append_end_msg "Allowed IP or Subnet: $ALLOW_CIDR"
+
+  [[ -n $WEBUSER ]] && __append_end_msg "Web User: $WEBUSER"
+
+  [[ -n $FLEETNODEUSER ]] && __append_end_msg "Fleet User: $FLEETNODEUSER"
+
+  if [[ $is_manager ]]; then
+    __append_end_msg "Enabled Optional Components:"
+    for component in "${COMPONENTS[@]}"; do
+      __append_end_msg " - $component"
+    done
+  fi
+
+  # METADATA / IDS
+
+  if [[ -n $ZEEKVERSION ]]; then
+    local md_tool_string=${ZEEKVERSION,,}
+    md_tool_string=${md_tool_string^}
+
+    __append_end_msg "Metadata Tool: $md_tool_string"
+  fi
+
+  [[ -n $RULESETUP ]] && __append_end_msg "IDS Ruleset: $RULESETUP"
+  [[ -n $OINKCODE ]] && __append_end_msg "Oinkcode: $OINKCODE"
+
+  # PATCH SCHEDULE
+
+  if [[ -n $PATCHSCHEDULENAME ]]; then
+    __append_end_msg "Patch Schedule:"
+    if [[ $PATCHSCHEDULENAME == 'auto' || $PATCHSCHEDULENAME == 'manual' ]]; then
+      __append_end_msg " Type: $PATCHSCHEDULENAME"
+    else
+      __append_end_msg " Name: $PATCHSCHEDULENAME"
+    fi
+    if [[ ${#PATCHSCHEDULEDAYS[@]} -gt 0 ]]; then
+      __append_end_msg " Day(s):"
+      for day in "${PATCHSCHEDULEDAYS[@]}"; do
+        __append_end_msg " - $day"
+      done
+    fi
+    if [[ ${#PATCHSCHEDULEHOURS[@]} -gt 0 ]]; then
+      __append_end_msg " Hour(s):"
+      for hour in "${PATCHSCHEDULEHOURS[@]}"; do
+        __append_end_msg " - $hour"
+      done
+    fi
+  fi
+
+  # MISC
+
+  [[ $is_helix ]] && __append_end_msg "Helix API key: $HELIXAPIKEY"
+  [[ -n $DOCKERNET ]] && __append_end_msg "Docker network: $DOCKERNET"
+  if [[ -n $MANAGERUPDATES ]]; then
+    __append_end_msg "OS Package Updates: Manager"
+  else
+    __append_end_msg "OS Package Updates: Open"
+  fi
+  if [[ ${#ntp_servers[@]} -gt 0 ]]; then
+    __append_end_msg "NTP Servers:"
+    for server in "${ntp_servers[@]}"; do
+      __append_end_msg " - $server"
+    done
+  fi
+
+  if [[ $NSMSETUP != 'ADVANCED' ]]; then
+    [[ -n $BASICZEEK ]] && __append_end_msg "Zeek Processes: $BASICZEEK"
+    [[ -n $BASICSURI ]] && __append_end_msg "Suricata Processes: $BASICSURI"
+  fi
+
+  # ADVANCED OR REGULAR
+
+  if [[ $NODESETUP == 'NODEADVANCED' ]]; then
+    __append_end_msg "Advanced Node Settings:"
+    __append_end_msg " Elasticsearch Heap Size: $NODE_ES_HEAP_SIZE"
+    __append_end_msg " Logstash Heap Size: $NODE_LS_HEAP_SIZE"
+    __append_end_msg " Logstash Worker Count: $LSPIPELINEWORKERS"
+    __append_end_msg " Logstash Batch Size: $LSPIPELINEBATCH"
+    __append_end_msg " Logstash Input Threads: $LSINPUTTHREADS"
+    __append_end_msg " Curator Day Cutoff: $CURCLOSEDAYS days"
+    __append_end_msg " Elasticsearch Storage Space: ${log_size_limit}GB"
+  else
+    __append_end_msg "Elasticsearch Heap Size: $NODE_ES_HEAP_SIZE"
+    __append_end_msg "Logstash Heap Size: $NODE_LS_HEAP_SIZE"
+    __append_end_msg "Logstash Worker Count: $LSPIPELINEWORKERS"
+    __append_end_msg "Logstash Batch Size: $LSPIPELINEBATCH"
+    __append_end_msg "Logstash Input Threads: $LSINPUTTHREADS"
+    __append_end_msg "Curator Close After: $CURCLOSEDAYS days"
+    __append_end_msg "Elasticsearch Storage Space: ${log_size_limit}GB"
+  fi
+
+
+  # ADVANCED
+  if [[ $MANAGERADV == 'ADVANCED' ]]; then
+    __append_end_msg "Advanced Manager Settings:"
+    [[ -n $ESCLUSTERNAME ]] && __append_end_msg " ES Cluster Name: $ESCLUSTERNAME"
+    if [[ ${#BLOGS[@]} -gt 0 ]]; then
+      __append_end_msg " Zeek Logs Enabled:"
+      for log in "${BLOGS[@]}"; do
+        __append_end_msg " - $log"
+      done
+    fi
+  fi
+
+  if [[ $NSMSETUP == 'ADVANCED' ]]; then
+    __append_end_msg "Advanced NSM Settings:"
+    if [[ ${#ZEEKPINS[@]} -gt 0 ]]; then
+      local zeek_pin_str
+      for core in "${ZEEKPINS[@]}"; do
+        zeek_pin_str="${zeek_pin_str}${core},"
+      done
+      zeek_pin_str=${zeek_pin_str%,}
+      __append_end_msg " Zeek Pinned Cores: ${zeek_pin_str}"
+    fi
+    if [[ ${#SURIPINS[@]} -gt 0 ]]; then
+      local suri_pin_str
+      for core in "${SURIPINS[@]}"; do
+        suri_pin_str="${suri_pin_str}${core},"
+      done
+      suri_pin_str=${suri_pin_str%,}
+      __append_end_msg " Suricata Pinned Cores: ${suri_pin_str}"
+    fi
+  fi
+
+  whiptail --title "The following options have been set, would you like to proceed?" --yesno "$end_msg" 24 75 --scrolltext
+
+  local exitstatus=$?
+  whiptail_check_exitstatus $exitstatus
+
+  echo "$end_msg" > /root/install_summary
+  printf '%s\n' 'Install summary:' "$end_msg" >> "$setup_log"
+}
+
+__append_end_msg() {
+  local newline=$1
+
+  read -r -d '' end_msg <<- EOM
+$end_msg
+$newline
+EOM
+}
+
 whiptail_eval_adv() {
 
   [ -n "$TESTING" ] && return
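
__append_end_msg grows the global end_msg by re-reading a heredoc containing the current message plus one new line. A self-contained sketch of that append pattern (the values are illustrative; note that read -d '' returns non-zero at end of input, so guard the call if running under set -e):

    #!/bin/bash
    # Standalone sketch of the heredoc-append pattern used by __append_end_msg.
    end_msg="Node Type: STANDALONE"

    append_line() {
      local newline=$1
      # Re-read the accumulated message plus the new line back into the global end_msg.
      read -r -d '' end_msg << EOM
    $end_msg
    $newline
    EOM
    }

    append_line "Hostname: example-host"   # hostname is illustrative
    append_line "Airgap: True"
    printf '%s\n' "$end_msg"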
@@ -1491,6 +1697,22 @@ whiptail_so_allow() {
   whiptail_check_exitstatus $exitstatus
 }
 
+whitpail_ssh_warning() {
+  [ -n "$TESTING" ] && return
+
+  local msg
+
+  read -r -d '' msg <<- EOM
+NOTE: You will receive a warning upon SSH reconnect that the host key has changed.
+
+This is expected due to hardening of the OpenSSH server config.
+
+The host key algorithm will now be ED25519; follow the instructions given by your SSH client to remove the old key fingerprint, then retry the connection.
+EOM
+
+  whiptail --msgbox "$msg" 14 75
+}
+
 whiptail_storage_requirements() {
   local mount=$1
   local current_val=$2
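
On the client side, the stale fingerprint that whitpail_ssh_warning refers to is typically cleared with ssh-keygen -R before reconnecting. The hostname and IP below are placeholders:

    # Remove the old host key entries from ~/.ssh/known_hosts, then reconnect
    # and accept the new ED25519 key.
    ssh-keygen -R so-manager.example.com
    ssh-keygen -R 192.0.2.10
    ssh so-manager.example.com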