diff --git a/salt/common/tools/sbin/so-common b/salt/common/tools/sbin/so-common
index 1f63f0073..0bc2c65b0 100755
--- a/salt/common/tools/sbin/so-common
+++ b/salt/common/tools/sbin/so-common
@@ -399,6 +399,26 @@ valid_int() {
# {% raw %}
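+# Validate a proxy URL: require an http:// or https:// prefix and a host that is a valid IPv4 address or FQDN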
+valid_proxy() {
+ local proxy=$1
+ local url_prefixes=( 'http://' 'https://' )
+
+ local has_prefix=false
+ for prefix in "${url_prefixes[@]}"; do
+ echo "$proxy" | grep -q "$prefix" && has_prefix=true && proxy=${proxy#"$prefix"} && break
+ done
+
+ local url_arr
+ mapfile -t url_arr <<< "$(echo "$proxy" | tr ":" "\n")"
+
+ local valid_url=true
+ if ! valid_ip4 "${url_arr[0]}" && ! valid_fqdn "${url_arr[0]}"; then
+ valid_url=false
+ fi
+
+ [[ $has_prefix == true ]] && [[ $valid_url == true ]] && return 0 || return 1
+}
+
valid_string() {
local str=$1
local min_length=${2:-1}
diff --git a/salt/common/tools/sbin/so-cortex-user-add b/salt/common/tools/sbin/so-cortex-user-add
index 7b07ddbde..af6ce4647 100755
--- a/salt/common/tools/sbin/so-cortex-user-add
+++ b/salt/common/tools/sbin/so-cortex-user-add
@@ -30,7 +30,7 @@ fi
USER=$1
-CORTEX_KEY=$(lookup_pillar cortexkey)
+CORTEX_KEY=$(lookup_pillar cortexorguserkey)
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
CORTEX_ORG_NAME=$(lookup_pillar cortexorgname)
CORTEX_USER=$USER
diff --git a/salt/common/tools/sbin/so-cortex-user-enable b/salt/common/tools/sbin/so-cortex-user-enable
index fc68e955e..cfc72bd3e 100755
--- a/salt/common/tools/sbin/so-cortex-user-enable
+++ b/salt/common/tools/sbin/so-cortex-user-enable
@@ -30,7 +30,7 @@ fi
USER=$1
-CORTEX_KEY=$(lookup_pillar cortexkey)
+CORTEX_KEY=$(lookup_pillar cortexorguserkey)
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
CORTEX_USER=$USER
diff --git a/salt/common/tools/sbin/so-docker-prune b/salt/common/tools/sbin/so-docker-prune
new file mode 100755
index 000000000..5a56f506d
--- /dev/null
+++ b/salt/common/tools/sbin/so-docker-prune
@@ -0,0 +1,85 @@
+#!/usr/bin/env python3
+
+# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+import sys, argparse, re, docker
+from packaging.version import Version, InvalidVersion
+from itertools import groupby, chain
+
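+# Prune outdated Security Onion Docker images, keeping the two most recent versions of each "so-*" image.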
+
+def get_image_name(string) -> str:
+ return ':'.join(string.split(':')[:-1])
+
+
+def get_so_image_basename(string) -> str:
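+    # Strip the registry path and "so-" prefix from a tag, e.g. ".../so-nginx:2.3.30" -> "nginx"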
+ return get_image_name(string).split('/so-')[-1]
+
+
+def get_image_version(string) -> str:
+ ver = string.split(':')[-1]
+ if ver == 'latest':
+ # Version doesn't like "latest", so use a high semver
+ return '999999.9.9'
+ else:
+ try:
+ Version(ver)
+ except InvalidVersion:
+ # Strip the last substring following a hyphen for automated branches
+ ver = '-'.join(ver.split('-')[:-1])
+ return ver
+
+
+def main(quiet):
+ client = docker.from_env()
+
+ image_list = client.images.list(filters={ 'dangling': False })
+
+ # Map list of image objects to flattened list of tags (format: "name:version")
+ tag_list = list(chain.from_iterable(list(map(lambda x: x.attrs.get('RepoTags'), image_list))))
+
+ # Filter to only SO images (base name begins with "so-")
+ tag_list = list(filter(lambda x: re.match(r'^.*\/so-[^\/]*$', get_image_name(x)), tag_list))
+
+ # Group tags into lists by base name (sort by same projection first)
+ tag_list.sort(key=lambda x: get_so_image_basename(x))
+ grouped_tag_lists = [ list(it) for _, it in groupby(tag_list, lambda x: get_so_image_basename(x)) ]
+
+ no_prunable = True
+ for t_list in grouped_tag_lists:
+ try:
+ # Keep the 2 most current images
+ t_list.sort(key=lambda x: Version(get_image_version(x)), reverse=True)
+ if len(t_list) <= 2:
+ continue
+ else:
+ no_prunable = False
+ for tag in t_list[2:]:
+ if not quiet: print(f'Removing image {tag}')
+ client.images.remove(tag)
+ except InvalidVersion as e:
+ print(f'so-{get_so_image_basename(t_list[0])}: {e.args[0]}', file=sys.stderr)
+            sys.exit(1)
+
+ if no_prunable and not quiet:
+ print('No Security Onion images to prune')
+
+
+if __name__ == "__main__":
+ main_parser = argparse.ArgumentParser(add_help=False)
+ main_parser.add_argument('-q', '--quiet', action='store_const', const=True, required=False)
+ args = main_parser.parse_args(sys.argv[1:])
+
+ main(args.quiet)
\ No newline at end of file
diff --git a/salt/common/tools/sbin/so-monitor-add b/salt/common/tools/sbin/so-monitor-add
old mode 100644
new mode 100755
diff --git a/salt/common/tools/sbin/so-playbook-sigma-refresh b/salt/common/tools/sbin/so-playbook-sigma-refresh
old mode 100644
new mode 100755
diff --git a/salt/common/tools/sbin/so-raid-status b/salt/common/tools/sbin/so-raid-status
old mode 100644
new mode 100755
diff --git a/salt/common/tools/sbin/so-rule b/salt/common/tools/sbin/so-rule
old mode 100644
new mode 100755
diff --git a/salt/common/tools/sbin/so-suricata-testrule b/salt/common/tools/sbin/so-suricata-testrule
old mode 100644
new mode 100755
diff --git a/salt/common/tools/sbin/soup b/salt/common/tools/sbin/soup
index 75e924116..84627a4ea 100755
--- a/salt/common/tools/sbin/soup
+++ b/salt/common/tools/sbin/soup
@@ -290,15 +290,14 @@ rc1_to_rc2() {
done > /opt/so/saltstack/local/pillar/global.sls
sed -i 's/playbook:/playbook_db:/' /opt/so/saltstack/local/pillar/secrets.sls
@@ -379,7 +378,6 @@ up_2.3.0_to_2.3.20(){
fi
INSTALLEDVERSION=2.3.20
-
}
up_2.3.2X_to_2.3.30() {
@@ -389,11 +387,11 @@ up_2.3.2X_to_2.3.30() {
sed -i -r "s/ (\{\{.*}})$/ '\1'/g" "$pillar"
done
-# Change the IMAGEREPO
+ # Change the IMAGEREPO
sed -i "/ imagerepo: 'securityonion'/c\ imagerepo: 'security-onion-solutions'" /opt/so/saltstack/local/pillar/global.sls
sed -i "/ imagerepo: securityonion/c\ imagerepo: 'security-onion-solutions'" /opt/so/saltstack/local/pillar/global.sls
-# Strelka rule repo pillar addition
+ # Strelka rule repo pillar addition
if [ $is_airgap -eq 0 ]; then
# Add manager as default Strelka YARA rule repo
sed -i "/^strelka:/a \\ repos: \n - https://$HOSTNAME/repo/rules/strelka" /opt/so/saltstack/local/pillar/global.sls;
@@ -431,16 +429,16 @@ thehive_maint() {
COUNT=0
THEHIVE_CONNECTED="no"
while [[ "$COUNT" -le 240 ]]; do
- curl --output /dev/null --silent --head --fail -k "https://localhost/thehive/api/alert"
- if [ $? -eq 0 ]; then
- THEHIVE_CONNECTED="yes"
- echo "connected!"
- break
- else
- ((COUNT+=1))
- sleep 1
- echo -n "."
- fi
+ curl --output /dev/null --silent --head --fail -k "https://localhost/thehive/api/alert"
+ if [ $? -eq 0 ]; then
+ THEHIVE_CONNECTED="yes"
+ echo "connected!"
+ break
+ else
+ ((COUNT+=1))
+ sleep 1
+ echo -n "."
+ fi
done
if [ "$THEHIVE_CONNECTED" == "yes" ]; then
echo "Migrating thehive databases if needed."
@@ -475,80 +473,80 @@ update_version() {
}
upgrade_check() {
- # Let's make sure we actually need to update.
- NEWVERSION=$(cat $UPDATE_DIR/VERSION)
- if [ "$INSTALLEDVERSION" == "$NEWVERSION" ]; then
- echo "You are already running the latest version of Security Onion."
- exit 0
- fi
+ # Let's make sure we actually need to update.
+ NEWVERSION=$(cat $UPDATE_DIR/VERSION)
+ if [ "$INSTALLEDVERSION" == "$NEWVERSION" ]; then
+ echo "You are already running the latest version of Security Onion."
+ exit 0
+ fi
}
upgrade_check_salt() {
- NEWSALTVERSION=$(grep version: $UPDATE_DIR/salt/salt/master.defaults.yaml | awk {'print $2'})
- if [ "$INSTALLEDSALTVERSION" == "$NEWSALTVERSION" ]; then
- echo "You are already running the correct version of Salt for Security Onion."
- else
- UPGRADESALT=1
- fi
+ NEWSALTVERSION=$(grep version: $UPDATE_DIR/salt/salt/master.defaults.yaml | awk {'print $2'})
+ if [ "$INSTALLEDSALTVERSION" == "$NEWSALTVERSION" ]; then
+ echo "You are already running the correct version of Salt for Security Onion."
+ else
+ UPGRADESALT=1
+ fi
}
upgrade_salt() {
- SALTUPGRADED=True
- echo "Performing upgrade of Salt from $INSTALLEDSALTVERSION to $NEWSALTVERSION."
- echo ""
- # If CentOS
- if [ "$OS" == "centos" ]; then
- echo "Removing yum versionlock for Salt."
- echo ""
- yum versionlock delete "salt-*"
- echo "Updating Salt packages and restarting services."
- echo ""
- if [ $is_airgap -eq 0 ]; then
- sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -r -F -M -x python3 stable "$NEWSALTVERSION"
- else
- sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -F -M -x python3 stable "$NEWSALTVERSION"
- fi
- echo "Applying yum versionlock for Salt."
- echo ""
- yum versionlock add "salt-*"
- # Else do Ubuntu things
- elif [ "$OS" == "ubuntu" ]; then
- echo "Removing apt hold for Salt."
- echo ""
- apt-mark unhold "salt-common"
- apt-mark unhold "salt-master"
- apt-mark unhold "salt-minion"
- echo "Updating Salt packages and restarting services."
- echo ""
- sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -F -M -x python3 stable "$NEWSALTVERSION"
- echo "Applying apt hold for Salt."
- echo ""
- apt-mark hold "salt-common"
- apt-mark hold "salt-master"
- apt-mark hold "salt-minion"
- fi
+ SALTUPGRADED=True
+ echo "Performing upgrade of Salt from $INSTALLEDSALTVERSION to $NEWSALTVERSION."
+ echo ""
+ # If CentOS
+ if [ "$OS" == "centos" ]; then
+ echo "Removing yum versionlock for Salt."
+ echo ""
+ yum versionlock delete "salt-*"
+ echo "Updating Salt packages and restarting services."
+ echo ""
+ if [ $is_airgap -eq 0 ]; then
+ sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -r -F -M -x python3 stable "$NEWSALTVERSION"
+ else
+ sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -F -M -x python3 stable "$NEWSALTVERSION"
+ fi
+ echo "Applying yum versionlock for Salt."
+ echo ""
+ yum versionlock add "salt-*"
+ # Else do Ubuntu things
+ elif [ "$OS" == "ubuntu" ]; then
+ echo "Removing apt hold for Salt."
+ echo ""
+ apt-mark unhold "salt-common"
+ apt-mark unhold "salt-master"
+ apt-mark unhold "salt-minion"
+ echo "Updating Salt packages and restarting services."
+ echo ""
+ sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -F -M -x python3 stable "$NEWSALTVERSION"
+ echo "Applying apt hold for Salt."
+ echo ""
+ apt-mark hold "salt-common"
+ apt-mark hold "salt-master"
+ apt-mark hold "salt-minion"
+ fi
}
verify_latest_update_script() {
- # Check to see if the update scripts match. If not run the new one.
- CURRENTSOUP=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/soup | awk '{print $1}')
- GITSOUP=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/soup | awk '{print $1}')
- CURRENTCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-common | awk '{print $1}')
- GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
- CURRENTIMGCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-image-common | awk '{print $1}')
- GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
+ # Check to see if the update scripts match. If not run the new one.
+ CURRENTSOUP=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/soup | awk '{print $1}')
+ GITSOUP=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/soup | awk '{print $1}')
+ CURRENTCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-common | awk '{print $1}')
+ GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
+ CURRENTIMGCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-image-common | awk '{print $1}')
+ GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
- if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" ]]; then
- echo "This version of the soup script is up to date. Proceeding."
- else
- echo "You are not running the latest soup version. Updating soup and its components. Might take multiple runs to complete"
- cp $UPDATE_DIR/salt/common/tools/sbin/soup $DEFAULT_SALT_DIR/salt/common/tools/sbin/
- cp $UPDATE_DIR/salt/common/tools/sbin/so-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
- cp $UPDATE_DIR/salt/common/tools/sbin/so-image-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
- salt-call state.apply common queue=True
- echo ""
- echo "soup has been updated. Please run soup again."
- exit 0
- fi
+ if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" ]]; then
+ echo "This version of the soup script is up to date. Proceeding."
+ else
+ echo "You are not running the latest soup version. Updating soup and its components. Might take multiple runs to complete"
+ cp $UPDATE_DIR/salt/common/tools/sbin/soup $DEFAULT_SALT_DIR/salt/common/tools/sbin/
+ cp $UPDATE_DIR/salt/common/tools/sbin/so-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
+ cp $UPDATE_DIR/salt/common/tools/sbin/so-image-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
+ salt-call state.apply common queue=True
+ echo ""
+ echo "soup has been updated. Please run soup again."
+ exit 0
+ fi
}
main () {
@@ -562,9 +560,10 @@ while getopts ":b" opt; do
echo "Batch size must be a number greater than 0."
exit 1
fi
- ;;
- \? ) echo "Usage: cmd [-b]"
- ;;
+ ;;
+ \? )
+ echo "Usage: cmd [-b]"
+ ;;
esac
done
diff --git a/salt/curator/files/bin/so-curator-closed-delete-delete b/salt/curator/files/bin/so-curator-closed-delete-delete
index 2a9165921..58433ee1a 100755
--- a/salt/curator/files/bin/so-curator-closed-delete-delete
+++ b/salt/curator/files/bin/so-curator-closed-delete-delete
@@ -4,12 +4,11 @@
{%- if grains['role'] in ['so-node', 'so-heavynode'] %}
{%- set ELASTICSEARCH_HOST = salt['pillar.get']('elasticsearch:mainip', '') -%}
{%- set ELASTICSEARCH_PORT = salt['pillar.get']('elasticsearch:es_port', '') -%}
- {%- set LOG_SIZE_LIMIT = salt['pillar.get']('elasticsearch:log_size_limit', '') -%}
{%- elif grains['role'] in ['so-eval', 'so-managersearch', 'so-standalone'] %}
{%- set ELASTICSEARCH_HOST = salt['pillar.get']('manager:mainip', '') -%}
{%- set ELASTICSEARCH_PORT = salt['pillar.get']('manager:es_port', '') -%}
- {%- set LOG_SIZE_LIMIT = salt['pillar.get']('manager:log_size_limit', '') -%}
{%- endif -%}
+{%- set LOG_SIZE_LIMIT = salt['pillar.get']('elasticsearch:log_size_limit', '') -%}
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
#
diff --git a/salt/docker_clean/init.sls b/salt/docker_clean/init.sls
index 006d108eb..a92d3aedd 100644
--- a/salt/docker_clean/init.sls
+++ b/salt/docker_clean/init.sls
@@ -1,86 +1,9 @@
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls in allowed_states %}
-{% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
-{% set MANAGER = salt['grains.get']('master') %}
-{% set OLDVERSIONS = ['2.0.0-rc.1','2.0.1-rc.1','2.0.2-rc.1','2.0.3-rc.1','2.1.0-rc.2','2.2.0-rc.3','2.3.0','2.3.1','2.3.2','2.3.10','2.3.20']%}
-
-{% for VERSION in OLDVERSIONS %}
-remove_images_{{ VERSION }}:
- docker_image.absent:
- - force: True
- - images:
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-acng:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive-cortex:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-curator:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-domainstats:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-elastalert:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-elasticsearch:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-filebeat:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-fleet:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-fleet-launcher:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-freqserver:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-grafana:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-idstools:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-influxdb:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-kibana:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-kratos:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-logstash:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-minio:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-mysql:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-nginx:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-playbook:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-redis:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-soc:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-soctopus:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-steno:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-strelka-frontend:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-strelka-manager:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-strelka-backend:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-strelka-filestream:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-suricata:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-telegraf:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive-es:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-wazuh:{{ VERSION }}'
- - '{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-zeek:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-acng:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-thehive-cortex:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-curator:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-domainstats:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-elastalert:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-elasticsearch:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-filebeat:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-fleet:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-fleet-launcher:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-freqserver:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-grafana:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-idstools:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-influxdb:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-kibana:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-kratos:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-logstash:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-minio:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-mysql:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-nginx:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-pcaptools:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-playbook:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-redis:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-soc:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-soctopus:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-steno:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-strelka-frontend:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-strelka-manager:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-strelka-backend:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-strelka-filestream:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-suricata:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-telegraf:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-thehive:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-thehive-es:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-wazuh:{{ VERSION }}'
- - '{{ MANAGER }}:5000/securityonion/so-zeek:{{ VERSION }}'
-{% endfor %}
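+# Remove outdated Security Onion container images using so-docker-prune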
+prune_images:
+ cmd.run:
+ - name: so-docker-prune
{% else %}
diff --git a/salt/idstools/init.sls b/salt/idstools/init.sls
index a88bb82ae..6282ede6c 100644
--- a/salt/idstools/init.sls
+++ b/salt/idstools/init.sls
@@ -45,6 +45,12 @@ so-idstools:
- image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-idstools:{{ VERSION }}
- hostname: so-idstools
- user: socore
+ {% if proxy %}
+ - environment:
+ - http_proxy={{ proxy }}
+ - https_proxy={{ proxy }}
+ - no_proxy={{ salt['pillar.get']('manager:no_proxy') }}
+ {% endif %}
- binds:
- /opt/so/conf/idstools/etc:/opt/so/idstools/etc:ro
- /opt/so/rules/nids:/opt/so/rules/nids:rw
diff --git a/salt/nodered/files/so_flows.json b/salt/nodered/files/so_flows.json
index a8a6e2c69..6a0dea7cf 100644
--- a/salt/nodered/files/so_flows.json
+++ b/salt/nodered/files/so_flows.json
@@ -1,4 +1,4 @@
{%- set MANAGERIP = salt['pillar.get']('global:managerip', '') -%}
{%- set HIVEKEY = salt['pillar.get']('global:hivekey', '') -%}
-{%- set CORTEXKEY = salt['pillar.get']('global:cortexkey', '') -%}
+{%- set CORTEXKEY = salt['pillar.get']('global:cortexorguserkey', '') -%}
[{"id":"dca608c3.7d8af8","type":"tab","label":"TheHive - Webhook Events","disabled":false,"info":""},{"id":"4db74fa6.2556d","type":"tls-config","z":"","name":"","cert":"","key":"","ca":"","certname":"","keyname":"","caname":"","servername":"","verifyservercert":false},{"id":"aa6cf50d.a02fc8","type":"http in","z":"dca608c3.7d8af8","name":"TheHive Listener","url":"/thehive","method":"post","upload":false,"swaggerDoc":"","x":120,"y":780,"wires":[["2b92aebb.853dc2","2fce29bb.1b1376","82ad0f08.7a53f"]]},{"id":"2b92aebb.853dc2","type":"debug","z":"dca608c3.7d8af8","name":"","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"payload","targetType":"msg","x":470,"y":940,"wires":[]},{"id":"a4ecb84a.805958","type":"switch","z":"dca608c3.7d8af8","name":"Operation","property":"payload.operation","propertyType":"msg","rules":[{"t":"eq","v":"Creation","vt":"str"},{"t":"eq","v":"Update","vt":"str"},{"t":"eq","v":"Delete","vt":"str"}],"checkall":"false","repair":false,"outputs":3,"x":580,"y":780,"wires":[["f1e954fd.3c21d8"],["65928861.c90a48"],["a259a26c.a21"]],"outputLabels":["Creation","Update","Delete"]},{"id":"f1e954fd.3c21d8","type":"switch","z":"dca608c3.7d8af8","name":"Creation","property":"payload.objectType","propertyType":"msg","rules":[{"t":"eq","v":"case","vt":"str"},{"t":"eq","v":"case_artifact","vt":"str"},{"t":"eq","v":"case_task","vt":"str"},{"t":"eq","v":"case_task_log","vt":"str"},{"t":"eq","v":"case_artifact_job","vt":"str"},{"t":"eq","v":"alert","vt":"str"},{"t":"eq","v":"user","vt":"str"}],"checkall":"false","repair":false,"outputs":7,"x":900,"y":480,"wires":[["e88b4cc2.f6afe"],["8c54e39.a1b4f2"],["64203fe8.e0ad5"],["3511de51.889a02"],["14544a8b.b6b2f5"],["44c595a4.45d45c"],["3eb4bedf.6e20a2"]],"inputLabels":["Operation"],"outputLabels":["case","case_artifact","case_task","case_task_log","action","alert","user"],"info":"No webhook data is received for the following events:\n\n- Creation of Dashboard\n- Creation of Case Templates\n"},{"id":"65928861.c90a48","type":"switch","z":"dca608c3.7d8af8","name":"Update","property":"payload.objectType","propertyType":"msg","rules":[{"t":"eq","v":"case","vt":"str"},{"t":"eq","v":"case_artifact","vt":"str"},{"t":"eq","v":"case_artifact_job","vt":"str"},{"t":"eq","v":"case_task","vt":"str"},{"t":"eq","v":"case_task_log","vt":"str"},{"t":"eq","v":"alert","vt":"str"},{"t":"eq","v":"user","vt":"str"}],"checkall":"false","repair":false,"outputs":7,"x":900,"y":860,"wires":[["eebe1748.1cd348"],["d703adc0.12fd1"],["2b738415.408d4c"],["6d97371a.406348"],["4ae621e1.9ae6"],["5786cee2.98109"],["54077728.447648"]],"inputLabels":["Operation"],"outputLabels":["case","case_artifact",null,"case_task","case_task_log","alert","user"]},{"id":"a259a26c.a21","type":"switch","z":"dca608c3.7d8af8","name":"Delete","property":"payload.objectType","propertyType":"msg","rules":[{"t":"eq","v":"case","vt":"str"},{"t":"eq","v":"case_artifact","vt":"str"},{"t":"eq","v":"case_task_log","vt":"str"}],"checkall":"false","repair":false,"outputs":3,"x":890,"y":1200,"wires":[["60c8bcfb.eff1f4"],["df708bab.348308"],["e9a8650c.e20cc8"]],"outputLabels":["case","case_artifact",""],"info":"Deleting a case task doesnt actually trigger a delete event. 
It triggers an `update` event where the status = cancelled"},{"id":"54077728.447648","type":"switch","z":"dca608c3.7d8af8","name":"User","property":"payload.object.status","propertyType":"msg","rules":[{"t":"eq","v":"Locked","vt":"str"},{"t":"eq","v":"Ok","vt":"str"}],"checkall":"false","repair":false,"outputs":2,"x":1130,"y":980,"wires":[["9429d6c5.5ac788"],["4e3e091c.d35388"]]},{"id":"9429d6c5.5ac788","type":"function","z":"dca608c3.7d8af8","name":"status: Locked","func":"msg.topic = \"[The Hive] A user account was locked\";\nmsg.from = \"from@example.com\";\nmsg.to = \"to@example.com\";\nreturn msg;","outputs":1,"noerr":0,"x":1380,"y":972,"wires":[[]],"info":"- User account was locked"},{"id":"4e3e091c.d35388","type":"function","z":"dca608c3.7d8af8","name":"status: Ok","func":"msg.topic = \"[The Hive] A user account was changed\";\nmsg.from = \"from@example.com\";\nmsg.to = \"to@example.com\";\nreturn msg;","outputs":1,"noerr":0,"x":1360,"y":1020,"wires":[[]],"info":"- User account was unlocked\n- User description was changed\n- User role was changed\n- User API key was added\n- User API key was revoked\n"},{"id":"485f3be.1ffcfc4","type":"function","z":"dca608c3.7d8af8","name":"status: Open","func":"// Fires when a Case is updated AND status = open\n// This can include things like TLP/PAP changes\n\nreturn msg;","outputs":1,"noerr":0,"x":1370,"y":660,"wires":[[]]},{"id":"eebe1748.1cd348","type":"switch","z":"dca608c3.7d8af8","name":"case","property":"payload.object.status","propertyType":"msg","rules":[{"t":"eq","v":"Open","vt":"str"}],"checkall":"true","repair":false,"outputs":1,"x":1130,"y":740,"wires":[["485f3be.1ffcfc4","e4b7b4bf.2fb828"]],"info":"- A case was modified"},{"id":"8c54e39.a1b4f2","type":"switch","z":"dca608c3.7d8af8","name":"case_artifact: Run Analyzer","property":"payload.object.dataType","propertyType":"msg","rules":[{"t":"eq","v":"ip","vt":"str"},{"t":"eq","v":"domain","vt":"str"}],"checkall":"true","repair":false,"outputs":2,"x":1600,"y":340,"wires":[["eb8cfeb7.a7118","a5dd8a8a.065b88"],["eb8cfeb7.a7118","a5dd8a8a.065b88"]],"info":"# References\n\n\n"},{"id":"2fce29bb.1b1376","type":"function","z":"dca608c3.7d8af8","name":"Add headers","func":"msg.thehive_url = 'https://{{ MANAGERIP }}/thehive';\nmsg.cortex_url = 'https://{{ MANAGERIP }}/cortex';\nmsg.cortex_id = 'CORTEX-SERVER-ID';\nreturn msg;","outputs":1,"noerr":0,"x":350,"y":780,"wires":[["a4ecb84a.805958"]]},{"id":"e4b7b4bf.2fb828","type":"function","z":"dca608c3.7d8af8","name":"status: Resolved","func":"// Fires when a case is closed (resolved)\n\nreturn msg;","outputs":1,"noerr":0,"x":1390,"y":720,"wires":[[]]},{"id":"e88b4cc2.f6afe","type":"function","z":"dca608c3.7d8af8","name":"case","func":"// Fires when a case is created\n// or when a responder is generated against a case\n\nreturn msg;","outputs":1,"noerr":0,"x":1130,"y":320,"wires":[[]]},{"id":"64203fe8.e0ad5","type":"function","z":"dca608c3.7d8af8","name":"case_task","func":"// Fires when a case task is created\nreturn msg;","outputs":1,"noerr":0,"x":1140,"y":400,"wires":[[]]},{"id":"3511de51.889a02","type":"function","z":"dca608c3.7d8af8","name":"case_task_log","func":"// Fires when a case task log is created\n\nreturn msg;","outputs":1,"noerr":0,"x":1163,"y":440,"wires":[[]]},{"id":"14544a8b.b6b2f5","type":"function","z":"dca608c3.7d8af8","name":"case_artifact_job","func":"// Fires when a Responder or Analyzser is Run on an existing observable\n\nreturn 
msg;","outputs":1,"noerr":0,"x":1173,"y":480,"wires":[[]]},{"id":"2b738415.408d4c","type":"function","z":"dca608c3.7d8af8","name":"case_artifact_job","func":"\nreturn msg;","outputs":1,"noerr":0,"x":1170,"y":820,"wires":[[]]},{"id":"3eb4bedf.6e20a2","type":"function","z":"dca608c3.7d8af8","name":"user","func":"// Fires when a user is created\n\nreturn msg;","outputs":1,"noerr":0,"x":1133,"y":560,"wires":[[]]},{"id":"d703adc0.12fd1","type":"function","z":"dca608c3.7d8af8","name":"case_artifact","func":"// Fires when an artifact is updated\nreturn msg;","outputs":1,"noerr":0,"x":1150,"y":780,"wires":[[]]},{"id":"6d97371a.406348","type":"function","z":"dca608c3.7d8af8","name":"case_task","func":"// Fires when a case task is updated\nreturn msg;","outputs":1,"noerr":0,"x":1140,"y":860,"wires":[[]]},{"id":"4ae621e1.9ae6","type":"function","z":"dca608c3.7d8af8","name":"case_task_log","func":"//Fires when a case_task_log is updated\n\nreturn msg;","outputs":1,"noerr":0,"x":1160,"y":900,"wires":[[]]},{"id":"60c8bcfb.eff1f4","type":"function","z":"dca608c3.7d8af8","name":"case","func":"//Fires when a case is deleted\nreturn msg;","outputs":1,"noerr":0,"x":1130,"y":1160,"wires":[[]]},{"id":"df708bab.348308","type":"function","z":"dca608c3.7d8af8","name":"case_artifact","func":"//Fires when a case_artifact is deleted\nreturn msg;","outputs":1,"noerr":0,"x":1150,"y":1200,"wires":[[]]},{"id":"e9a8650c.e20cc8","type":"function","z":"dca608c3.7d8af8","name":"case_task_log","func":"//Fires when a case_task_log is deleted\nreturn msg;","outputs":1,"noerr":0,"x":1160,"y":1240,"wires":[[]]},{"id":"5786cee2.98109","type":"function","z":"dca608c3.7d8af8","name":"alert","func":"//Fires when an alert is updated\nreturn msg;","outputs":1,"noerr":0,"x":1130,"y":940,"wires":[[]]},{"id":"44c595a4.45d45c","type":"change","z":"dca608c3.7d8af8","d":true,"name":"Convert Alert Msg to Artifacts","rules":[{"t":"move","p":"payload.object.artifacts","pt":"msg","to":"payload","tot":"msg"}],"action":"","property":"","from":"","to":"","reg":false,"x":1200,"y":520,"wires":[["6dcca25e.04bd2c"]]},{"id":"6dcca25e.04bd2c","type":"split","z":"dca608c3.7d8af8","name":"Split Artifacts","splt":"\\n","spltType":"str","arraySplt":1,"arraySpltType":"len","stream":false,"addname":"","x":1430,"y":520,"wires":[["767c84f2.c9ba2c"]]},{"id":"767c84f2.c9ba2c","type":"switch","z":"dca608c3.7d8af8","name":"alert: Run Analyzer","property":"payload.dataType","propertyType":"msg","rules":[{"t":"eq","v":"ip","vt":"str"},{"t":"eq","v":"domain","vt":"str"}],"checkall":"true","repair":false,"outputs":2,"x":1630,"y":400,"wires":[["eb8cfeb7.a7118","a5dd8a8a.065b88"],["a5dd8a8a.065b88","eb8cfeb7.a7118"]],"info":"# References\n\n\n"},{"id":"82ad0f08.7a53f","type":"http response","z":"dca608c3.7d8af8","name":"Ack Event Receipt","statusCode":"200","headers":{},"x":250,"y":940,"wires":[]},{"id":"a5dd8a8a.065b88","type":"function","z":"dca608c3.7d8af8","name":"Run Analyzer: CERT DNS","func":"msg.analyzer_id = \"4f28afc20d78f98df425e36e561af33f\";\n\nif (msg.payload.objectId) {\n msg.tag = \"case_artifact\"\n msg.artifact_id = msg.payload.objectId\n msg.url = msg.thehive_url + '/api/connector/cortex/job';\n msg.payload = {\n 'cortexId' : msg.cortex_id,\n 'artifactId': msg.artifact_id,\n 'analyzerId': msg.analyzer_id\n };\n}\nelse {\n msg.tag = \"observable\"\n msg.observable = msg.payload.data\n msg.dataType = msg.payload.dataType\n\n msg.url = msg.cortex_url + '/api/analyzer/' + msg.analyzer_id + '/run';\n msg.payload = {\n 'data' : msg.observable,\n 'dataType': 
msg.dataType \n };\n}\nreturn msg;","outputs":1,"noerr":0,"x":1930,"y":420,"wires":[["f050a09f.b2201"]]},{"id":"eb8cfeb7.a7118","type":"function","z":"dca608c3.7d8af8","name":"Run Analyzer: Urlscan","func":"msg.analyzer_id = \"54e51b62c6c8ddc3cbc3cbdd889a0557\";\n\nif (msg.payload.objectId) {\n msg.tag = \"case_artifact\"\n msg.artifact_id = msg.payload.objectId\n msg.url = msg.thehive_url + '/api/connector/cortex/job';\n msg.payload = {\n 'cortexId' : msg.cortex_id,\n 'artifactId': msg.artifact_id,\n 'analyzerId': msg.analyzer_id\n };\n}\nelse {\n msg.tag = \"observable\"\n msg.observable = msg.payload.data\n msg.dataType = msg.payload.dataType\n\n msg.url = msg.cortex_url + '/api/analyzer/' + msg.analyzer_id + '/run';\n msg.payload = {\n 'data' : msg.observable,\n 'dataType': msg.dataType \n };\n}\nreturn msg;","outputs":1,"noerr":0,"x":1920,"y":320,"wires":[["f050a09f.b2201"]]},{"id":"1c448528.3032fb","type":"http request","z":"dca608c3.7d8af8","name":"Submit to Cortex","method":"POST","ret":"obj","paytoqs":false,"url":"","tls":"4db74fa6.2556d","persist":false,"proxy":"","authType":"bearer","credentials": {"user": "", "password": "{{ CORTEXKEY }}"},"x":2450,"y":420,"wires":[["ea6614fb.752a78"]]},{"id":"ea6614fb.752a78","type":"debug","z":"dca608c3.7d8af8","name":"Debug","active":true,"tosidebar":true,"console":false,"tostatus":false,"complete":"true","targetType":"full","x":2670,"y":360,"wires":[]},{"id":"f050a09f.b2201","type":"switch","z":"dca608c3.7d8af8","name":"Cases vs Alerts","property":"tag","propertyType":"msg","rules":[{"t":"eq","v":"case_artifact","vt":"str"},{"t":"eq","v":"observable","vt":"str"}],"checkall":"true","repair":false,"outputs":2,"x":2200,"y":360,"wires":[["f7fca977.a73b28"],["1c448528.3032fb"]],"inputLabels":["Data"],"outputLabels":["Cases","Alerts"]},{"id":"f7fca977.a73b28","type":"http request","z":"dca608c3.7d8af8","name":"Submit to TheHive","method":"POST","ret":"obj","paytoqs":false,"url":"","tls":"4db74fa6.2556d","persist":false,"proxy":"","authType":"bearer","credentials": {"user": "", "password": "{{ HIVEKEY }}"},"x":2450,"y":280,"wires":[["ea6614fb.752a78"]]}]
diff --git a/salt/yum/etc/yum.conf.jinja b/salt/yum/etc/yum.conf.jinja
index 5e1d30510..506036421 100644
--- a/salt/yum/etc/yum.conf.jinja
+++ b/salt/yum/etc/yum.conf.jinja
@@ -1,3 +1,4 @@
+{% set proxy = salt['pillar.get']('manager:proxy') -%}
[main]
cachedir=/var/cache/yum/$basearch/$releasever
keepcache=0
@@ -11,7 +12,8 @@ installonly_limit={{ salt['pillar.get']('yum:config:installonly_limit', 2) }}
bugtracker_url=http://bugs.centos.org/set_project.php?project_id=23&ref=http://bugs.centos.org/bug_report_page.php?category=yum
distroverpkg=centos-release
clean_requirements_on_remove=1
-
-{% if (grains['role'] not in ['so-eval','so-managersearch', 'so-manager', 'so-standalone']) and salt['pillar.get']('global:managerupdate', '0') %}
+{% if (grains['role'] not in ['so-eval','so-managersearch', 'so-manager', 'so-standalone']) and salt['pillar.get']('global:managerupdate', '0') -%}
proxy=http://{{ salt['pillar.get']('yum:config:proxy', salt['config.get']('master')) }}:3142
+{% elif proxy -%}
+proxy={{ proxy }}
{% endif %}
diff --git a/setup/automation/standalone-net-centos-proxy b/setup/automation/standalone-net-centos-proxy
new file mode 100644
index 000000000..ee2504a98
--- /dev/null
+++ b/setup/automation/standalone-net-centos-proxy
@@ -0,0 +1,78 @@
+#!/bin/bash
+
+# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+TESTING=true
+
+# address_type=DHCP
+ADMINUSER=onionuser
+ADMINPASS1=onionuser
+ADMINPASS2=onionuser
+ALLOW_CIDR=0.0.0.0/0
+ALLOW_ROLE=a
+BASICZEEK=2
+BASICSURI=2
+# BLOGS=
+BNICS=eth1
+ZEEKVERSION=ZEEK
+# CURCLOSEDAYS=
+# EVALADVANCED=BASIC
+GRAFANA=1
+# HELIXAPIKEY=
+HNMANAGER=10.0.0.0/8,192.168.0.0/16,172.16.0.0/12
+HNSENSOR=inherit
+HOSTNAME=standalone
+install_type=STANDALONE
+# LSINPUTBATCHCOUNT=
+# LSINPUTTHREADS=
+# LSPIPELINEBATCH=
+# LSPIPELINEWORKERS=
+MANAGERADV=BASIC
+MANAGERUPDATES=1
+# MDNS=
+# MGATEWAY=
+# MIP=
+# MMASK=
+MNIC=eth0
+# MSEARCH=
+# MSRV=
+# MTU=
+NIDS=Suricata
+# NODE_ES_HEAP_SIZE=
+# NODE_LS_HEAP_SIZE=
+NODESETUP=NODEBASIC
+NSMSETUP=BASIC
+NODEUPDATES=MANAGER
+# OINKCODE=
+OSQUERY=1
+# PATCHSCHEDULEDAYS=
+# PATCHSCHEDULEHOURS=
+PATCHSCHEDULENAME=auto
+PLAYBOOK=1
+so_proxy=http://onionuser:0n10nus3r@10.66.166.30:3128
+# REDIRECTHOST=
+REDIRECTINFO=IP
+RULESETUP=ETOPEN
+# SHARDCOUNT=
+# SKIP_REBOOT=
+SOREMOTEPASS1=onionuser
+SOREMOTEPASS2=onionuser
+STRELKA=1
+THEHIVE=1
+WAZUH=1
+WEBUSER=onionuser@somewhere.invalid
+WEBPASSWD1=0n10nus3r
+WEBPASSWD2=0n10nus3r
diff --git a/setup/proxies/docker.conf b/setup/proxies/docker.conf
deleted file mode 100644
index 9ab2c4b4c..000000000
--- a/setup/proxies/docker.conf
+++ /dev/null
@@ -1,2 +0,0 @@
-[Service]
-ExecStart=/usr/bin/dockerd /usr/bin/dockerd -H fd:// --registry-mirror "$proxy_addr"
diff --git a/setup/so-functions b/setup/so-functions
index 06f845737..82da7c98d 100755
--- a/setup/so-functions
+++ b/setup/so-functions
@@ -535,6 +535,56 @@ collect_patch_schedule_name_import() {
done
}
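+# Prompt for an optional proxy and validate it; clear so_proxy if the user chooses to skip after a failed test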
+collect_proxy() {
+ [[ -n $TESTING ]] && return
+ collect_proxy_details
+ while ! proxy_validate; do
+ if whiptail_invalid_proxy; then
+ collect_proxy_details no_ask
+ else
+ so_proxy=""
+ break
+ fi
+ done
+}
+
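+# Ask whether to use a proxy, collect the address and optional credentials, and assemble so_proxy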
+collect_proxy_details() {
+ local ask=${1:-true}
+ local use_proxy
+ if [[ $ask != true ]]; then
+ use_proxy=0
+ else
+ whiptail_proxy_ask
+ use_proxy=$?
+ fi
+
+ if [[ $use_proxy == 0 ]]; then
+ whiptail_proxy_addr "$proxy_addr"
+
+ while ! valid_proxy "$proxy_addr"; do
+ whiptail_invalid_input
+ whiptail_proxy_addr "$proxy_addr"
+ done
+
+ if whiptail_proxy_auth_ask; then
+ whiptail_proxy_auth_user "$proxy_user"
+ whiptail_proxy_auth_pass "$proxy_pass"
+
+ local url_prefixes=( 'http://' 'https://' )
+ for prefix in "${url_prefixes[@]}"; do
+ if echo "$proxy_addr" | grep -q "$prefix"; then
+ local proxy=${proxy_addr#"$prefix"}
+ so_proxy="${prefix}${proxy_user}:${proxy_pass}@${proxy}"
+ break
+ fi
+ done
+ else
+ so_proxy="$proxy_addr"
+ fi
+ export proxy
+ fi
+}
+
collect_redirect_host() {
whiptail_set_redirect_host "$HOSTNAME"
@@ -691,10 +741,10 @@ check_requirements() {
else
req_storage=100
fi
- if (( $(echo "$free_space_root < $req_storage" | bc -l) )); then
+ if [[ $free_space_root -lt $req_storage ]]; then
whiptail_storage_requirements "/" "${free_space_root} GB" "${req_storage} GB"
fi
- if (( $(echo "$free_space_nsm < $req_storage" | bc -l) )); then
+ if [[ $free_space_nsm -lt $req_storage ]]; then
whiptail_storage_requirements "/nsm" "${free_space_nsm} GB" "${req_storage} GB"
fi
else
@@ -703,7 +753,7 @@ check_requirements() {
else
req_storage=200
fi
- if (( $(echo "$free_space_root < $req_storage" | bc -l) )); then
+ if [[ $free_space_root -lt $req_storage ]]; then
whiptail_storage_requirements "/" "${free_space_root} GB" "${req_storage} GB"
fi
fi
@@ -743,12 +793,14 @@ check_sos_appliance() {
compare_main_nic_ip() {
if ! [[ $MNIC =~ ^(tun|wg|vpn).*$ ]]; then
if [[ "$MAINIP" != "$MNIC_IP" ]]; then
+      error "[ERROR] The routed IP ($MAINIP) does not match the IP address of the management NIC ($MNIC_IP)."
+
read -r -d '' message <<- EOM
The IP being routed by Linux is not the IP address assigned to the management interface ($MNIC).
This is not a supported configuration, please remediate and rerun setup.
- EOM
- whiptail --title "Security Onion Setup" --msgbox "$message" 10 75
+ EOM
+ [[ -n $TESTING ]] || whiptail --title "Security Onion Setup" --msgbox "$message" 10 75
kill -SIGINT "$(ps --pid $$ -oppid=)"; exit 1
fi
else
@@ -939,36 +991,26 @@ detect_os() {
}
-installer_prereq_packages() {
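+# Advance the progress bar toward 98% while prerequisite packages install in the background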
+installer_progress_loop() {
+ local i=0
+ while true; do
+ [[ $i -lt 98 ]] && ((i++))
+ set_progress_str "$i" 'Checking that all required packages are installed and enabled...' nolog
+ [[ $i -gt 0 ]] && sleep 5s
+ done
+}
+installer_prereq_packages() {
if [ "$OS" == centos ]; then
- # Print message to stdout so the user knows setup is doing something
- echo "Installing required packages to run installer..."
- # Install bind-utils so the host command exists
if [[ ! $is_iso ]]; then
- if ! command -v host > /dev/null 2>&1; then
- yum -y install bind-utils >> "$setup_log" 2>&1
- fi
- if ! command -v nmcli > /dev/null 2>&1; then
- {
- yum -y install NetworkManager;
- systemctl enable NetworkManager;
- systemctl start NetworkManager;
- } >> "$setup_log" 2<&1
- fi
- if ! command -v bc > /dev/null 2>&1; then
- yum -y install bc >> "$setup_log" 2>&1
- fi
if ! yum versionlock > /dev/null 2>&1; then
- yum -y install yum-plugin-versionlock >> "$setup_log" 2>&1
+ yum -y install yum-plugin-versionlock >> "$setup_log" 2>&1
fi
- else
- logCmd "systemctl enable NetworkManager"
- logCmd "systemctl start NetworkManager"
- fi
+ fi
+ logCmd "systemctl enable NetworkManager"
+ logCmd "systemctl start NetworkManager"
elif [ "$OS" == ubuntu ]; then
# Print message to stdout so the user knows setup is doing something
- echo "Installing required packages to run installer..."
retry 50 10 "apt-get update" >> "$setup_log" 2>&1 || exit 1
# Install network manager so we can do interface stuff
if ! command -v nmcli > /dev/null 2>&1; then
@@ -978,7 +1020,7 @@ installer_prereq_packages() {
systemctl start NetworkManager
} >> "$setup_log" 2<&1
fi
- retry 50 10 "apt-get -y install bc curl" >> "$setup_log" 2>&1 || exit 1
+ retry 50 10 "apt-get -y install curl" >> "$setup_log" 2>&1 || exit 1
fi
}
@@ -1002,11 +1044,11 @@ disable_ipv6() {
sysctl -w net.ipv6.conf.all.disable_ipv6=1
sysctl -w net.ipv6.conf.default.disable_ipv6=1
} >> "$setup_log" 2>&1
- {
- echo "net.ipv6.conf.all.disable_ipv6 = 1"
- echo "net.ipv6.conf.default.disable_ipv6 = 1"
- echo "net.ipv6.conf.lo.disable_ipv6 = 1"
- } >> /etc/sysctl.conf
+ {
+ echo "net.ipv6.conf.all.disable_ipv6 = 1"
+ echo "net.ipv6.conf.default.disable_ipv6 = 1"
+ echo "net.ipv6.conf.lo.disable_ipv6 = 1"
+ } >> /etc/sysctl.conf
}
#disable_misc_network_features() {
@@ -1390,6 +1432,8 @@ install_cleanup() {
info "Removing so-setup permission entry from sudoers file"
sed -i '/so-setup/d' /etc/sudoers
fi
+
+ so-ssh-harden -q
}
import_registry_docker() {
@@ -1437,6 +1481,8 @@ manager_pillar() {
"manager:"\
" mainip: '$MAINIP'"\
" mainint: '$MNIC'"\
+ " proxy: '$so_proxy'"\
+ " no_proxy: '$no_proxy_string'"\
" esheap: '$ES_HEAP_SIZE'"\
" esclustername: '{{ grains.host }}'"\
" freq: 0"\
@@ -1451,7 +1497,6 @@ manager_pillar() {
printf '%s\n'\
" elastalert: 1"\
" es_port: $node_es_port"\
- " log_size_limit: $log_size_limit"\
" cur_close_days: $CURCLOSEDAYS"\
" grafana: $GRAFANA"\
" osquery: $OSQUERY"\
@@ -1517,7 +1562,6 @@ manager_global() {
" hnmanager: '$HNMANAGER'"\
" ntpserver: '$NTPSERVER'"\
" dockernet: '$DOCKERNET'"\
- " proxy: '$PROXY'"\
" mdengine: '$ZEEKVERSION'"\
" ids: '$NIDS'"\
" url_base: '$REDIRECTIT'"\
@@ -1700,7 +1744,6 @@ network_init() {
network_init_whiptail() {
case "$setup_type" in
'iso')
- collect_hostname
whiptail_management_nic
whiptail_dhcp_or_static
@@ -1714,7 +1757,6 @@ network_init_whiptail() {
'network')
whiptail_network_notice
whiptail_dhcp_warn
- collect_hostname
whiptail_management_nic
;;
esac
@@ -1782,6 +1824,22 @@ print_salt_state_apply() {
echo "Applying $state Salt state"
}
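+# Verify the proxy works by fetching a known URL through it; abort setup on failure during automated (TESTING) installs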
+proxy_validate() {
+ local test_url="https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS"
+ proxy_test_err=$(curl -sS "$test_url" --proxy "$so_proxy" 2>&1)
+ local ret=$?
+
+ if [[ $ret != 0 ]]; then
+ error "Could not reach $test_url using proxy $so_proxy"
+ error "Received error: $proxy_test_err"
+ if [[ -n $TESTING ]]; then
+ error "Exiting setup"
+ kill -SIGINT "$(ps --pid $$ -oppid=)"; exit 1
+ fi
+ fi
+ return $ret
+}
+
reserve_group_ids() {
# This is a hack to fix CentOS from taking group IDs that we need
groupadd -g 928 kratos
@@ -1875,6 +1933,24 @@ reinstall_init() {
} >> "$setup_log" 2>&1
}
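+# Remove any proxy configuration left over from a previous setup attempt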
+reset_proxy() {
+ [[ -f /etc/profile.d/so-proxy.sh ]] && rm -f /etc/profile.d/so-proxy.sh
+
+ [[ -f /etc/systemd/system/docker.service.d/http-proxy.conf ]] && rm -f /etc/systemd/system/docker.service.d/http-proxy.conf
+ systemctl daemon-reload
+ command -v docker &> /dev/null && systemctl restart docker
+
+ [[ -f /root/.docker/config.json ]] && rm -f /root/.docker/config.json
+
+ [[ -f /etc/gitconfig ]] && rm -f /etc/gitconfig
+
+ if [[ $OS == 'centos' ]]; then
+ sed -i "/proxy=/d" /etc/yum.conf
+ else
+ [[ -f /etc/apt/apt.conf.d/00-proxy.conf ]] && rm -f /etc/apt/apt.conf.d/00-proxy.conf
+ fi
+}
+
backup_dir() {
dir=$1
backup_suffix=$2
@@ -1968,6 +2044,7 @@ saltify() {
python36-dateutil\
python36-m2crypto\
python36-mysql\
+ python36-packaging\
yum-utils\
device-mapper-persistent-data\
lvm2\
@@ -2056,9 +2133,9 @@ saltify() {
retry 50 10 "apt-get -y install salt-minion=3002.5+ds-1 salt-common=3002.5+ds-1" >> "$setup_log" 2>&1 || exit 1
retry 50 10 "apt-mark hold salt-minion salt-common" >> "$setup_log" 2>&1 || exit 1
if [[ $OSVER != 'xenial' ]]; then
- retry 50 10 "apt-get -y install python3-pip python3-dateutil python3-m2crypto python3-mysqldb" >> "$setup_log" 2>&1 || exit 1
+ retry 50 10 "apt-get -y install python3-pip python3-dateutil python3-m2crypto python3-mysqldb python3-packaging" >> "$setup_log" 2>&1 || exit 1
else
- retry 50 10 "apt-get -y install python-pip python-dateutil python-m2crypto python-mysqldb" >> "$setup_log" 2>&1 || exit 1
+ retry 50 10 "apt-get -y install python-pip python-dateutil python-m2crypto python-mysqldb python-packaging" >> "$setup_log" 2>&1 || exit 1
fi
fi
}
@@ -2200,7 +2277,70 @@ set_main_ip() {
# Add /usr/sbin to everyone's path
set_path() {
- echo "complete -cf sudo" > /etc/profile.d/securityonion.sh
+ echo "complete -cf sudo" >> /etc/profile.d/securityonion.sh
+}
+
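+# Apply so_proxy system-wide: shell profile, dockerd, docker client config, package manager, and git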
+set_proxy() {
+
+  # Don't proxy localhost, the main IP, the local hostname, or (for minions) the manager
+ no_proxy_string="localhost, 127.0.0.1, ${MAINIP}, ${HOSTNAME}"
+ if [[ -n $MSRV ]] && [[ -n $MSRVIP ]];then
+ no_proxy_string="${no_proxy_string}, ${MSRVIP}, ${MSRV}"
+ fi
+
+ # Set proxy environment variables used by curl, wget, docker, and others
+ {
+ echo "export use_proxy=on"
+ echo "export http_proxy=\"${so_proxy}\""
+ echo "export https_proxy=\"\$http_proxy\""
+ echo "export ftp_proxy=\"\$http_proxy\""
+ echo "export no_proxy=\"${no_proxy_string}\""
+ } > /etc/profile.d/so-proxy.sh
+
+ source /etc/profile.d/so-proxy.sh
+
+ [[ -d '/etc/systemd/system/docker.service.d' ]] || mkdir -p /etc/systemd/system/docker.service.d
+
+ # Create proxy config for dockerd
+ printf '%s\n'\
+ "[Service]"\
+ "Environment=\"HTTP_PROXY=${so_proxy}\""\
+ "Environment=\"HTTPS_PROXY=${so_proxy}\""\
+ "Environment=\"NO_PROXY=${no_proxy_string}\"" > /etc/systemd/system/docker.service.d/http-proxy.conf
+
+ systemctl daemon-reload
+ command -v docker &> /dev/null && systemctl restart docker
+
+ # Create config.json for docker containers
+ [[ -d /root/.docker ]] || mkdir /root/.docker
+ printf '%s\n'\
+ "{"\
+ " \"proxies\":"\
+ " {"\
+ " \"default\":"\
+ " {"\
+ " \"httpProxy\":\"${so_proxy}\","\
+ " \"httpsProxy\":\"${so_proxy}\","\
+ " \"ftpProxy\":\"${so_proxy}\","\
+ " \"noProxy\":\"${no_proxy_string}\""\
+ " }"\
+ " }"\
+ "}" > /root/.docker/config.json
+
+ # Set proxy for package manager
+ if [ "$OS" = 'centos' ]; then
+ echo "proxy=$so_proxy" >> /etc/yum.conf
+ else
+    # Point apt at the proxy
+ printf '%s\n'\
+ "Acquire::http::Proxy \"$so_proxy\";"\
+ "Acquire::https::Proxy \"$so_proxy\";" > /etc/apt/apt.conf.d/00-proxy.conf
+ fi
+
+ # Set global git proxy
+ printf '%s\n'\
+ "[http]"\
+ " proxy = ${so_proxy}" > /etc/gitconfig
}
setup_salt_master_dirs() {
@@ -2231,6 +2371,7 @@ set_progress_str() {
local percentage_input=$1
progress_bar_text=$2
export progress_bar_text
+  local nolog=$3
if (( "$percentage_input" >= "$percentage" )); then
percentage="$percentage_input"
@@ -2240,12 +2381,14 @@ set_progress_str() {
echo -e "$percentage_str"
- info "Progressing ($percentage%): $progress_bar_text"
+ if [[ -z $nolog ]]; then
+ info "Progressing ($percentage%): $progress_bar_text"
- printf '%s\n' \
- '----'\
- "$percentage% - ${progress_bar_text^^}"\
- "----" >> "$setup_log" 2>&1
+ # printf '%s\n' \
+ # '----'\
+ # "$percentage% - ${progress_bar_text^^}"\
+ # "----" >> "$setup_log" 2>&1
+ fi
}
set_ssh_cmds() {
diff --git a/setup/so-setup b/setup/so-setup
index e9039f164..de0ffefd4 100755
--- a/setup/so-setup
+++ b/setup/so-setup
@@ -27,6 +27,8 @@ original_args=("$@")
cd "$(dirname "$0")" || exit 255
+echo "Getting started..."
+
# Source the generic function libraries that are also used by the product after
# setup. These functions are intended to be reusable outside of the setup process.
source ../salt/common/tools/sbin/so-common
@@ -93,12 +95,23 @@ if ! [ -f $install_opt_file ]; then
analyze_system
fi
+# Set up handler for setup to exit early (use `kill -SIGUSR1 "$setup_proc"; exit 1` in child scripts)
+trap 'catch $LINENO' SIGUSR1
+setup_proc="$$"
+catch() {
+ info "Fatal error occurred at $1 in so-setup, failing setup."
+ grep --color=never "ERROR" "$setup_log" > "$error_log"
+ whiptail_setup_failed
+ exit 1
+}
+
automated=no
-function progress() {
- local title='Security Onion Install'
+progress() {
+ local title='Security Onion Setup'
+ local msg=${1:-'Please wait while installing...'}
if [ $automated == no ]; then
- whiptail --title "$title" --gauge 'Please wait while installing...' 6 60 0 # append to text
+ whiptail --title "$title" --gauge "$msg" 6 70 0 # append to text
else
cat >> $setup_log 2>&1
fi
@@ -154,7 +167,6 @@ set_ssh_cmds $automated
local_sbin="$(pwd)/../salt/common/tools/sbin"
export PATH=$PATH:$local_sbin
-installer_prereq_packages && detect_cloud
set_network_dev_status_list
set_palette >> $setup_log 2>&1
@@ -190,19 +202,24 @@ if ! [[ -f $install_opt_file ]]; then
if [[ $setup_type == 'iso' ]] && [ "$automated" == no ]; then
whiptail_first_menu_iso
if [[ $option == "CONFIGURENETWORK" ]]; then
+ collect_hostname
network_init_whiptail
whiptail_management_interface_setup
network_init
printf '%s\n' \
"MNIC=$MNIC" \
"HOSTNAME=$HOSTNAME" > "$net_init_file"
+ set_main_ip >> $setup_log 2>&1
+ compare_main_nic_ip
+ reset_proxy >> $setup_log 2>&1
+ collect_proxy
+ [[ -n "$so_proxy" ]] && set_proxy >> $setup_log 2>&1
whiptail_net_setup_complete
else
- whiptail_install_type
+ true
fi
- else
- whiptail_install_type
fi
+ whiptail_install_type
else
source $install_opt_file
fi
@@ -275,25 +292,32 @@ if ! [[ -f $install_opt_file ]]; then
[[ -f $net_init_file ]] && whiptail_net_reinit && reinit_networking=true
if [[ $reinit_networking ]] || ! [[ -f $net_init_file ]]; then
+ collect_hostname
network_init_whiptail
else
source "$net_init_file"
fi
- if [[ $is_minion ]]; then
- collect_mngr_hostname
- fi
-
- if [[ $is_minion ]] || [[ $reinit_networking ]] || [[ $is_iso ]] && ! [[ -f $net_init_file ]]; then
- whiptail_management_interface_setup
- fi
if [[ $reinit_networking ]] || ! [[ -f $net_init_file ]]; then
network_init
fi
-
- if [[ -n "$TURBO" ]]; then
- use_turbo_proxy
+
+ set_main_ip >> $setup_log 2>&1
+ compare_main_nic_ip
+
+ if [[ $is_minion ]]; then
+ collect_mngr_hostname
+ fi
+
+ reset_proxy >> $setup_log 2>&1
+ if [[ -z $is_airgap ]]; then
+ collect_proxy
+ [[ -n "$so_proxy" ]] && set_proxy >> $setup_log 2>&1
+ fi
+
+ if [[ $is_minion ]] || [[ $reinit_networking ]] || [[ $is_iso ]] && ! [[ -f $net_init_file ]]; then
+ whiptail_management_interface_setup
fi
if [[ $is_minion ]]; then
@@ -312,6 +336,7 @@ if ! [[ -f $install_opt_file ]]; then
"HOSTNAME=$HOSTNAME" \
"MSRV=$MSRV" \
"MSRVIP=$MSRVIP" > "$install_opt_file"
+ [[ -n $so_proxy ]] && echo "so_proxy=$so_proxy" >> "$install_opt_file"
download_repo_tarball
exec bash /root/manager_setup/securityonion/setup/so-setup "${original_args[@]}"
fi
@@ -325,6 +350,22 @@ else
rm -rf $install_opt_file >> "$setup_log" 2>&1
fi
+percentage=0
+{
+  installer_progress_loop & # Run the progress bar up to 98% over ~8 minutes while waiting for package installs
+ progress_bg_proc=$!
+ installer_prereq_packages
+ install_success=$?
+ kill -9 "$progress_bg_proc"
+  wait "$progress_bg_proc" &> /dev/null # kill only sends the signal; wait for the job and discard its termination output
+ if [[ $install_success -gt 0 ]]; then
+ echo "Could not install packages required for setup, exiting now." >> "$setup_log" 2>&1
+ kill -SIGUSR1 "$setup_proc"; exit 1
+ fi
+ set_progress_str 99 "Detecting whether setup is running in the cloud." nolog
+ detect_cloud
+} | progress '...'
+
short_name=$(echo "$HOSTNAME" | awk -F. '{print $1}')
MINION_ID=$(echo "${short_name}_${install_type}" | tr '[:upper:]' '[:lower:]')
@@ -338,14 +379,14 @@ minion_type=$(get_minion_type)
set_default_log_size >> $setup_log 2>&1
if [[ $is_helix ]]; then
- RULESETUP=${RULESETUP:-ETOPEN}
+ RULESETUP=${RULESETUP:-ETOPEN}
NSMSETUP=${NSMSETUP:-BASIC}
HNSENSOR=${HNSENSOR:-inherit}
MANAGERUPDATES=${MANAGERUPDATES:-0}
fi
if [[ $is_helix || ( $is_manager && $is_node ) ]]; then
- RULESETUP=${RULESETUP:-ETOPEN}
+ RULESETUP=${RULESETUP:-ETOPEN}
NSMSETUP=${NSMSETUP:-BASIC}
fi
@@ -365,7 +406,7 @@ fi
if [[ $is_import ]]; then
PATCHSCHEDULENAME=${PATCHSCHEDULENAME:-auto}
MTU=${MTU:-1500}
- RULESETUP=${RULESETUP:-ETOPEN}
+ RULESETUP=${RULESETUP:-ETOPEN}
NSMSETUP=${NSMSETUP:-BASIC}
HNSENSOR=${HNSENSOR:-inherit}
MANAGERUPDATES=${MANAGERUPDATES:-0}
@@ -529,21 +570,10 @@ whiptail_make_changes
# From here on changes will be made.
echo "1" > /root/accept_changes
-# Set up handler for setup to exit early (use `kill -SIGUSR1 "$(ps --pid $$ -oppid=)"; exit 1` in child scripts)
-trap 'catch $LINENO' SIGUSR1
-
-catch() {
- info "Fatal error occurred at $1 in so-setup, failing setup."
- grep --color=never "ERROR" "$setup_log" > "$error_log"
- whiptail_setup_failed
- exit
-}
-
# This block sets REDIRECTIT which is used by a function outside the below subshell
-set_main_ip >> $setup_log 2>&1
-compare_main_nic_ip
set_redirect >> $setup_log 2>&1
+
# Begin install
{
# Set initial percentage to 0
diff --git a/setup/so-whiptail b/setup/so-whiptail
index b4f81eb15..9753cc290 100755
--- a/setup/so-whiptail
+++ b/setup/so-whiptail
@@ -588,8 +588,21 @@ whiptail_invalid_input() { # TODO: This should accept a list of arguments to spe
}
+whiptail_invalid_proxy() {
+ [ -n "$TESTING" ] && return
+
+ local message
+ read -r -d '' message <<- EOM
+ Could not reach test url using proxy ${proxy_addr}.
+
+ Error was: ${proxy_test_err}
+ EOM
+
+ whiptail --title "Security Onion Setup" --yesno "$message" --yes-button "Enter Again" --no-button "Skip" 11 60
+}
+
whiptail_invalid_string() {
- [ -n "$TESTING" ] && return
+ [ -n "$TESTING" ] && return
whiptail --title "Security Onion Setup" --msgbox "Invalid input, please try again.\n\nThe $1 cannot contain spaces." 9 45
@@ -1216,6 +1229,58 @@ whiptail_patch_schedule_select_hours() {
}
+whiptail_proxy_ask() {
+ [ -n "$TESTING" ] && return
+
+ whiptail --title "Security Onion Setup" --yesno "Do you want to set a proxy server for this installation?" 7 60
+}
+
+whiptail_proxy_addr() {
+ [ -n "$TESTING" ] && return
+
+ local message
+ read -r -d '' message <<- EOM
+ Please input the proxy server you wish to use, including the URL prefix (ex: https://your.proxy.com:1234).
+
+ If your proxy requires a username and password do not include them in your input. Setup will ask for those values next.
+ EOM
+
+ proxy_addr=$(whiptail --title "Security Onion Setup" --inputbox "$message" 13 60 "$1" 3>&1 1>&2 2>&3)
+
+ local exitstatus=$?
+ whiptail_check_exitstatus $exitstatus
+}
+
+whiptail_proxy_auth_ask() {
+ [ -n "$TESTING" ] && return
+
+ whiptail --title "Security Onion Setup" --yesno "Does your proxy require authentication?" 7 60
+}
+
+whiptail_proxy_auth_user() {
+ [ -n "$TESTING" ] && return
+
+ proxy_user=$(whiptail --title "Security Onion Setup" --inputbox "Please input the proxy user:" 8 60 "$1" 3>&1 1>&2 2>&3)
+
+ local exitstatus=$?
+ whiptail_check_exitstatus $exitstatus
+}
+
+whiptail_proxy_auth_pass() {
+ local arg=$1
+
+ [ -n "$TESTING" ] && return
+
+ if [[ $arg != 'confirm' ]]; then
+ proxy_pass=$(whiptail --title "Security Onion Setup" --passwordbox "Please input the proxy password:" 8 60 3>&1 1>&2 2>&3)
+ else
+ proxy_pass_confirm=$(whiptail --title "Security Onion Setup" --passwordbox "Please confirm the proxy password:" 8 60 3>&1 1>&2 2>&3)
+ fi
+
+ local exitstatus=$?
+ whiptail_check_exitstatus $exitstatus
+}
+
whiptail_requirements_error() {
local requirement_needed=$1