From f7df14b48f3f1945fe9540920080ee1fad4c6647 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 08:58:51 -0500 Subject: [PATCH 01/12] add os patch scheduling options to the network install script - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 107 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) diff --git a/so-setup-network.sh b/so-setup-network.sh index 6e54b8ce4..23cde3a5b 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -619,6 +619,42 @@ node_pillar() { } +patch_pillar() { + OSPATCHPILLARDIR="$TMP/patch/os" + OSPATCHPILLAR="$OSPATCHPILLARDIR/$MINION_ID.sls" + + if [ ! -d $OSPATCHPILLARDIR ] ; then + mkdir -p $OSPATCHPILLARDIR + fi + touch $OSPATCHPILLAR + echo "patch:" > $OSPATCHPILLAR + case $PATCHSCHEDULE in + Scheduled) + echo " os:" >> $OSPATCHPILLAR + echo " schedule:" >> $OSPATCHPILLAR + for psd in "${PATCHSCHEDULEDAYS[@]}" + do + psd=$(echo $psd | sed 's/"//g') + echo " - $psd:" >> $OSPATCHPILLAR + for psh in "${PATCHSCHEDULEHOURS[@]}" + do + psh=$(echo $psh | sed 's/"//g') + echo " - $psh" >> $OSPATCHPILLAR + done + done + ;; + Automatic) + echo " os:" >> $OSPATCHPILLAR + echo " schedule: auto" >> $OSPATCHPILLAR + ;; + Manual) + echo " os:" >> $OSPATCHPILLAR + echo " schedule: manual" >> $OSPATCHPILLAR + ;; + esac + +} + process_components() { CLEAN=${COMPONENTS//\"} GRAFANA=0 @@ -1469,6 +1505,69 @@ whiptail_passwords_dont_match() { } +whiptail_patch_schedule() { + + # What kind of patch schedule are we doing? + PATCHSCHEDULE=$(whiptail --title "Security Onion Setup" --radiolist \ + "Choose OS patch schedule. This will NOT update Security Onion related tools such as Zeek, Elasticsearch, Kibana, SaltStack, etc." 
25 75 5 \ + "Automatic" "Package updates will be installed automatically" ON \ + "Manual" "Package updates will need to be installed manually" OFF \ + "Scheduled" "Select a schedule on the following screen" OFF 3>&1 1>&2 2>&3 ) + + local exitstatus=$? + whiptail_check_exitstatus $exitstatus + +} + +whiptail_patch_schedule_select_days() { + # Select the days to patch + PATCHSCHEDULEDAYS=($(whiptail --title "Security Onion Setup" --checklist \ + "Which days do you want to apply OS patches?" 20 55 9 \ + "Monday" "" OFF \ + "Tuesday" "" ON \ + "Wednesday" "" OFF \ + "Thursday" "" OFF \ + "Friday" "" OFF \ + "Saturday" "" OFF \ + "Sunday" "" OFF 3>&1 1>&2 2>&3 )) + + local exitstatus=$? + whiptail_check_exitstatus $exitstatus +} + +whiptail_patch_schedule_select_hours() { + # Select the hours to patch + PATCHSCHEDULEHOURS=($(whiptail --title "Security Onion Setup" --checklist \ + "At which time, UTC, do you want to apply OS patches on the selected days?" 35 55 26 \ + "00:00" "" OFF \ + "01:00" "" OFF \ + "02:00" "" OFF \ + "03:00" "" OFF \ + "04:00" "" OFF \ + "05:00" "" OFF \ + "06:00" "" OFF \ + "07:00" "" OFF \ + "08:00" "" OFF \ + "09:00" "" OFF \ + "10:00" "" OFF \ + "11:00" "" OFF \ + "12:00" "" OFF \ + "13:00" "" OFF \ + "14:00" "" OFF \ + "15:00" "" ON \ + "16:00" "" OFF \ + "17:00" "" OFF \ + "18:00" "" OFF \ + "19:00" "" OFF \ + "20:00" "" OFF \ + "21:00" "" OFF \ + "22:00" "" OFF \ + "23:00" "" OFF 3>&1 1>&2 2>&3 )) + + local exitstatus=$? + whiptail_check_exitstatus $exitstatus +} + whiptail_rule_setup() { # Get pulled pork info @@ -1609,6 +1708,14 @@ if (whiptail_you_sure); then # What kind of install are we doing? whiptail_install_type + # How do we want to handle OS patching? 
manual, auto or scheduled days and hours + whiptail_patch_schedule + if [[ $PATCHSCHEDULE == "Scheduled" ]] ; then + whiptail_patch_schedule_select_days + whiptail_patch_schedule_select_hours + fi + patch_pillar + #################### ## Master ## #################### From 5e5d0d616cdd3e8e2ef07202a6d677fccfa6d85f Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 14:22:37 -0500 Subject: [PATCH 02/12] copy_minion_pillar replaced with copy_minion_pillars to simplify pillar copying, os patch pillar written - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 108 +++++++++++++++++++++++++------------------- 1 file changed, 62 insertions(+), 46 deletions(-) diff --git a/so-setup-network.sh b/so-setup-network.sh index 23cde3a5b..bed63a0c6 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -268,14 +268,15 @@ copy_master_config() { } -copy_minion_pillar() { +copy_minion_pillars() { - # Pass the type so it knows where to copy the pillar - local TYPE=$1 - - # Copy over the pillar - echo "Copying the pillar over" >> $SETUPLOG 2>&1 - scp -v -i /root/.ssh/so.key $TMP/$MINION_ID.sls socore@$MSRV:/opt/so/saltstack/pillar/$TYPE/$MINION_ID.sls + if [ $INSTALLTYPE == 'MASTERONLY' ] || [ $INSTALLTYPE == 'EVALMODE' ]; then + echo "rsyncing TMP pillar files to pillar base" >> $SETUPLOG 2>&1 + rsync -a -v $TMP/pillar/ /opt/so/saltstack/pillar/ >> $SETUPLOG 2>&1 + else + echo "scp TMP pillar files to pillar base on master" >> $SETUPLOG 2>&1 + scp -prv -i /root/.ssh/so.key $TMP/pillar socore@$MSRV:/opt/so/saltstack/pillar >> $SETUPLOG 2>&1 + fi } @@ -599,28 +600,33 @@ minio_generate_keys() { node_pillar() { + NODEPILLARPATH=$TMP/pillar/nodes + if [ ! 
-d $NODEPILLARPATH ]; then + mkdir -p $NODEPILLARPATH + fi + # Create the node pillar - touch $TMP/$MINION_ID.sls - echo "node:" > $TMP/$MINION_ID.sls - echo " mainip: $MAINIP" >> $TMP/$MINION_ID.sls - echo " mainint: $MAININT" >> $TMP/$MINION_ID.sls - echo " esheap: $NODE_ES_HEAP_SIZE" >> $TMP/$MINION_ID.sls - echo " esclustername: {{ grains.host }}" >> $TMP/$MINION_ID.sls - echo " lsheap: $NODE_LS_HEAP_SIZE" >> $TMP/$MINION_ID.sls - echo " ls_pipeline_workers: $LSPIPELINEWORKERS" >> $TMP/$MINION_ID.sls - echo " ls_pipeline_batch_size: $LSPIPELINEBATCH" >> $TMP/$MINION_ID.sls - echo " ls_input_threads: $LSINPUTTHREADS" >> $TMP/$MINION_ID.sls - echo " ls_batch_count: $LSINPUTBATCHCOUNT" >> $TMP/$MINION_ID.sls - echo " es_shard_count: $SHARDCOUNT" >> $TMP/$MINION_ID.sls - echo " node_type: $NODETYPE" >> $TMP/$MINION_ID.sls - echo " es_port: $NODE_ES_PORT" >> $TMP/$MINION_ID.sls - echo " log_size_limit: $LOG_SIZE_LIMIT" >> $TMP/$MINION_ID.sls - echo " cur_close_days: $CURCLOSEDAYS" >> $TMP/$MINION_ID.sls + touch $NODEPILLARPATH/$MINION_ID.sls + echo "node:" > $NODEPILLARPATH/$MINION_ID.sls + echo " mainip: $MAINIP" >> $NODEPILLARPATH/$MINION_ID.sls + echo " mainint: $MAININT" >> $NODEPILLARPATH/$MINION_ID.sls + echo " esheap: $NODE_ES_HEAP_SIZE" >> $NODEPILLARPATH/$MINION_ID.sls + echo " esclustername: {{ grains.host }}" >> $NODEPILLARPATH/$MINION_ID.sls + echo " lsheap: $NODE_LS_HEAP_SIZE" >> $NODEPILLARPATH/$MINION_ID.sls + echo " ls_pipeline_workers: $LSPIPELINEWORKERS" >> $NODEPILLARPATH/$MINION_ID.sls + echo " ls_pipeline_batch_size: $LSPIPELINEBATCH" >> $NODEPILLARPATH/$MINION_ID.sls + echo " ls_input_threads: $LSINPUTTHREADS" >> $NODEPILLARPATH/$MINION_ID.sls + echo " ls_batch_count: $LSINPUTBATCHCOUNT" >> $NODEPILLARPATH/$MINION_ID.sls + echo " es_shard_count: $SHARDCOUNT" >> $NODEPILLARPATH/$MINION_ID.sls + echo " node_type: $NODETYPE" >> $NODEPILLARPATH/$MINION_ID.sls + echo " es_port: $NODE_ES_PORT" >> $NODEPILLARPATH/$MINION_ID.sls + echo " 
log_size_limit: $LOG_SIZE_LIMIT" >> $NODEPILLARPATH/$MINION_ID.sls + echo " cur_close_days: $CURCLOSEDAYS" >> $NODEPILLARPATH/$MINION_ID.sls } patch_pillar() { - OSPATCHPILLARDIR="$TMP/patch/os" + OSPATCHPILLARDIR="$TMP/pillar/patch/os" OSPATCHPILLAR="$OSPATCHPILLARDIR/$MINION_ID.sls" if [ ! -d $OSPATCHPILLARDIR ] ; then @@ -962,37 +968,42 @@ salt_master_directories() { sensor_pillar() { + SENSORPILLARPATH=$TMP/pillar/sensors + if [ ! -d $SENSORPILLARPATH ]; then + mkdir -p $SENSORPILLARPATH + fi + # Create the sensor pillar - touch $TMP/$MINION_ID.sls - echo "sensor:" > $TMP/$MINION_ID.sls - echo " interface: bond0" >> $TMP/$MINION_ID.sls - echo " mainip: $MAINIP" >> $TMP/$MINION_ID.sls - echo " mainint: $MAININT" >> $TMP/$MINION_ID.sls + touch $SENSORPILLARPATH/$MINION_ID.sls + echo "sensor:" > $SENSORPILLARPATH/$MINION_ID.sls + echo " interface: bond0" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " mainip: $MAINIP" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " mainint: $MAININT" >> $SENSORPILLARPATH/$MINION_ID.sls if [ $NSMSETUP == 'ADVANCED' ]; then - echo " bro_pins:" >> $TMP/$MINION_ID.sls + echo " bro_pins:" >> $SENSORPILLARPATH/$MINION_ID.sls for PIN in $BROPINS; do PIN=$(echo $PIN | cut -d\" -f2) - echo " - $PIN" >> $TMP/$MINION_ID.sls + echo " - $PIN" >> $SENSORPILLARPATH/$MINION_ID.sls done - echo " suripins:" >> $TMP/$MINION_ID.sls + echo " suripins:" >> $SENSORPILLARPATH/$MINION_ID.sls for SPIN in $SURIPINS; do SPIN=$(echo $SPIN | cut -d\" -f2) - echo " - $SPIN" >> $TMP/$MINION_ID.sls + echo " - $SPIN" >> $SENSORPILLARPATH/$MINION_ID.sls done else - echo " bro_lbprocs: $BASICBRO" >> $TMP/$MINION_ID.sls - echo " suriprocs: $BASICSURI" >> $TMP/$MINION_ID.sls + echo " bro_lbprocs: $BASICBRO" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " suriprocs: $BASICSURI" >> $SENSORPILLARPATH/$MINION_ID.sls fi - echo " brobpf:" >> $TMP/$MINION_ID.sls - echo " pcapbpf:" >> $TMP/$MINION_ID.sls - echo " nidsbpf:" >> $TMP/$MINION_ID.sls - echo " master: $MSRV" >> 
$TMP/$MINION_ID.sls - echo " mtu: $MTU" >> $TMP/$MINION_ID.sls + echo " brobpf:" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " pcapbpf:" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " nidsbpf:" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " master: $MSRV" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " mtu: $MTU" >> $SENSORPILLARPATH/$MINION_ID.sls if [ $HNSENSOR != 'inherit' ]; then - echo " hnsensor: $HNSENSOR" >> $TMP/$MINION_ID.sls + echo " hnsensor: $HNSENSOR" >> $SENSORPILLARPATH/$MINION_ID.sls fi - echo " access_key: $ACCESS_KEY" >> $TMP/$MINION_ID.sls - echo " access_secret: $ACCESS_SECRET" >> $TMP/$MINION_ID.sls + echo " access_key: $ACCESS_KEY" >> $SENSORPILLARPATH/$MINION_ID.sls + echo " access_secret: $ACCESS_SECRET" >> $SENSORPILLARPATH/$MINION_ID.sls } @@ -1811,6 +1822,8 @@ if (whiptail_you_sure); then echo "** Generating the master pillar **" >> $SETUPLOG master_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n30\nAccepting Salt Keys... \nXXX" + echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" + copy_minion_pillars >> $SETUPLOG 2>&1 # Do a checkin to push the key up echo "** Pushing the key up to Master **" >> $SETUPLOG salt_firstcheckin >> $SETUPLOG 2>&1 @@ -1931,8 +1944,8 @@ if (whiptail_you_sure); then docker_install >> $SETUPLOG 2>&1 echo -e "XXX\n22\nConfiguring Salt Minion... \nXXX" configure_minion sensor >> $SETUPLOG 2>&1 - echo -e "XXX\n24\nCopying Sensor Pillar to Master... \nXXX" - copy_minion_pillar sensors >> $SETUPLOG 2>&1 + echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" + copy_minion_pillars >> $SETUPLOG 2>&1 echo -e "XXX\n25\nSending Salt Key to Master... \nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 echo -e "XXX\n26\nTelling the Master to Accept Key... \nXXX" @@ -2044,6 +2057,8 @@ if (whiptail_you_sure); then node_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n8\nCreating firewall policies... \nXXX" set_initial_firewall_policy >> $SETUPLOG 2>&1 + echo -e "XXX\n24\nCopying Minion Pillars to Master... 
\nXXX" + copy_minion_pillars >> $SETUPLOG 2>&1 echo -e "XXX\n10\nRegistering agent... \nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 echo -e "XXX\n11\nAccepting Agent... \nXXX" @@ -2185,7 +2200,8 @@ if (whiptail_you_sure); then configure_minion node >> $SETUPLOG 2>&1 set_node_type >> $SETUPLOG 2>&1 node_pillar >> $SETUPLOG 2>&1 - copy_minion_pillar nodes >> $SETUPLOG 2>&1 + echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" + copy_minion_pillars >> $SETUPLOG 2>&1 echo -e "XXX\n35\nSending and Accepting Salt Key... \nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 # Accept the Salt Key From 21494ab1ffd32a801cd261be95fd5ec6c5cec0d0 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 16:01:12 -0500 Subject: [PATCH 03/12] install package python-dateutil during setup to help with job scheduling - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/so-setup-network.sh b/so-setup-network.sh index bed63a0c6..38f5b7e44 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -833,7 +833,7 @@ EOF fi yum clean expire-cache - yum -y install salt-minion-2018.3.4 yum-utils device-mapper-persistent-data lvm2 openssl + yum -y install salt-minion-2018.3.4 yum-utils device-mapper-persistent-data lvm2 openssl python-dateutil yum -y update exclude=salt* systemctl enable salt-minion @@ -882,7 +882,7 @@ EOF # Initialize the new repos apt-get update >> $SETUPLOG 2>&1 - apt-get -y install salt-minion=2018.3.4+ds-1 salt-common=2018.3.4+ds-1 python-m2crypto >> $SETUPLOG 2>&1 + apt-get -y install salt-minion=2018.3.4+ds-1 salt-common=2018.3.4+ds-1 python-m2crypto python-dateutil >> $SETUPLOG 2>&1 apt-mark hold salt-minion salt-common else @@ -896,7 +896,7 @@ EOF echo "deb https://packages.wazuh.com/3.x/apt/ stable main" | tee /etc/apt/sources.list.d/wazuh.list # Initialize the new repos apt-get update >> $SETUPLOG 2>&1 - apt-get -y install 
salt-minion=2018.3.4+ds-1 salt-common=2018.3.4+ds-1 python-m2crypto >> $SETUPLOG 2>&1 + apt-get -y install salt-minion=2018.3.4+ds-1 salt-common=2018.3.4+ds-1 python-m2crypto python-dateutil >> $SETUPLOG 2>&1 apt-mark hold salt-minion salt-common fi From e6421f45fb3ae862a115ce419d3bdf512c3e902b Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 17:22:27 -0500 Subject: [PATCH 04/12] add state to add os patch schedule and state to apply patches - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- pillar/top.sls | 3 +++ salt/patch/os.sls | 4 ++++ salt/patch/schedule/os.sls | 32 ++++++++++++++++++++++++++++++++ salt/top.sls | 3 +++ 4 files changed, 42 insertions(+) create mode 100644 salt/patch/os.sls create mode 100644 salt/patch/schedule/os.sls diff --git a/pillar/top.sls b/pillar/top.sls index 031352a11..13ea5e5a8 100644 --- a/pillar/top.sls +++ b/pillar/top.sls @@ -1,4 +1,7 @@ base: + '*': + - patch.os.{{ grains.id }} + 'G@role:so-sensor': - sensors.{{ grains.id }} - static diff --git a/salt/patch/os.sls b/salt/patch/os.sls new file mode 100644 index 000000000..d3ae6a1ff --- /dev/null +++ b/salt/patch/os.sls @@ -0,0 +1,4 @@ +patch_os: + pkg.uptodate: + - name: patch_os + - refresh: True diff --git a/salt/patch/schedule/os.sls b/salt/patch/schedule/os.sls new file mode 100644 index 000000000..6056f8ee3 --- /dev/null +++ b/salt/patch/schedule/os.sls @@ -0,0 +1,32 @@ +{% if salt['pillar.get']('patch:os:schedule') != 'manual' and salt['pillar.get']('patch:os:schedule') != 'auto' %} + +patch_os_schedule: + schedule.present: + - function: state.sls + - job_args: + - patch.os + - when: + {% for day in pillar['patch']['os']['schedule'] %} + {% for day, time in day.iteritems() %} + {% for each_time in time %} + - {{day}} {{each_time}} + {% endfor %} + {% endfor %} + {% endfor %} + - splay: + start: 5 + end: 10 + +{% elif salt['pillar.get']('patch:os:schedule') == 'auto' %} + +patch_os_schedule: + schedule.present: + - function: 
state.sls + - job_args: + - patch.os + - minutes: 20 + - splay: + start: 150 + end: 300 + +{% endif %} diff --git a/salt/top.sls b/salt/top.sls index cf5d47699..08d82285c 100644 --- a/salt/top.sls +++ b/salt/top.sls @@ -5,6 +5,9 @@ {%- set THEHIVE = salt['pillar.get']('master:thehive', '0') -%} {%- set PLAYBOOK = salt['pillar.get']('master:playbook', '0') -%} base: + '*': + - patch.schedule.os + 'G@role:so-sensor': - ca - ssl From c96678f5a0b87530e12d23407b20c7f279fb4865 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 17:34:36 -0500 Subject: [PATCH 05/12] clean up variable name - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- salt/patch/schedule/os.sls | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/salt/patch/schedule/os.sls b/salt/patch/schedule/os.sls index 6056f8ee3..ebb84ad28 100644 --- a/salt/patch/schedule/os.sls +++ b/salt/patch/schedule/os.sls @@ -6,10 +6,10 @@ patch_os_schedule: - job_args: - patch.os - when: - {% for day in pillar['patch']['os']['schedule'] %} - {% for day, time in day.iteritems() %} - {% for each_time in time %} - - {{day}} {{each_time}} + {% for days in pillar['patch']['os']['schedule'] %} + {% for day, times in days.iteritems() %} + {% for time in times %} + - {{day}} {{time}} {% endfor %} {% endfor %} {% endfor %} From 9c83cceba15dff532566cb485f2cbd569265438d Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 5 Nov 2019 18:02:17 -0500 Subject: [PATCH 06/12] put quotes around hours to interpret properly for 24h - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/so-setup-network.sh b/so-setup-network.sh index 38f5b7e44..57280473a 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -645,7 +645,7 @@ patch_pillar() { for psh in "${PATCHSCHEDULEHOURS[@]}" do psh=$(echo $psh | sed 's/"//g') - echo " - $psh" >> $OSPATCHPILLAR + echo " 
- '$psh'" >> $OSPATCHPILLAR done done ;; From fa87308baca32d1a66e431209f679163543d3ce6 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Wed, 6 Nov 2019 07:47:00 -0500 Subject: [PATCH 07/12] change splay for scheduled pkg update - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- salt/patch/schedule/os.sls | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/salt/patch/schedule/os.sls b/salt/patch/schedule/os.sls index ebb84ad28..a041afeea 100644 --- a/salt/patch/schedule/os.sls +++ b/salt/patch/schedule/os.sls @@ -14,8 +14,8 @@ patch_os_schedule: {% endfor %} {% endfor %} - splay: - start: 5 - end: 10 + start: 30 + end: 120 {% elif salt['pillar.get']('patch:os:schedule') == 'auto' %} From 9914e55ec3248b7f367085bab97f2fe99c6d7947 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Thu, 7 Nov 2019 09:49:36 -0500 Subject: [PATCH 08/12] rework of os patch scheduling, added the abilty to enable/disable and adjust splay - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- pillar/top.sls | 3 - salt/patch/{os.sls => os/init.sls} | 0 salt/patch/os/schedule.sls | 76 ++++++++++ salt/patch/os/schedules/example_schedule.yml | 10 ++ salt/patch/schedule/os.sls | 32 ----- salt/top.sls | 2 +- so-setup-network.sh | 139 ++++++++++++++----- 7 files changed, 188 insertions(+), 74 deletions(-) rename salt/patch/{os.sls => os/init.sls} (100%) create mode 100644 salt/patch/os/schedule.sls create mode 100644 salt/patch/os/schedules/example_schedule.yml delete mode 100644 salt/patch/schedule/os.sls diff --git a/pillar/top.sls b/pillar/top.sls index 13ea5e5a8..031352a11 100644 --- a/pillar/top.sls +++ b/pillar/top.sls @@ -1,7 +1,4 @@ base: - '*': - - patch.os.{{ grains.id }} - 'G@role:so-sensor': - sensors.{{ grains.id }} - static diff --git a/salt/patch/os.sls b/salt/patch/os/init.sls similarity index 100% rename from salt/patch/os.sls rename to salt/patch/os/init.sls diff --git a/salt/patch/os/schedule.sls 
b/salt/patch/os/schedule.sls new file mode 100644 index 000000000..9ea98ede7 --- /dev/null +++ b/salt/patch/os/schedule.sls @@ -0,0 +1,76 @@ +{% if salt['pillar.get']('patch:os:schedule_name') %} + {% set patch_os_pillar = salt['pillar.get']('patch:os') %} + {% set schedule_name = patch_os_pillar.schedule_name %} + {% set splay = patch_os_pillar.get('splay', 300) %} + + {% if schedule_name != 'manual' and schedule_name != 'auto' %} + {% import_yaml "patch/os/schedules/"~schedule_name~".yml" as os_schedule %} + + {% if patch_os_pillar.enabled %} + +patch_os_schedule: + schedule.present: + - function: state.sls + - job_args: + - patch.os + - when: + {% for days in os_schedule.patch.os.schedule %} + {% for day, times in days.iteritems() %} + {% for time in times %} + - {{day}} {{time}} + {% endfor %} + {% endfor %} + {% endfor %} + - splay: {{splay}} + - return_job: True + + {% else %} + +disable_patch_os_schedule: + schedule.disabled: + - name: patch_os_schedule + + {% endif %} + + + {% elif schedule_name == 'auto' %} + + {% if patch_os_pillar.enabled %} + +patch_os_schedule: + schedule.present: + - function: state.sls + - job_args: + - patch.os + - minutes: 1 + - splay: {{splay}} + - return_job: True + + {% else %} + +disable_patch_os_schedule: + schedule.disabled: + - name: patch_os_schedule + + {% endif %} + + {% elif schedule_name == 'manual' %} + +remove_patch_os_schedule: + schedule.absent: + - name: patch_os_schedule + + {% endif %} + +{% else %} + +no_os_patch_schedule_name_set: + test.fail_without_changes: + - name: "Set a pillar value for patch:os:schedule_name in this minion's .sls file. If an OS patch schedule is not listed as enabled in show_schedule output below, then OS patches will need to be applied manually until this is corrected." 
+ +show_schedule: + module.run: + - name: schedule.is_enabled + - m_name: patch_os_schedule + +{% endif %} diff --git a/salt/patch/os/schedules/example_schedule.yml b/salt/patch/os/schedules/example_schedule.yml new file mode 100644 index 000000000..b2748ab09 --- /dev/null +++ b/salt/patch/os/schedules/example_schedule.yml @@ -0,0 +1,10 @@ +patch: + os: + schedule: + - Tuesday: + - '15:00' + - Thursday: + - '03:00' + - Saturday: + - '01:00' + - '15:00' diff --git a/salt/patch/schedule/os.sls b/salt/patch/schedule/os.sls deleted file mode 100644 index a041afeea..000000000 --- a/salt/patch/schedule/os.sls +++ /dev/null @@ -1,32 +0,0 @@ -{% if salt['pillar.get']('patch:os:schedule') != 'manual' and salt['pillar.get']('patch:os:schedule') != 'auto' %} - -patch_os_schedule: - schedule.present: - - function: state.sls - - job_args: - - patch.os - - when: - {% for days in pillar['patch']['os']['schedule'] %} - {% for day, times in days.iteritems() %} - {% for time in times %} - - {{day}} {{time}} - {% endfor %} - {% endfor %} - {% endfor %} - - splay: - start: 30 - end: 120 - -{% elif salt['pillar.get']('patch:os:schedule') == 'auto' %} - -patch_os_schedule: - schedule.present: - - function: state.sls - - job_args: - - patch.os - - minutes: 20 - - splay: - start: 150 - end: 300 - -{% endif %} diff --git a/salt/top.sls b/salt/top.sls index 08d82285c..f742a66cf 100644 --- a/salt/top.sls +++ b/salt/top.sls @@ -6,7 +6,7 @@ {%- set PLAYBOOK = salt['pillar.get']('master:playbook', '0') -%} base: '*': - - patch.schedule.os + - patch.os.schedule 'G@role:so-sensor': - ca diff --git a/so-setup-network.sh b/so-setup-network.sh index 57280473a..32217a865 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -268,14 +268,14 @@ copy_master_config() { } -copy_minion_pillars() { +copy_minion_tmp_files() { if [ $INSTALLTYPE == 'MASTERONLY' ] || [ $INSTALLTYPE == 'EVALMODE' ]; then - echo "rsyncing TMP pillar files to pillar base" >> $SETUPLOG 2>&1 - rsync -a -v $TMP/pillar/ 
/opt/so/saltstack/pillar/ >> $SETUPLOG 2>&1 + echo "rsyncing all files in $TMP to /opt/so/saltstack" >> $SETUPLOG 2>&1 + rsync -a -v $TMP/ /opt/so/saltstack/ >> $SETUPLOG 2>&1 else - echo "scp TMP pillar files to pillar base on master" >> $SETUPLOG 2>&1 - scp -prv -i /root/.ssh/so.key $TMP/pillar socore@$MSRV:/opt/so/saltstack/pillar >> $SETUPLOG 2>&1 + echo "scp all files in $TMP to master /opt/so/saltstack" >> $SETUPLOG 2>&1 + scp -prv -i /root/.ssh/so.key $TMP socore@$MSRV:/opt/so/saltstack >> $SETUPLOG 2>&1 fi } @@ -626,38 +626,51 @@ node_pillar() { } patch_pillar() { - OSPATCHPILLARDIR="$TMP/pillar/patch/os" - OSPATCHPILLAR="$OSPATCHPILLARDIR/$MINION_ID.sls" - if [ ! -d $OSPATCHPILLARDIR ] ; then - mkdir -p $OSPATCHPILLARDIR + case $INSTALLTYPE in + MASTERONLY | EVALMODE) + PATCHPILLARPATH=/opt/so/saltstack/pillar/masters + ;; + SENSORONLY) + PATCHPILLARPATH=$SENSORPILLARPATH + ;; + STORAGENODE | PARSINGNODE | HOTNODE | WARMNODE) + PATCHPILLARPATH=$NODEPILLARPATH + ;; + esac + + + echo "" >> $PATCHPILLARPATH/$MINION_ID.sls + echo "patch:" >> $PATCHPILLARPATH/$MINION_ID.sls + echo " os:" >> $PATCHPILLARPATH/$MINION_ID.sls + echo " schedule_name: $PATCHSCHEDULENAME" >> $PATCHPILLARPATH/$MINION_ID.sls + echo " enabled: True" >> $PATCHPILLARPATH/$MINION_ID.sls + echo " splay: 300" >> $PATCHPILLARPATH/$MINION_ID.sls + + +} + +patch_schedule_os_new() { + OSPATCHSCHEDULEDIR="$TMP/salt/patch/os/schedules" + OSPATCHSCHEDULE="$OSPATCHSCHEDULEDIR/$PATCHSCHEDULENAME.yml" + + if [ ! 
-d $OSPATCHSCHEDULEDIR ] ; then + mkdir -p $OSPATCHSCHEDULEDIR fi - touch $OSPATCHPILLAR - echo "patch:" > $OSPATCHPILLAR - case $PATCHSCHEDULE in - Scheduled) - echo " os:" >> $OSPATCHPILLAR - echo " schedule:" >> $OSPATCHPILLAR + + echo "patch:" > $OSPATCHSCHEDULE + echo " os:" >> $OSPATCHSCHEDULE + echo " schedule:" >> $OSPATCHSCHEDULE for psd in "${PATCHSCHEDULEDAYS[@]}" do psd=$(echo $psd | sed 's/"//g') - echo " - $psd:" >> $OSPATCHPILLAR + echo " - $psd:" >> $OSPATCHSCHEDULE for psh in "${PATCHSCHEDULEHOURS[@]}" do psh=$(echo $psh | sed 's/"//g') - echo " - '$psh'" >> $OSPATCHPILLAR + echo " - '$psh'" >> $OSPATCHSCHEDULE done done - ;; - Automatic) - echo " os:" >> $OSPATCHPILLAR - echo " schedule: auto" >> $OSPATCHPILLAR - ;; - Manual) - echo " os:" >> $OSPATCHPILLAR - echo " schedule: manual" >> $OSPATCHPILLAR - ;; - esac } @@ -1516,20 +1529,50 @@ whiptail_passwords_dont_match() { } +whiptail_patch_name_new_schedule() { + + unset PATCHSCHEDULENAME + while [[ -z "$PATCHSCHEDULENAME" ]]; do + PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 75 3>&1 1>&2 2>&3) + done + + +} + whiptail_patch_schedule() { # What kind of patch schedule are we doing? PATCHSCHEDULE=$(whiptail --title "Security Onion Setup" --radiolist \ - "Choose OS patch schedule. This will NOT update Security Onion related tools such as Zeek, Elasticsearch, Kibana, SaltStack, etc." 25 75 5 \ + "Choose OS patch schedule. This will NOT update Security Onion related tools such as Zeek, Elasticsearch, Kibana, SaltStack, etc." 
25 115 5 \ "Automatic" "Package updates will be installed automatically" ON \ "Manual" "Package updates will need to be installed manually" OFF \ - "Scheduled" "Select a schedule on the following screen" OFF 3>&1 1>&2 2>&3 ) + "Import Schedule" "Enter the name of an existing schedule on the following screen and inherit it" OFF \ + "New Schedule" "Configure and name a new schedule on the following screen" OFF 3>&1 1>&2 2>&3 ) local exitstatus=$? whiptail_check_exitstatus $exitstatus } +whiptail_patch_schedule_import() { + + unset PATCHSCHEDULENAME + # Ask to inherit from master + whiptail --title "Security Onion Setup" --yesno "Do you want to inherit the OS patch schedule from the master?" 8 78 + + local exitstatus=$? + if [ $exitstatus == 0 ]; then + PATCHSCHEDULENAME=default + else + while [[ -z "$PATCHSCHEDULENAME" ]]; do + PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "Enter the name of the OS patch schedule you want to inherit. If you leave this as default, it will use the same schedule as the master. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 60 default 3>&1 1>&2 2>&3) + done + fi + +} + whiptail_patch_schedule_select_days() { # Select the days to patch PATCHSCHEDULEDAYS=($(whiptail --title "Security Onion Setup" --checklist \ @@ -1721,11 +1764,23 @@ if (whiptail_you_sure); then # How do we want to handle OS patching? 
manual, auto or scheduled days and hours whiptail_patch_schedule - if [[ $PATCHSCHEDULE == "Scheduled" ]] ; then - whiptail_patch_schedule_select_days - whiptail_patch_schedule_select_hours - fi - patch_pillar + case $PATCHSCHEDULE in + 'New Schedule') + whiptail_patch_schedule_select_days + whiptail_patch_schedule_select_hours + whiptail_patch_name_new_schedule + patch_schedule_os_new + ;; + 'Import Schedule') + whiptail_patch_schedule_import + ;; + Automatic) + PATCHSCHEDULENAME=auto + ;; + Manual) + PATCHSCHEDULENAME=manual + ;; + esac #################### ## Master ## @@ -1821,9 +1876,11 @@ if (whiptail_you_sure); then master_static >> $SETUPLOG 2>&1 echo "** Generating the master pillar **" >> $SETUPLOG master_pillar >> $SETUPLOG 2>&1 + echo "** Generating the patch pillar **" >> $SETUPLOG + patch_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n30\nAccepting Salt Keys... \nXXX" echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" - copy_minion_pillars >> $SETUPLOG 2>&1 + copy_minion_tmp_files >> $SETUPLOG 2>&1 # Do a checkin to push the key up echo "** Pushing the key up to Master **" >> $SETUPLOG salt_firstcheckin >> $SETUPLOG 2>&1 @@ -1938,6 +1995,8 @@ if (whiptail_you_sure); then network_setup >> $SETUPLOG 2>&1 echo -e "XXX\n4\nGenerating Sensor Pillar... \nXXX" sensor_pillar >> $SETUPLOG 2>&1 + echo "** Generating the patch pillar **" >> $SETUPLOG + patch_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n5\nInstalling Salt Components... \nXXX" saltify >> $SETUPLOG 2>&1 echo -e "XXX\n20\nInstalling Docker... \nXXX" @@ -1945,7 +2004,7 @@ if (whiptail_you_sure); then echo -e "XXX\n22\nConfiguring Salt Minion... \nXXX" configure_minion sensor >> $SETUPLOG 2>&1 echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" - copy_minion_pillars >> $SETUPLOG 2>&1 + copy_minion_tmp_files >> $SETUPLOG 2>&1 echo -e "XXX\n25\nSending Salt Key to Master... \nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 echo -e "XXX\n26\nTelling the Master to Accept Key... 
\nXXX" @@ -2049,6 +2108,8 @@ if (whiptail_you_sure); then master_static >> $SETUPLOG 2>&1 echo -e "XXX\n7\nCreating the master pillar... \nXXX" master_pillar >> $SETUPLOG 2>&1 + echo "** Generating the patch pillar **" >> $SETUPLOG + patch_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n7\nConfiguring minion... \nXXX" configure_minion eval >> $SETUPLOG 2>&1 echo -e "XXX\n7\nSetting the node type to eval... \nXXX" @@ -2058,7 +2119,7 @@ if (whiptail_you_sure); then echo -e "XXX\n8\nCreating firewall policies... \nXXX" set_initial_firewall_policy >> $SETUPLOG 2>&1 echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" - copy_minion_pillars >> $SETUPLOG 2>&1 + copy_minion_tmp_files >> $SETUPLOG 2>&1 echo -e "XXX\n10\nRegistering agent... \nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 echo -e "XXX\n11\nAccepting Agent... \nXXX" @@ -2200,8 +2261,10 @@ if (whiptail_you_sure); then configure_minion node >> $SETUPLOG 2>&1 set_node_type >> $SETUPLOG 2>&1 node_pillar >> $SETUPLOG 2>&1 + echo "** Generating the patch pillar **" >> $SETUPLOG + patch_pillar >> $SETUPLOG 2>&1 echo -e "XXX\n24\nCopying Minion Pillars to Master... \nXXX" - copy_minion_pillars >> $SETUPLOG 2>&1 + copy_minion_tmp_files >> $SETUPLOG 2>&1 echo -e "XXX\n35\nSending and Accepting Salt Key... 
\nXXX" salt_firstcheckin >> $SETUPLOG 2>&1 # Accept the Salt Key From 98cd96eeddd47f9bf68c9061dba4590c6339dbe0 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Thu, 7 Nov 2019 10:18:40 -0500 Subject: [PATCH 09/12] change auto schedule to every 8 hours and update wording of whiptail auto patch selection - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- salt/patch/os/schedule.sls | 2 +- so-setup-network.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/salt/patch/os/schedule.sls b/salt/patch/os/schedule.sls index 9ea98ede7..128862fa7 100644 --- a/salt/patch/os/schedule.sls +++ b/salt/patch/os/schedule.sls @@ -42,7 +42,7 @@ patch_os_schedule: - function: state.sls - job_args: - patch.os - - minutes: 1 + - hours: 8 - splay: {{splay}} - return_job: True diff --git a/so-setup-network.sh b/so-setup-network.sh index 32217a865..40ada00cc 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -1545,7 +1545,7 @@ whiptail_patch_schedule() { # What kind of patch schedule are we doing? PATCHSCHEDULE=$(whiptail --title "Security Onion Setup" --radiolist \ "Choose OS patch schedule. This will NOT update Security Onion related tools such as Zeek, Elasticsearch, Kibana, SaltStack, etc." 
25 115 5 \ - "Automatic" "Package updates will be installed automatically" ON \ + "Automatic" "Package updates will be installed automatically every 8 hours if available" ON \ "Manual" "Package updates will need to be installed manually" OFF \ "Import Schedule" "Enter the name of an existing schedule on the following screen and inherit it" OFF \ "New Schedule" "Configure and name a new schedule on the following screen" OFF 3>&1 1>&2 2>&3 ) From 43915488e27689fb444dfd18d3758d45f126ced6 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Thu, 7 Nov 2019 10:50:27 -0500 Subject: [PATCH 10/12] prevent hostname from being localhost in setup script - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/101 --- so-setup-network.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/so-setup-network.sh b/so-setup-network.sh index 40ada00cc..c82a9f6f5 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -1654,6 +1654,13 @@ whiptail_set_hostname() { HOSTNAME=$(whiptail --title "Security Onion Setup" --inputbox \ "Enter the Hostname you would like to set." 10 60 $HOSTNAME 3>&1 1>&2 2>&3) + while [[ "$HOSTNAME" == 'localhost' ]] ; do + whiptail --title "Security Onion Setup" --msgbox "Please choose a hostname that isn't localhost." 8 65 + HOSTNAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "Enter the Hostname you would like to set." 10 60 $HOSTNAME 3>&1 1>&2 2>&3) + done + + local exitstatus=$? 
whiptail_check_exitstatus $exitstatus From d849c33b9c37ce53ee306ba26a439c6aa18f33a6 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Thu, 7 Nov 2019 10:57:35 -0500 Subject: [PATCH 11/12] ensure the os patch schedule name is set - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/so-setup-network.sh b/so-setup-network.sh index c82a9f6f5..f43bebd70 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -1530,9 +1530,12 @@ whiptail_passwords_dont_match() { } whiptail_patch_name_new_schedule() { + + PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 75 3>&1 1>&2 2>&3) - unset PATCHSCHEDULENAME while [[ -z "$PATCHSCHEDULENAME" ]]; do + whiptail --title "Security Onion Setup" --msgbox "Please enter a name for this OS patch schedule." 8 65 PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. 
Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 75 3>&1 1>&2 2>&3) done From c640a0bf9bb55feea412f7c2b3d0cb11f0e397b4 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Thu, 7 Nov 2019 11:22:42 -0500 Subject: [PATCH 12/12] ensure inherit patch schedule name isn't blank, allow cancel from inherit screen - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/84 --- so-setup-network.sh | 31 +++++++++++++++++++------------ 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/so-setup-network.sh b/so-setup-network.sh index f43bebd70..de7b6f137 100644 --- a/so-setup-network.sh +++ b/so-setup-network.sh @@ -1532,12 +1532,17 @@ whiptail_passwords_dont_match() { whiptail_patch_name_new_schedule() { PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ - "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 75 3>&1 1>&2 2>&3) + "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 105 3>&1 1>&2 2>&3) + + local exitstatus=$? + whiptail_check_exitstatus $exitstatus while [[ -z "$PATCHSCHEDULENAME" ]]; do whiptail --title "Security Onion Setup" --msgbox "Please enter a name for this OS patch schedule." 8 65 PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ - "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 75 3>&1 1>&2 2>&3) + "What name do you want to give this OS patch schedule? This schedule needs to be named uniquely. 
Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 105 3>&1 1>&2 2>&3) + local exitstatus=$? + whiptail_check_exitstatus $exitstatus done @@ -1561,18 +1566,20 @@ whiptail_patch_schedule() { whiptail_patch_schedule_import() { unset PATCHSCHEDULENAME - # Ask to inherit from master - whiptail --title "Security Onion Setup" --yesno "Do you want to inherit the OS patch schedule from the master?" 8 78 + PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "Enter the name of the OS patch schedule you want to inherit. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 60 3>&1 1>&2 2>&3) local exitstatus=$? - if [ $exitstatus == 0 ]; then - PATCHSCHEDULENAME=default - else - while [[ -z "$PATCHSCHEDULENAME" ]]; do - PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ - "Enter the name of the OS patch schedule you want to inherit. If you leave this as default, it will use the same schedule as the master. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 60 default 3>&1 1>&2 2>&3) - done - fi + whiptail_check_exitstatus $exitstatus + + while [[ -z "$PATCHSCHEDULENAME" ]]; do + whiptail --title "Security Onion Setup" --msgbox "Please enter a name for the OS patch schedule you want to inherit." 8 65 + PATCHSCHEDULENAME=$(whiptail --title "Security Onion Setup" --inputbox \ + "Enter the name of the OS patch schedule you want to inherit. Available schedules can be found on the master under /opt/so/salt/patch/os/schedules/.yml" 10 60 3>&1 1>&2 2>&3) + + local exitstatus=$? + whiptail_check_exitstatus $exitstatus + done }