Merge pull request #10124 from Security-Onion-Solutions/2.4/ubuntu

2.4/ubuntu
Josh Patterson committed 2023-04-13 16:35:53 -04:00 (committed by GitHub)
20 changed files with 154 additions and 113 deletions

View File

@@ -25,6 +25,7 @@ config_backup_script:
so_config_backup:
cron.present:
- name: /usr/sbin/so-config-backup > /dev/null 2>&1
- identifier: so_config_backup
- user: root
- minute: '1'
- hour: '0'
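
The recurring change in this pull request is adding an explicit identifier to each cron.present state, as in the hunk above. Salt matches the installed crontab entry by that identifier (it tags the entry with a SALT_CRON_IDENTIFIER comment), so a later change to the command in name updates the entry in place instead of creating a duplicate. A minimal sketch with illustrative names:

# Illustrative example only; the state ID, command, and schedule are placeholders.
example_backup_cron:
  cron.present:
    - name: /usr/sbin/example-backup > /dev/null 2>&1
    - identifier: example_backup_cron    # stable key used to find and update the crontab entry
    - user: root
    - minute: '1'
    - hour: '0'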

View File

@@ -133,8 +133,10 @@ so-status_script:
{% if GLOBALS.role in GLOBALS.sensor_roles %}
# Add sensor cleanup
/usr/sbin/so-sensor-clean:
so-sensor-clean:
cron.present:
- name: /usr/sbin/so-sensor-clean
- identifier: so-sensor-clean
- user: root
- minute: '*'
- hour: '*'
@@ -154,8 +156,10 @@ sensorrotateconf:
- source: salt://common/files/sensor-rotate.conf
- mode: 644
/usr/local/bin/sensor-rotate:
sensor-rotate:
cron.present:
- name: /usr/local/bin/sensor-rotate
- identifier: sensor-rotate
- user: root
- minute: '1'
- hour: '0'
@@ -178,8 +182,10 @@ commonlogrotateconf:
- template: jinja
- mode: 644
/usr/local/bin/common-rotate:
common-rotate:
cron.present:
- name: /usr/local/bin/common-rotate
- identifier: common-rotate
- user: root
- minute: '1'
- hour: '0'
@@ -200,17 +206,11 @@ sostatus_log:
- name: /opt/so/log/sostatus/status.log
- mode: 644
common_pip_dependencies:
pip.installed:
- user: root
- pkgs:
- rich
- target: /usr/lib64/python3.6/site-packages
# Install sostatus check cron
sostatus_check_cron:
# Install sostatus check cron. This is used to populate Grid.
so-status_check_cron:
cron.present:
- name: '/usr/sbin/so-status -j > /opt/so/log/sostatus/status.log 2>&1'
- name: 'USER=root /usr/sbin/so-status -j > /opt/so/log/sostatus/status.log 2>&1'
- identifier: so-status_check_cron
- user: root
- minute: '*/1'
- hour: '*'
@@ -220,7 +220,7 @@ sostatus_check_cron:
remove_post_setup_cron:
cron.absent:
- name: 'salt-call state.highstate'
- name: 'PATH=$PATH:/usr/sbin salt-call state.highstate'
- identifier: post_setup_cron
{% if GLOBALS.role not in ['eval', 'manager', 'managersearch', 'standalone'] %}
@@ -246,9 +246,10 @@ raidpkgs:
{% endif %}
# Install raid check cron
so_raid_status:
so-raid-status:
cron.present:
- name: '/usr/sbin/so-raid-status > /dev/null 2>&1'
- identifier: so-raid-status
- user: root
- minute: '*/15'
- hour: '*'
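
The remove_post_setup_cron hunk relies on the same identifier matching from the other direction: cron.absent also accepts an identifier, so an entry added during setup can be located and removed even after its command string has changed. A hedged sketch with placeholder names (in practice the two states would live in separate sls files):

# Illustrative present/absent pair sharing one identifier.
add_example_cron:
  cron.present:
    - name: 'PATH=$PATH:/usr/sbin example-command'
    - identifier: example_cron
    - user: root
    - minute: '*/1'

# A later cleanup state removes the entry by the same identifier.
remove_example_cron:
  cron.absent:
    - name: 'PATH=$PATH:/usr/sbin example-command'
    - identifier: example_cron
    - user: root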

View File

@@ -5,28 +5,37 @@ commonpkgs:
pkg.installed:
- skip_suggestions: True
- pkgs:
- chrony
- apache2-utils
- wget
- ntpdate
- jq
- python3-docker
- curl
- ca-certificates
- software-properties-common
- apt-transport-https
- openssl
- netcat
- python3-mysqldb
- sqlite3
- libssl-dev
- python3-dateutil
- python3-m2crypto
- python3-mysqldb
- python3-packaging
- python3-watchdog
- python3-lxml
- git
- vim
# since Ubuntu requires an internet connection, we can use pip to install modules
python3-pip:
pkg.installed
python-rich:
pip.installed:
- name: rich
- target: /usr/local/lib/python3.8/dist-packages/
- require:
- pkg: python3-pip
{% elif GLOBALS.os == 'Rocky' %}
commonpkgs:
pkg.installed:
@@ -51,6 +60,7 @@ commonpkgs:
- python3-m2crypto
- rsync
- python3-rich
- python3-pyyaml
- python3-watchdog
- python3-packaging
- unzip
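
On the Ubuntu branch above, the rich module is installed with pip rather than a distro package, and the pip state is ordered after python3-pip with a require. A rough sketch of that dependency pattern using a placeholder module name:

# Illustrative sketch: install a pip module only after the pip package itself exists.
python3-pip:
  pkg.installed

example-pip-module:
  pip.installed:
    - name: somepackage                                  # placeholder module name
    - target: /usr/local/lib/python3.8/dist-packages/    # install location used in the hunk above
    - require:
      - pkg: python3-pip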

View File

@@ -126,9 +126,10 @@ delete_so-curator_so-status.disabled:
- regex: ^so-curator$
{% endif %}
so-curatorclusterclose:
so-curator-cluster-close:
cron.present:
- name: /usr/sbin/so-curator-cluster-close > /opt/so/log/curator/cron-close.log 2>&1
- identifier: so-curator-cluster-close
- user: root
- minute: '2'
- hour: '*/1'
@@ -136,9 +137,10 @@ so-curatorclusterclose:
- month: '*'
- dayweek: '*'
so-curatorclusterdeletecron:
so-curator-cluster-delete:
cron.present:
- name: /usr/sbin/so-curator-cluster-delete > /opt/so/log/curator/cron-cluster-delete.log 2>&1
- identifier: so-curator-cluster-delete
- user: root
- minute: '*/5'
- hour: '*'

View File

@@ -20,9 +20,10 @@ idstoolslogdir:
- group: 939
- makedirs: True
so-ruleupdatecron:
so-rule-update:
cron.present:
- name: /usr/sbin/so-rule-update > /opt/so/log/idstools/download.log 2>&1
- identifier: so-rule-update
- user: root
- minute: '1'
- hour: '7'

View File

@@ -127,6 +127,7 @@ metrics_link_file:
get_influxdb_size:
cron.present:
- name: 'du -s -k /nsm/influxdb | cut -f1 > /opt/so/log/telegraf/influxdb_size.log 2>&1'
- identifier: get_influxdb_size
- user: root
- minute: '*/1'
- hour: '*'

View File

@@ -51,7 +51,7 @@ repo_sync_script:
- group: root
- mode: 755
reposync_cron:
so-repo-sync:
{% if MANAGERMERGED.reposync.enabled %}
cron.present:
{% else %}
@@ -59,6 +59,7 @@ reposync_cron:
{% endif %}
- user: socore
- name: '/usr/sbin/so-repo-sync >> /opt/so/log/reposync/reposync.log 2>&1'
- identifier: so-repo-sync
- hour: '{{ MANAGERMERGED.reposync.hour }}'
- minute: '{{ MANAGERMERGED.reposync.minute }}'
@@ -83,10 +84,11 @@ yara_update_script:
ISAIRGAP: {{ GLOBALS.airgap }}
EXCLUDEDRULES: {{ STRELKAMERGED.rules.excluded }}
strelka_yara_update:
strelka-yara-update:
cron.present:
- user: root
- name: '/usr/sbin/so-yara-update >> /nsm/strelka/log/yara-update.log 2>&1'
- identifier: strelka-yara-update
- hour: '7'
- minute: '1'
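
The reposync hunk above keeps a pattern worth calling out: the same state ID switches between cron.present and cron.absent based on a pillar-driven flag, so disabling the feature actively removes the crontab entry rather than merely not adding it. A sketch with a placeholder pillar key:

# Illustrative sketch: pillar-driven toggle between present and absent.
{% set cron_enabled = salt['pillar.get']('example:cron_enabled', False) %}
example-toggle-cron:
{% if cron_enabled %}
  cron.present:
{% else %}
  cron.absent:
{% endif %}
    - user: root
    - name: '/usr/sbin/example-job >> /opt/so/log/example/example.log 2>&1'
    - identifier: example-toggle-cron
    - hour: '3'
    - minute: '0'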

View File

@@ -24,8 +24,9 @@ sync_es_users:
# we don't want this added too early in setup, so we add the onlyif to verify 'startup_states: highstate'
# is in the minion config. That line is added before the final highstate during setup
sosyncusers:
so-user_sync:
cron.present:
- user: root
- name: 'PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin /usr/sbin/so-user sync &>> /opt/so/log/soc/sync.log'
- identifier: so-user_sync
- onlyif: "grep 'startup_states: highstate' /etc/salt/minion"

View File

@@ -2,8 +2,13 @@
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
{% from 'vars/globals.map.jinja' import GLOBALS %}
{% from 'ntp/config.map.jinja' import NTPCONFIG %}
chrony_pkg:
pkg.installed:
- name: chrony
chronyconf:
file.managed:
- name: /etc/chrony.conf
@@ -12,8 +17,14 @@ chronyconf:
- defaults:
NTPCONFIG: {{ NTPCONFIG }}
{% if GLOBALS.os == 'Rocky' %}
chronyd:
{% else %}
chrony:
{% endif %}
service.running:
- enable: True
- watch:
- file: chronyconf
- require:
- pkg: chrony_pkg
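
Taken together, the chrony hunks form the usual package/config/service chain, with Jinja selecting the service name per OS (chronyd on Rocky, chrony on Ubuntu). A condensed sketch of that chain; the config source and defaults are assumed to sit in the unchanged context:

# Condensed sketch, not a verbatim copy of the file.
chrony_pkg:
  pkg.installed:
    - name: chrony

chronyconf:
  file.managed:
    - name: /etc/chrony.conf
    # source, template, and defaults omitted; they sit in the unchanged context

{% if GLOBALS.os == 'Rocky' %}
chronyd:
{% else %}
chrony:
{% endif %}
  service.running:
    - enable: True
    - watch:
      - file: chronyconf    # restart when the config changes
    - require:
      - pkg: chrony_pkg     # install the package before managing the service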

View File

@@ -113,15 +113,17 @@ append_so-playbook_so-status.conf:
{% endif %}
so-playbooksynccron:
so-playbook-sync_cron:
cron.present:
- name: /usr/sbin/so-playbook-sync > /opt/so/log/playbook/sync.log 2>&1
- identifier: so-playbook-sync_cron
- user: root
- minute: '*/5'
so-playbookruleupdatecron:
so-playbook-ruleupdate_cron:
cron.present:
- name: /usr/sbin/so-playbook-ruleupdate > /opt/so/log/playbook/update.log 2>&1
- identifier: so-playbook-ruleupdate_cron
- user: root
- minute: '1'
- hour: '6'

View File

@@ -1,20 +0,0 @@
# this removes the repo file left by bootstrap-salt.sh without -r
remove_salt.list:
file.absent:
- name: /etc/apt/sources.list.d/salt.list
saltstack.list:
file.managed:
- name: /etc/apt/sources.list.d/saltstack.list
- contents:
- deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/{{grains.osrelease}}/amd64/salt3004.2/ {{grains.oscodename}} main
apt_update:
cmd.run:
- name: apt-get update
- onchanges:
- file: saltstack.list
- timeout: 30
- retry:
attempts: 5
interval: 30

View File

@@ -1,16 +1,8 @@
{% from 'vars/globals.map.jinja' import GLOBALS %}
{% if GLOBALS.os != 'Rocky' %}
{% if grains.oscodename == 'focal' %}
saltpymodules:
pkg.installed:
- pkgs:
{% if grains['oscodename'] == 'bionic' %}
- python-m2crypto
- python-docker
{% elif grains['oscodename'] == 'focal' %}
- python3-m2crypto
- python3-docker
{% endif %}
{% endif %}
salt_bootstrap:

View File

@@ -12,8 +12,9 @@ state-apply-test:
start: 0
end: 180
/usr/sbin/so-salt-minion-check -q:
so-salt-minion-check_cron:
cron.present:
- identifier: so-salt-minion-check
- name: /usr/sbin/so-salt-minion-check -q
- identifier: so-salt-minion-check_cron
- user: root
- minute: '*/5'

View File

@@ -1,6 +1,7 @@
post_setup_cron:
cron.present:
- name: 'PATH=$PATH:/usr/sbin salt-call state.highstate'
- identifier: post_setup_cron
- user: root
- minute: '*/1'
- identifier: post_setup_cron

View File

@@ -91,6 +91,7 @@ socusersroles:
salt-relay:
cron.present:
- name: 'ps -ef | grep salt-relay.sh | grep -v grep > /dev/null 2>&1 || /opt/so/saltstack/default/salt/soc/files/bin/salt-relay.sh >> /opt/so/log/soc/salt-relay.log 2>&1 &'
- identifier: salt-relay
so-soc:
docker_container.running:

View File

@@ -205,11 +205,13 @@ filecheck_restart:
filecheck_run:
cron.present:
- name: 'ps -ef | grep filecheck | grep -v grep > /dev/null 2>&1 || python3 /opt/so/conf/strelka/filecheck >> /opt/so/log/strelka/filecheck_stdout.log 2>&1 &'
- identifier: filecheck_run
- user: {{ filecheck_runas }}
filcheck_history_clean:
cron.present:
- name: '/usr/bin/find /nsm/strelka/history/ -type f -mtime +2 -exec rm {} + > /dev/null 2>&1'
- identifier: filecheck_history_clean
- minute: '33'
# End Filecheck Section
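
Both the salt-relay entry and filecheck_run above use the same keep-alive idiom: the cron command is itself a guard that starts the process in the background only if it is not already running. Sketched generically with placeholder process and log names:

# Illustrative keep-alive cron: every minute, start the daemon unless a copy is already running.
example_keepalive:
  cron.present:
    - name: 'ps -ef | grep example-daemon | grep -v grep > /dev/null 2>&1 || /usr/local/bin/example-daemon >> /opt/so/log/example-daemon.log 2>&1 &'
    - identifier: example_keepalive
    - user: root
    - minute: '*'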

View File

@@ -79,8 +79,10 @@ surilogscript:
- source: salt://suricata/cron/surilogcompress
- mode: 755
/usr/local/bin/surilogcompress:
surilogcompress:
cron.present:
- name: /usr/local/bin/surilogcompress
- identifier: surilogcompress
- user: suricata
- minute: '17'
- hour: '*'
@@ -181,16 +183,6 @@ delete_so-suricata_so-status.disabled:
- regex: ^so-suricata$
{% endif %}
surirotate:
cron.absent:
- name: /usr/local/bin/surirotate
- user: root
- minute: '11'
- hour: '*'
- daymonth: '*'
- month: '*'
- dayweek: '*'
so-suricata-eve-clean:
file.managed:
- name: /usr/sbin/so-suricata-eve-clean
@@ -204,6 +196,7 @@ so-suricata-eve-clean:
clean_suricata_eve_files:
cron.present:
- name: /usr/sbin/so-suricata-eve-clean > /dev/null 2>&1
- identifier: clean_suricata_eve_files
- user: root
- minute: '*/5'
- hour: '*'

View File

@@ -674,8 +674,13 @@ configure_ntp() {
'rtcsync' \
'logdir /var/log/chrony' >> $chrony_conf
systemctl enable chronyd
systemctl restart chronyd
if [ "$OS" == 'rocky' ]; then
systemctl enable chronyd
systemctl restart chronyd
elif [ "$OS" == 'ubuntu' ]; then
systemctl enable chrony
systemctl restart chrony
fi
# Tell the chrony daemon to sync time & update the system time
# Since these commands only make a call to chronyd, wait after each command to make sure the changes are made
@@ -960,19 +965,23 @@ installer_progress_loop() {
}
installer_prereq_packages() {
if [ "$OS" == rocky ]; then
if [[ ! $is_iso ]]; then
if ! command -v nmcli > /dev/null 2>&1; then
logCmd "dnf -y install NetworkManager"
fi
fi
logCmd "systemctl enable NetworkManager"
logCmd "systemctl start NetworkManager"
elif [ "$OS" == ubuntu ]; then
# if [ "$OS" == rocky ]; then
# if [[ ! $is_iso ]]; then
# if ! command -v nmcli > /dev/null 2>&1; then
# logCmd "dnf -y install NetworkManager"
# fi
# fi
# logCmd "systemctl enable NetworkManager"
# logCmd "systemctl start NetworkManager"
# el
if [ "$OS" == ubuntu ]; then
# Print message to stdout so the user knows setup is doing something
info "Running apt-get update"
retry 150 10 "apt-get update" "" "Err:" >> "$setup_log" 2>&1 || exit 1
# Install network manager so we can do interface stuff
if ! command -v nmcli > /dev/null 2>&1; then
info "Installing network-manager"
retry 150 10 "apt-get -y install network-manager" >> "$setup_log" 2>&1 || exit 1
{
systemctl enable NetworkManager
@@ -2009,14 +2018,19 @@ saltify() {
)
retry 150 20 "apt-get -y install ${pkg_arr[*]}" || exit 1
logCmd "mkdir -vp /opt/so/gpg"
logCmd "wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.securityonion.net/file/securityonion-repo/ubuntu/20.04/amd64/salt/SALTSTACK-GPG-KEY.pub"
logCmd "wget -q --inet4-only -O /opt/so/gpg/docker.pub https://download.docker.com/linux/ubuntu/gpg"
logCmd "apt-key add /opt/so/gpg/SALTSTACK-GPG-KEY.pub"
logCmd "apt-key add /opt/so/gpg/docker.pub"
logCmd "mkdir -vp /etc/apt/keyrings"
#logCmd "wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.securityonion.net/file/securityonion-repo/ubuntu/20.04/amd64/salt/SALTSTACK-GPG-KEY.pub"
logCmd "wget -q --inet4-only -O /etc/apt/keyrings/docker.pub https://download.docker.com/linux/ubuntu/gpg"
logCmd "curl -fsSL -o /etc/apt/keyrings/salt-archive-keyring-2023.gpg https://repo.saltproject.io/salt_rc/salt/py3/ubuntu/20.04/amd64/minor/3006.0rc3/SALT-PROJECT-GPG-PUBKEY-2023.gpg"
echo "deb [signed-by=/etc/apt/keyrings/salt-archive-keyring-2023.gpg] https://repo.saltproject.io/salt_rc/salt/py3/ubuntu/20.04/amd64/minor/3006.0rc3/ focal main" | sudo tee /etc/apt/sources.list.d/salt.list
logCmd "apt-key add /etc/apt/keyrings/salt-archive-keyring-2023.gpg"
#logCmd "apt-key add /opt/so/gpg/SALTSTACK-GPG-KEY.pub"
logCmd "apt-key add /etc/apt/keyrings/docker.pub"
# Add SO Saltstack Repo
echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/20.04/amd64/salt3004.2/ focal main" > /etc/apt/sources.list.d/saltstack.list
#echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/20.04/amd64/salt3004.2/ focal main" > /etc/apt/sources.list.d/saltstack.list
# Add Docker Repo
add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
@@ -2024,9 +2038,9 @@ saltify() {
# Ain't nothing but a GPG
retry 150 20 "apt-get update" "" "Err:" || exit 1
retry 150 20 "apt-get -y install salt-minion=3004.2+ds-1 salt-common=3004.2+ds-1" || exit 1
retry 150 20 "apt-get -y install salt-common salt-minion" || exit 1
retry 150 20 "apt-mark hold salt-minion salt-common" || exit 1
retry 150 20 "apt-get -y install python3-pip python3-dateutil python3-m2crypto python3-mysqldb python3-packaging python3-influxdb python3-lxml" || exit 1
#retry 150 20 "apt-get -y install python3-pip python3-dateutil python3-m2crypto python3-mysqldb python3-packaging python3-influxdb python3-lxml" || exit 1
fi
@@ -2038,11 +2052,12 @@ saltify() {
# We just need the minion
logCmd "dnf -y install salt salt-minion"
fi
logCmd "mkdir -p /etc/salt/minion.d"
logCmd "salt-pip install docker --no-index --only-binary=:all: --find-links files/salt_module_deps/docker/"
logCmd "salt-pip install pymysql --no-index --only-binary=:all: --find-links files/salt_module_deps/pymysql/"
fi
logCmd "mkdir -p /etc/salt/minion.d"
logCmd "salt-pip install docker --no-index --only-binary=:all: --find-links files/salt_module_deps/docker/"
logCmd "salt-pip install pymysql --no-index --only-binary=:all: --find-links files/salt_module_deps/pymysql/"
}
@@ -2381,7 +2396,9 @@ update_packages() {
logCmd "dnf repolist"
logCmd "dnf -y update --allowerasing --exclude=salt*,wazuh*,docker*,containerd*"
else
retry 150 10 "apt-get -y update" >> "$setup_log" 2>&1 || exit 1
info "Running apt-get update"
retry 150 10 "apt-get -y update" "" "Err:" >> "$setup_log" 2>&1 || exit 1
info "Running apt-get upgrade"
retry 150 10 "apt-get -y upgrade" >> "$setup_log" 2>&1 || exit 1
fi
}

View File

@@ -337,7 +337,7 @@ if ! [[ -f $install_opt_file ]]; then
# If you are a manager ask ALL the manager things here. I know there is code re-use but this makes it easier to add new roles.
if [[ $is_eval ]]; then
waitforstate=true
ubuntu_check
#ubuntu_check
monints=true
check_elastic_license
check_requirements "manager"
@@ -361,7 +361,7 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_standalone ]]; then
waitforstate=true
ubuntu_check
#ubuntu_check
monints=true
check_elastic_license
check_requirements "manager"
@@ -386,7 +386,7 @@ if ! [[ -f $install_opt_file ]]; then
elif [[ $is_manager ]]; then
check_elastic_license
waitforstate=true
ubuntu_check
#ubuntu_check
check_requirements "manager"
networking_needful
collect_net_method
@@ -408,7 +408,7 @@ if ! [[ -f $install_opt_file ]]; then
elif [[ $is_managersearch ]]; then
check_elastic_license
waitforstate=true
ubuntu_check
#ubuntu_check
check_requirements "manager"
networking_needful
collect_net_method
@@ -428,7 +428,8 @@ if ! [[ -f $install_opt_file ]]; then
collect_so_allow
whiptail_end_settings
elif [[ $is_sensor ]]; then
ubuntu_check
#ubuntu_check
installer_prereq_packages
monints=true
check_requirements "sensor"
calculate_useable_cores
@@ -444,7 +445,8 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_searchnode ]]; then
ubuntu_check
#ubuntu_check
installer_prereq_packages
check_requirements "elasticsearch"
networking_needful
check_network_manager_conf
@@ -457,7 +459,8 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_heavynode ]]; then
ubuntu_check
#ubuntu_check
installer_prereq_packages
monints=true
check_requirements "heavynode"
calculate_useable_cores
@@ -469,7 +472,8 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_idh ]]; then
ubuntu_check
#ubuntu_check
installer_prereq_packages
check_requirements "idh"
networking_needful
collect_mngr_hostname
@@ -481,7 +485,7 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_import ]]; then
ubuntu_check
#ubuntu_check
waitforstate=true
monints=true
check_elastic_license
@@ -503,7 +507,8 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_end_settings
elif [[ $is_receiver ]]; then
ubuntu_check
#ubuntu_check
installer_prereq_packages
check_requirements "receiver"
networking_needful
collect_mngr_hostname

View File

@@ -523,15 +523,24 @@ whiptail_install_type() {
[ -n "$TESTING" ] && return
# What kind of install are we doing?
install_type=$(whiptail --title "$whiptail_title" --menu \
"What kind of installation would you like to do?\n\nFor more information, please see:\n$DOC_BASE_URL/architecture.html" 18 65 5 \
"IMPORT" "Import PCAP or log files " \
"EVAL" "Evaluation mode (not for production) " \
"STANDALONE" "Standalone production install " \
"DISTRIBUTED" "Distributed install submenu " \
"OTHER" "Other install types" \
3>&1 1>&2 2>&3
)
if [[ $OS = 'rocky' ]]; then
install_type=$(whiptail --title "$whiptail_title" --menu \
"What kind of installation would you like to do?\n\nFor more information, please see:\n$DOC_BASE_URL/architecture.html" 18 65 5 \
"IMPORT" "Import PCAP or log files " \
"EVAL" "Evaluation mode (not for production) " \
"STANDALONE" "Standalone production install " \
"DISTRIBUTED" "Distributed install submenu " \
"OTHER" "Other install types" \
3>&1 1>&2 2>&3
)
elif [[ $OS = 'ubuntu' ]]; then
install_type=$(whiptail --title "$whiptail_title" --menu \
"What kind of installation would you like to do?\n\nFor more information, please see:\n$DOC_BASE_URL/architecture.html" 18 65 5 \
"DISTRIBUTED" "Distributed install submenu " \
"OTHER" "Other install types" \
3>&1 1>&2 2>&3
)
fi
local exitstatus=$?
whiptail_check_exitstatus $exitstatus
@@ -554,11 +563,19 @@ whiptail_install_type_dist() {
[ -n "$TESTING" ] && return
if [[ $OS = 'rocky' ]]; then
dist_option=$(whiptail --title "$whiptail_title" --menu "Do you want to start a new deployment or join this box to \nan existing deployment?" 11 75 2 \
"New Deployment " "Create a new Security Onion deployment" \
"Existing Deployment " "Join to an existing Security Onion deployment " \
3>&1 1>&2 2>&3
)
elif [[ $OS = 'ubuntu' ]]; then
dist_option=$(whiptail --title "$whiptail_title" --menu "Since this is Ubuntu, this box can only be connected to \nan existing deployment." 11 75 2 \
"Existing Deployment " "Join to an existing Security Onion deployment " \
3>&1 1>&2 2>&3
)
fi
local exitstatus=$?
whiptail_check_exitstatus $exitstatus