diff --git a/.github/workflows/pythontest.yml b/.github/workflows/pythontest.yml new file mode 100644 index 000000000..c0e692730 --- /dev/null +++ b/.github/workflows/pythontest.yml @@ -0,0 +1,31 @@ +name: python-test + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + python-version: ["3.10"] + python-code-path: ["salt/sensoroni/files/analyzers"] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + python -m pip install flake8 pytest pytest-cov + find . -name requirements.txt -exec pip install -r {} \; + - name: Lint with flake8 + run: | + flake8 ${{ matrix.python-code-path }} --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics + - name: Test with pytest + run: | + pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=90 --cov-config=${{ matrix.python-code-path }}/pytest.ini diff --git a/.gitignore b/.gitignore index 19447927b..7f446a041 100644 --- a/.gitignore +++ b/.gitignore @@ -56,4 +56,15 @@ $RECYCLE.BIN/ # Windows shortcuts *.lnk -# End of https://www.gitignore.io/api/macos,windows \ No newline at end of file +# End of https://www.gitignore.io/api/macos,windows + +# Pytest output +__pycache__ +.pytest_cache +.coverage +*.pyc +.venv + +# Analyzer dev/test config files +*_dev.yaml +site-packages \ No newline at end of file diff --git a/HOTFIX b/HOTFIX index e69de29bb..d3f5a12fa 100644 --- a/HOTFIX +++ b/HOTFIX @@ -0,0 +1 @@ + diff --git a/README.md b/README.md index 0dff3fba4..0662e05be 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ -## Security Onion 2.3.110 +## Security Onion 2.3.120 -Security Onion 2.3.110 is here! +Security Onion 2.3.120 is here! 
## Screenshots diff --git a/VERIFY_ISO.md b/VERIFY_ISO.md index 300428636..ce56cd48c 100644 --- a/VERIFY_ISO.md +++ b/VERIFY_ISO.md @@ -1,18 +1,18 @@ -### 2.3.110-20220309 ISO image built on 2022/03/09 +### 2.3.120-20220425 ISO image built on 2022/04/25 ### Download and Verify -2.3.110-20220309 ISO image: -https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220309.iso +2.3.120-20220425 ISO image: +https://download.securityonion.net/file/securityonion/securityonion-2.3.120-20220425.iso -MD5: 537564F8B56633E2D46E5E7C4E2BF18A -SHA1: 1E1B42EDB711AC8B5963B3460056770B91AE6BFC -SHA256: 4D73E5BE578DA43DCFD3C1B5F9AF07A7980D8DF90ACDDFEF6CEA177F872EECA0 +MD5: C99729E452B064C471BEF04532F28556 +SHA1: 60BF07D5347C24568C7B793BFA9792E98479CFBF +SHA256: CD17D0D7CABE21D45FA45E1CF91C5F24EB9608C79FF88480134E5592AFDD696E Signature for ISO image: -https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220309.iso.sig +https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.120-20220425.iso.sig Signing key: https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS @@ -26,22 +26,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/ma Download the signature file for the ISO: ``` -wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220309.iso.sig +wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.120-20220425.iso.sig ``` Download the ISO image: ``` -wget https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220309.iso +wget https://download.securityonion.net/file/securityonion/securityonion-2.3.120-20220425.iso ``` Verify the downloaded ISO image using the signature file: ``` -gpg --verify securityonion-2.3.110-20220309.iso.sig securityonion-2.3.110-20220309.iso +gpg --verify securityonion-2.3.120-20220425.iso.sig securityonion-2.3.120-20220425.iso ``` The output should show "Good signature" and the Primary key fingerprint should match what's shown below: ``` -gpg: Signature made Wed 09 Mar 2022 10:20:47 AM EST using RSA key ID FE507013 +gpg: Signature made Mon 25 Apr 2022 08:20:40 AM EDT using RSA key ID FE507013 gpg: Good signature from "Security Onion Solutions, LLC " gpg: WARNING: This key is not certified with a trusted signature! gpg: There is no indication that the signature belongs to the owner. 
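Note: in addition to the GPG signature check above, the downloaded image can be compared against the published SHA256 value. A minimal sketch using GNU coreutils; the checksum and filename are the 2.3.120-20220425 values listed above:

```
# Compute the ISO's SHA256 and compare it against the published value.
expected="CD17D0D7CABE21D45FA45E1CF91C5F24EB9608C79FF88480134E5592AFDD696E"
actual=$(sha256sum securityonion-2.3.120-20220425.iso | awk '{print $1}')
# ${var^^} uppercases both sides for a case-insensitive comparison (bash 4+).
[ "${actual^^}" = "${expected^^}" ] && echo "SHA256 OK" || echo "SHA256 MISMATCH"
```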
diff --git a/VERSION b/VERSION index fb75c1af3..ef4e79170 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.3.120 +2.3.130 diff --git a/pillar/logstash/search.sls b/pillar/logstash/search.sls index 917657e1f..ebe133056 100644 --- a/pillar/logstash/search.sls +++ b/pillar/logstash/search.sls @@ -13,4 +13,5 @@ logstash: - so/9600_output_ossec.conf.jinja - so/9700_output_strelka.conf.jinja - so/9800_output_logscan.conf.jinja + - so/9801_output_rita.conf.jinja - so/9900_output_endgame.conf.jinja diff --git a/pillar/top.sls b/pillar/top.sls index 097f5b108..1cf3bdc8a 100644 --- a/pillar/top.sls +++ b/pillar/top.sls @@ -131,3 +131,6 @@ base: {% endif %} - global - minions.{{ grains.id }} + + '*_workstation': + - minions.{{ grains.id }} diff --git a/salt/allowed_states.map.jinja b/salt/allowed_states.map.jinja index a1f6cdb8c..3dbc6d24a 100644 --- a/salt/allowed_states.map.jinja +++ b/salt/allowed_states.map.jinja @@ -217,6 +217,8 @@ 'schedule', 'docker_clean' ], + 'so-workstation': [ + ], }, grain='role') %} {% if FILEBEAT and grains.role in ['so-helixsensor', 'so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-import', 'so-receiver'] %} diff --git a/salt/common/init.sls b/salt/common/init.sls index d1acca878..0eaf5e77e 100644 --- a/salt/common/init.sls +++ b/salt/common/init.sls @@ -300,8 +300,17 @@ sostatus_log: - month: '*' - dayweek: '*' - {% if role in ['eval', 'manager', 'managersearch', 'standalone'] %} +# Install cron job to determine size of influxdb for telegraf +'du -s -k /nsm/influxdb | cut -f1 > /opt/so/log/telegraf/influxdb_size.log 2>&1': + cron.present: + - user: root + - minute: '*/1' + - hour: '*' + - daymonth: '*' + - month: '*' + - dayweek: '*' + # Lock permissions on the backup directory backupdir: file.directory: diff --git a/salt/common/tools/sbin/so-analyst-install b/salt/common/tools/sbin/so-analyst-install index 6917725fc..12b940897 100755 --- a/salt/common/tools/sbin/so-analyst-install +++ b/salt/common/tools/sbin/so-analyst-install @@ -15,295 +15,86 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -if [ "$(id -u)" -ne 0 ]; then - echo "This script must be run using sudo!" - exit 1 -fi +doc_workstation_url="https://docs.securityonion.net/en/2.3/analyst-vm.html" +{# we only want the script to install the workstation if it is CentOS -#} +{% if grains.os == 'CentOS' -%} +{# if this is a manager -#} +{% if grains.master == grains.id.split('_')|first -%} -INSTALL_LOG=/root/so-analyst-install.log -exec &> >(tee -a "$INSTALL_LOG") +source /usr/sbin/so-common +pillar_file="/opt/so/saltstack/local/pillar/minions/{{grains.id}}.sls" -log() { - msg=$1 - level=${2:-I} - now=$(TZ=GMT date +"%Y-%m-%dT%H:%M:%SZ") - echo -e "$now | $level | $msg" >> "$INSTALL_LOG" 2>&1 -} +if [ -f "$pillar_file" ]; then + if ! grep -q "^workstation:$" "$pillar_file"; then -error() { - log "$1" "E" -} - -info() { - log "$1" "I" -} - -title() { - echo -e "\n-----------------------------\n $1\n-----------------------------\n" >> "$INSTALL_LOG" 2>&1 -} - -logCmd() { - cmd=$1 - info "Executing command: $cmd" - $cmd >> "$INSTALL_LOG" 2>&1 -} - -analyze_system() { - title "System Characteristics" - logCmd "uptime" - logCmd "uname -a" - logCmd "free -h" - logCmd "lscpu" - logCmd "df -h" - logCmd "ip a" -} - -analyze_system - -OS=$(grep PRETTY_NAME /etc/os-release | grep 'CentOS Linux 7') -if [ $? -ne 0 ]; then - echo "This is an unsupported OS. Please use CentOS 7 to install the analyst node." 
- exit 1 -fi - -if [[ "$manufacturer" == "Security Onion Solutions" && "$family" == "Automated" ]]; then - INSTALL=yes - CURLCONTINUE=no -else - INSTALL='' - CURLCONTINUE='' -fi - -FIRSTPASS=yes -while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do - if [[ "$FIRSTPASS" == "yes" ]]; then - clear - echo "###########################################" - echo "## ** W A R N I N G ** ##" - echo "## _______________________________ ##" - echo "## ##" - echo "## Installing the Security Onion ##" - echo "## analyst node on this device will ##" - echo "## make permanent changes to ##" - echo "## the system. ##" - echo "## ##" - echo "###########################################" - echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)" - FIRSTPASS=no - else - echo "Please type 'yes' to continue or 'no' to exit." - fi - read INSTALL -done - -if [[ $INSTALL == "no" ]]; then - echo "Exiting analyst node installation." - exit 0 -fi - -echo "Testing for internet connection with curl https://securityonionsolutions.com/" -CANCURL=$(curl -sI https://securityonionsolutions.com/ | grep "200 OK") - if [ $? -ne 0 ]; then FIRSTPASS=yes - while [[ $CURLCONTINUE != "yes" ]] && [[ $CURLCONTINUE != "no" ]]; do + while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do if [[ "$FIRSTPASS" == "yes" ]]; then - echo "We could not access https://securityonionsolutions.com/." - echo "Since packages are downloaded from the internet, internet access is required." - echo "If you would like to ignore this warning and continue anyway, please type 'yes'." - echo "Otherwise, type 'no' to exit." + echo "###########################################" + echo "## ** W A R N I N G ** ##" + echo "## _______________________________ ##" + echo "## ##" + echo "## Installing the Security Onion ##" + echo "## analyst node on this device will ##" + echo "## make permanent changes to ##" + echo "## the system. ##" + echo "## A system reboot will be required ##" + echo "## to complete the install. ##" + echo "## ##" + echo "###########################################" + echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)" FIRSTPASS=no else echo "Please type 'yes' to continue or 'no' to exit." - fi - read CURLCONTINUE + fi + read INSTALL done - if [[ "$CURLCONTINUE" == "no" ]]; then + + if [[ $INSTALL == "no" ]]; then echo "Exiting analyst node installation." exit 0 fi - else - echo "We were able to curl https://securityonionsolutions.com/." - sleep 3 + + # Add workstation pillar to the minion's pillar file + printf '%s\n'\ "workstation:"\ " gui:"\ " enabled: true"\ "" >> "$pillar_file" + echo "Applying the workstation state. This could take some time since there are many packages that need to be installed." + if salt-call state.apply workstation -linfo queue=True; then # make sure the state ran successfully + echo "" + echo "Analyst workstation has been installed!" + echo "Press ENTER to reboot or Ctrl-C to cancel." + read pause + + reboot; + else + echo "There was an issue applying the workstation state. Please review the log above or at /opt/so/log/salt/minion." + fi + else # workstation is already added + echo "The workstation pillar already exists in $pillar_file." + echo "To enable/disable the gui, set 'workstation:gui:enabled' to true or false in $pillar_file." + echo "Additional documentation can be found at $doc_workstation_url." 
fi - -# Install a GUI text editor -yum -y install gedit - -# Install misc utils -yum -y install wget curl unzip epel-release yum-plugin-versionlock; - -# Install xWindows -yum -y groupinstall "X Window System"; -yum -y install gnome-classic-session gnome-terminal nautilus-open-terminal control-center liberation-mono-fonts; -unlink /etc/systemd/system/default.target; -ln -sf /lib/systemd/system/graphical.target /etc/systemd/system/default.target; -yum -y install file-roller - -# Install Mono - prereq for NetworkMiner -yum -y install mono-core mono-basic mono-winforms expect - -# Install NetworkMiner -yum -y install libcanberra-gtk2; -wget https://www.netresec.com/?download=NetworkMiner -O /tmp/nm.zip; -mkdir -p /opt/networkminer/ -unzip /tmp/nm.zip -d /opt/networkminer/; -rm /tmp/nm.zip; -mv /opt/networkminer/NetworkMiner_*/* /opt/networkminer/ -chmod +x /opt/networkminer/NetworkMiner.exe; -chmod -R go+w /opt/networkminer/AssembledFiles/; -chmod -R go+w /opt/networkminer/Captures/; -# Create networkminer shim -cat << EOF >> /bin/networkminer -#!/bin/bash -/bin/mono /opt/networkminer/NetworkMiner.exe --noupdatecheck "\$@" -EOF -chmod +x /bin/networkminer -# Convert networkminer ico file to png format -yum -y install ImageMagick -convert /opt/networkminer/networkminericon.ico /opt/networkminer/networkminericon.png -# Create menu entry -cat << EOF >> /usr/share/applications/networkminer.desktop -[Desktop Entry] -Name=NetworkMiner -Comment=NetworkMiner -Encoding=UTF-8 -Exec=/bin/networkminer %f -Icon=/opt/networkminer/networkminericon-4.png -StartupNotify=true -Terminal=false -X-MultipleArgs=false -Type=Application -MimeType=application/x-pcap; -Categories=Network; -EOF - -# Set default monospace font to Liberation -cat << EOF >> /etc/fonts/local.conf - - - monospace - - - Liberation Mono - - -EOF - -# Install Wireshark for Gnome -yum -y install wireshark-gnome; - -# Install dnsiff -yum -y install dsniff; - -# Install hping3 -yum -y install hping3; - -# Install netsed -yum -y install netsed; - -# Install ngrep -yum -y install ngrep; - -# Install scapy -yum -y install python36-scapy; - -# Install ssldump -yum -y install ssldump; - -# Install tcpdump -yum -y install tcpdump; - -# Install tcpflow -yum -y install tcpflow; - -# Install tcpxtract -yum -y install tcpxtract; - -# Install whois -yum -y install whois; - -# Install foremost -yum -y install https://forensics.cert.org/centos/cert/7/x86_64//foremost-1.5.7-13.1.el7.x86_64.rpm; - -# Install chromium -yum -y install chromium; - -# Install tcpstat -yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcpstat-1.5.0/securityonion-tcpstat-1.5.0.rpm; - -# Install tcptrace -yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcptrace-6.6.7/securityonion-tcptrace-6.6.7.rpm; - -# Install sslsplit -yum -y install libevent; -yum -y install sslsplit; - -# Install Bit-Twist -yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-bittwist-2.0.0/securityonion-bittwist-2.0.0.rpm; - -# Install chaosreader -yum -y install perl-IO-Compress perl-Net-DNS; -yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-chaosreader-0.95.10/securityonion-chaosreader-0.95.10.rpm; -chmod +x /bin/chaosreader; - -if [ -f ../../files/analyst/README ]; then - cp ../../files/analyst/README /; - cp ../../files/analyst/so-wallpaper.jpg 
/usr/share/backgrounds/; - cp ../../files/analyst/so-lockscreen.jpg /usr/share/backgrounds/; - cp ../../files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/; -else - cp /opt/so/saltstack/default/salt/common/files/analyst/README /; - cp /opt/so/saltstack/default/salt/common/files/analyst/so-wallpaper.jpg /usr/share/backgrounds/; - cp /opt/so/saltstack/default/salt/common/files/analyst/so-lockscreen.jpg /usr/share/backgrounds/; - cp /opt/so/saltstack/default/salt/common/files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/; +else # if the pillar file doesn't exist + echo "Could not find $pillar_file, so the workstation pillar could not be added." fi -# Set background wallpaper -cat << EOF >> /etc/dconf/db/local.d/00-background -# Specify the dconf path -[org/gnome/desktop/background] +{#- if this is not a manager #} +{% else -%} -# Specify the path to the desktop background image file -picture-uri='file:///usr/share/backgrounds/so-wallpaper.jpg' -# Specify one of the rendering options for the background image: -# 'none', 'wallpaper', 'centered', 'scaled', 'stretched', 'zoom', 'spanned' -picture-options='zoom' -# Specify the left or top color when drawing gradients or the solid color -primary-color='000000' -# Specify the right or bottom color when drawing gradients -secondary-color='FFFFFF' -EOF +echo "Since this is not a manager, the pillar values to enable analyst workstation must be set manually. Please view the documentation at $doc_workstation_url." -# Set lock screen -cat << EOF >> /etc/dconf/db/local.d/00-screensaver -[org/gnome/desktop/session] -idle-delay=uint32 180 +{#- endif if this is a manager #} +{% endif -%} -[org/gnome/desktop/screensaver] -lock-enabled=true -lock-delay=uint32 120 -picture-options='zoom' -picture-uri='file:///usr/share/backgrounds/so-lockscreen.jpg' -EOF +{#- if not CentOS #} +{%- else %} -cat << EOF >> /etc/dconf/db/local.d/locks/screensaver -/org/gnome/desktop/session/idle-delay -/org/gnome/desktop/screensaver/lock-enabled -/org/gnome/desktop/screensaver/lock-delay -EOF +echo "The Analyst Workstation can only be installed on CentOS. Please view the documentation at $doc_workstation_url." -# Do not show the user list at login screen -cat << EOF >> /etc/dconf/db/local.d/00-login-screen -[org/gnome/login-screen] -logo='/usr/share/pixmaps/so-login-logo-dark.svg' -disable-user-list=true -EOF +{#- endif grains.os == CentOS #} +{% endif -%} -dconf update; - -echo -echo "Analyst workstation has been installed!" -echo "Press ENTER to reboot or Ctrl-C to cancel." -read pause - -reboot; +exit 0 diff --git a/salt/common/tools/sbin/so-common b/salt/common/tools/sbin/so-common index a7677a754..7b5f29c00 100755 --- a/salt/common/tools/sbin/so-common +++ b/salt/common/tools/sbin/so-common @@ -120,6 +120,30 @@ check_elastic_license() { fi } +check_salt_master_status() { + local timeout=$1 + echo "Checking if we can talk to the salt master" + salt-call state.show_top concurrent=true + + return +} + +check_salt_minion_status() { + local timeout=$1 + echo "Checking if the salt minion will respond to jobs" >> "$setup_log" 2>&1 + salt "$MINION_ID" test.ping -t $timeout > /dev/null 2>&1 + local status=$? 
+ if [ $status -gt 0 ]; then + echo " Minion did not respond" >> "$setup_log" 2>&1 + else + echo " Received job response from salt minion" >> "$setup_log" 2>&1 + fi + + return $status +} + + + copy_new_files() { # Copy new files over to the salt dir cd $UPDATE_DIR @@ -367,6 +391,7 @@ run_check_net_err() { exit $exit_code fi } + set_cron_service_name() { if [[ "$OS" == "centos" ]]; then cron_service_name="crond" diff --git a/salt/common/tools/sbin/so-playbook-sync b/salt/common/tools/sbin/so-playbook-sync index c63ee38ad..c2d20766e 100755 --- a/salt/common/tools/sbin/so-playbook-sync +++ b/salt/common/tools/sbin/so-playbook-sync @@ -18,7 +18,7 @@ . /usr/sbin/so-common # Check to see if we are already running -IS_RUNNING=$(ps aux | pgrep -f "so-playbook-sync" | wc -l) -[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - Multiple Playbook Sync processes already running...exiting." && exit 0 +NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-playbook-sync") +[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING Playbook sync processes running...exiting." && exit 0 docker exec so-soctopus python3 playbook_play-sync.py diff --git a/salt/common/tools/sbin/so-saltstack-update b/salt/common/tools/sbin/so-saltstack-update index e68af9a7d..81b00ace5 100755 --- a/salt/common/tools/sbin/so-saltstack-update +++ b/salt/common/tools/sbin/so-saltstack-update @@ -32,11 +32,17 @@ copy_new_files() { # Copy new files over to the salt dir cd /tmp/sogh/securityonion git checkout $BRANCH + VERSION=$(cat VERSION) + # We need to overwrite if there is a repo file + if [ -d /opt/so/repo ]; then + tar -czf /opt/so/repo/"$VERSION".tar.gz -C "$(pwd)/.." . + fi rsync -a salt $default_salt_dir/ rsync -a pillar $default_salt_dir/ chown -R socore:socore $default_salt_dir/salt chown -R socore:socore $default_salt_dir/pillar chmod 755 $default_salt_dir/pillar/firewall/addfirewall.sh + rm -rf /tmp/sogh } diff --git a/salt/common/tools/sbin/so-sensor-clean b/salt/common/tools/sbin/so-sensor-clean index 3b871ad80..624ff8106 100755 --- a/salt/common/tools/sbin/so-sensor-clean +++ b/salt/common/tools/sbin/so-sensor-clean @@ -115,8 +115,8 @@ clean() { } # Check to see if we are already running -IS_RUNNING=$(ps aux | pgrep -f "so-sensor-clean" | wc -l) -[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0 +NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-sensor-clean") +[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0 if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do diff --git a/salt/common/tools/sbin/soup b/salt/common/tools/sbin/soup index 5fbb1771f..fe84bf383 100755 --- a/salt/common/tools/sbin/soup +++ b/salt/common/tools/sbin/soup @@ -34,7 +34,15 @@ check_err() { local err_msg="Unhandled error occured, please check $SOUP_LOG for details." [[ $ERR_HANDLED == true ]] && exit $exit_code + if [[ $exit_code -ne 0 ]]; then + + set +e + systemctl_func "start" "$cron_service_name" + systemctl_func "start" "salt-master" + systemctl_func "start" "salt-minion" + enable_highstate + printf '%s' "Soup failed with error $exit_code: " case $exit_code in 2) @@ -91,10 +99,7 @@ check_err() { if [[ $exit_code -ge 64 && $exit_code -le 113 ]]; then echo "$err_msg" fi - set +e - systemctl_func "start" "$cron_service_name" - echo "Ensuring highstate is enabled." 
- salt-call state.enable highstate --local + exit $exit_code fi @@ -366,6 +371,12 @@ clone_to_tmp() { fi } +enable_highstate() { + echo "Enabling highstate." + salt-call state.enable highstate -l info --local + echo "" +} + generate_and_clean_tarballs() { local new_version new_version=$(cat $UPDATE_DIR/VERSION) @@ -411,6 +422,7 @@ preupgrade_changes() { [[ "$INSTALLEDVERSION" == 2.3.80 ]] && up_to_2.3.90 [[ "$INSTALLEDVERSION" == 2.3.90 || "$INSTALLEDVERSION" == 2.3.91 ]] && up_to_2.3.100 [[ "$INSTALLEDVERSION" == 2.3.100 ]] && up_to_2.3.110 + [[ "$INSTALLEDVERSION" == 2.3.110 ]] && up_to_2.3.120 true } @@ -424,6 +436,8 @@ postupgrade_changes() { [[ "$POSTVERSION" == 2.3.60 || "$POSTVERSION" == 2.3.61 || "$POSTVERSION" == 2.3.70 || "$POSTVERSION" == 2.3.80 ]] && post_to_2.3.90 [[ "$POSTVERSION" == 2.3.90 || "$POSTVERSION" == 2.3.91 ]] && post_to_2.3.100 [[ "$POSTVERSION" == 2.3.100 ]] && post_to_2.3.110 + [[ "$POSTVERSION" == 2.3.110 ]] && post_to_2.3.120 + true } @@ -487,15 +501,23 @@ post_to_2.3.110() { POSTVERSION=2.3.110 } +post_to_2.3.120() { + echo "Post Processing for 2.3.120" + POSTVERSION=2.3.120 + sed -i '/so-thehive-es/d;/so-thehive/d;/so-cortex/d' /opt/so/conf/so-status/so-status.conf +} + + + stop_salt_master() { # kill all salt jobs across the grid because the hang indefinitely if they are queued and salt-master restarts set +e echo "" echo "Killing all Salt jobs across the grid." - salt \* saltutil.kill_all_jobs + salt \* saltutil.kill_all_jobs >> $SOUP_LOG 2>&1 echo "" echo "Killing any queued Salt jobs on the manager." - pkill -9 -ef "/usr/bin/python3 /bin/salt" + pkill -9 -ef "/usr/bin/python3 /bin/salt" >> $SOUP_LOG 2>&1 set -e echo "" @@ -723,9 +745,6 @@ up_to_2.3.90() { up_to_2.3.100() { fix_wazuh - echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again." - rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched - echo "Adding receiver hostgroup with so-firewall" if so-firewall addhostgroup receiver 2>&1 | grep -q 'Already exists'; then echo 'receiver hostgroup already exists' @@ -738,11 +757,16 @@ up_to_2.3.100() { } up_to_2.3.110() { - echo "Updating to Security Onion 2.3.110" - echo "Updating shard settings for Elasticsearch index templates" sed -i 's|shards|index_template:\n template:\n settings:\n index:\n number_of_shards|g' /opt/so/saltstack/local/pillar/global.sls } +up_to_2.3.120() { + # Stop thehive services since these will be broken in .120 + so-thehive-stop + so-thehive-es-stop + so-cortex-stop + } + verify_upgradespace() { CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//') if [ "$CURRENTSPACE" -lt "10" ]; then @@ -857,7 +881,7 @@ upgrade_salt() { echo "" set +e run_check_net_err \ - "sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -F -M -x python3 stable \"$NEWSALTVERSION\"" \ + "sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -r -F -M -x python3 stable \"$NEWSALTVERSION\"" \ "Could not update salt, please check $SOUP_LOG for details." set -e echo "Applying apt hold for Salt." @@ -866,11 +890,29 @@ upgrade_salt() { apt-mark hold "salt-master" apt-mark hold "salt-minion" fi + + echo "Checking if Salt was upgraded." + echo "" + # Check that Salt was upgraded + SALTVERSIONPOSTUPGRADE=$(salt --versions-report | grep Salt: | awk '{print $2}') + if [[ "$SALTVERSIONPOSTUPGRADE" != "$NEWSALTVERSION" ]]; then + echo "Salt upgrade failed. 
Check for indicators of failure in $SOUP_LOG." + echo "Once the issue is resolved, run soup again." + echo "Exiting." + echo "" + exit 0 + else + echo "Salt upgrade success." + echo "" + echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again." + rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched + fi + } update_repo() { - echo "Performing repo changes." if [[ "$OS" == "centos" ]]; then + echo "Performing repo changes." # Import GPG Keys gpg_rpm_import echo "Disabling fastestmirror." @@ -890,6 +932,21 @@ yum clean all yum repolist fi + elif [[ "$OS" == "ubuntu" ]]; then + ubuntu_version=$(grep VERSION_ID /etc/os-release | awk -F '[ "]' '{print $2}') + + if grep -q "UBUNTU_CODENAME=bionic" /etc/os-release; then + OSVER=bionic + elif grep -q "UBUNTU_CODENAME=focal" /etc/os-release; then + OSVER=focal + else + echo "We do not support your current version of Ubuntu." + exit 1 + fi + + rm -f /etc/apt/sources.list.d/salt.list + echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt $OSVER main" > /etc/apt/sources.list.d/saltstack.list + apt-get update fi } @@ -922,6 +979,8 @@ verify_latest_update_script() { apply_hotfix() { if [[ "$INSTALLEDVERSION" == "2.3.90" ]] ; then fix_wazuh + elif [[ "$INSTALLEDVERSION" == "2.3.110" ]] ; then + 2_3_10_hotfix_1 else echo "No actions required. ($INSTALLEDVERSION/$HOTFIXVERSION)" fi @@ -943,6 +1002,28 @@ fix_wazuh() { fi } +# Upgrade Salt to 3004.1 +2_3_10_hotfix_1() { + systemctl_func "stop" "$cron_service_name" + # update mine items prior to stopping salt-minion and salt-master + update_salt_mine + stop_salt_minion + stop_salt_master + update_repo + # Does Salt need to be upgraded? If so, update it. + if [[ $UPGRADESALT -eq 1 ]]; then + echo "Upgrading Salt" + # Update the repo files so it can actually upgrade + upgrade_salt + fi + rm -f /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdbmod.py.patched /opt/so/state/influxdb_retention_policy.py.patched + systemctl_func "start" "salt-master" + salt-call state.apply salt.python3-influxdb -l info + systemctl_func "start" "salt-minion" + systemctl_func "start" "$cron_service_name" + +} + main() { trap 'check_err $?' EXIT @@ -954,6 +1035,17 @@ main() { echo "### Preparing soup at $(date) ###" echo "" + set_os + set_cron_service_name + if ! check_salt_master_status; then + echo "Could not talk to salt master" + echo "Please run 'systemctl status salt-master' to ensure the salt-master service is running and check the log at /opt/so/log/salt/master." + echo "SOUP will now attempt to start the salt-master service and exit." + exit 1 + fi + + echo "This node can communicate with the salt-master." + echo "Checking to see if this is a manager." echo "" require_manager @@ -989,8 +1081,6 @@ main() { echo "Verifying we have the latest soup script." 
verify_latest_update_script echo "" - set_os - set_cron_service_name set_palette check_elastic_license echo "" @@ -1012,12 +1102,19 @@ main() { upgrade_check_salt set -e + if [[ $is_airgap -eq 0 ]]; then + update_centos_repo + yum clean all + check_os_updates + fi + if [ "$is_hotfix" == "true" ]; then echo "Applying $HOTFIXVERSION hotfix" copy_new_files apply_hotfix echo "Hotfix applied" update_version + enable_highstate salt-call state.highstate -l info queue=True else echo "" @@ -1032,9 +1129,6 @@ main() { echo "Updating dockers to $NEWVERSION." if [[ $is_airgap -eq 0 ]]; then airgap_update_dockers - update_centos_repo - yum clean all - check_os_updates # if not airgap but -f was used elif [[ ! -z "$ISOLOC" ]]; then airgap_update_dockers @@ -1057,21 +1151,6 @@ main() { echo "Upgrading Salt" # Update the repo files so it can actually upgrade upgrade_salt - - echo "Checking if Salt was upgraded." - echo "" - # Check that Salt was upgraded - SALTVERSIONPOSTUPGRADE=$(salt --versions-report | grep Salt: | awk '{print $2}') - if [[ "$SALTVERSIONPOSTUPGRADE" != "$NEWSALTVERSION" ]]; then - echo "Salt upgrade failed. Check of indicators of failure in $SOUP_LOG." - echo "Once the issue is resolved, run soup again." - echo "Exiting." - echo "" - exit 0 - else - echo "Salt upgrade success." - echo "" - fi fi preupgrade_changes @@ -1127,9 +1206,7 @@ main() { echo "" fi - echo "Enabling highstate." - salt-call state.enable highstate -l info --local - echo "" + enable_highstate echo "" echo "Running a highstate. This could take several minutes." diff --git a/salt/elasticsearch/defaults.yaml b/salt/elasticsearch/defaults.yaml index 2ca2f463e..830d1372c 100644 --- a/salt/elasticsearch/defaults.yaml +++ b/salt/elasticsearch/defaults.yaml @@ -4064,7 +4064,7 @@ elasticsearch: field: "@timestamp" order: desc refresh_interval: 30s - number_of_shards: 1 + number_of_shards: 2 number_of_replicas: 0 composed_of: - agent-mappings diff --git a/salt/elasticsearch/files/ingest/rita.beacon b/salt/elasticsearch/files/ingest/rita.beacon new file mode 100644 index 000000000..ab53be763 --- /dev/null +++ b/salt/elasticsearch/files/ingest/rita.beacon @@ -0,0 +1,127 @@ +{ + "description": "RITA Beacons", + "processors": [ + { + "set": { + "field": "_index", + "value": "so-rita", + "override": true + } + }, + { + "csv": { + "field": "message", + "target_fields": [ + "beacon.score", + "source.ip", + "destination.ip", + "network.connections", + "network.average_bytes", + "beacon.interval.range", + "beacon.size.range", + "beacon.interval.top", + "beacon.size.top", + "beacon.interval.top_count", + "beacon.size.top_count", + "beacon.interval.skew", + "beacon.size.skew", + "beacon.interval.dispersion", + "beacon.size.dispersion", + "network.bytes" + ] + } + }, + { + "convert": { + "field": "beacon.score", + "type": "float" + } + }, + { + "convert": { + "field": "network.connections", + "type": "integer" + } + }, + { + "convert": { + "field": "network.average_bytes", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.interval.range", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.size.range", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.interval.top", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.size.top", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.interval.top_count", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.size.top_count", + "type": "integer" + } + }, + { + "convert": { + "field": 
"beacon.interval.skew", + "type": "float" + } + }, + { + "convert": { + "field": "beacon.size.skew", + "type": "float" + } + }, + { + "convert": { + "field": "beacon.interval.dispersion", + "type": "integer" + } + }, + { + "convert": { + "field": "beacon.size.dispersion", + "type": "integer" + } + }, + { + "convert": { + "field": "network.bytes", + "type": "integer" + } + }, + { "set": { "if": "ctx.beacon?.score == 1", "field": "dataset", "value": "alert", "override": true }}, + { "set": { "if": "ctx.beacon?.score == 1", "field": "rule.name", "value": "Potential C2 Beacon Activity", "override": true }}, + { "set": { "if": "ctx.beacon?.score == 1", "field": "event.severity", "value": 3, "override": true }}, + { + "pipeline": { + "name": "common" + } + } + ] +} diff --git a/salt/elasticsearch/files/ingest/rita.connection b/salt/elasticsearch/files/ingest/rita.connection new file mode 100644 index 000000000..58cc921f4 --- /dev/null +++ b/salt/elasticsearch/files/ingest/rita.connection @@ -0,0 +1,36 @@ +{ + "description": "RITA Connections", + "processors": [ + { + "set": { + "field": "_index", + "value": "so-rita", + "override": true + } + }, + { + "dissect": { + "field": "message", + "pattern": "%{source.ip},%{destination.ip},%{network.port}:%{network.protocol}:%{network.service},%{connection.duration},%{connection.state}" + } + }, + { + "convert": { + "field": "connection.duration", + "type": "float" + } + }, + { + "set": { + "field": "event.duration", + "value": "{{ connection.duration }}", + "override": true + } + }, + { + "pipeline": { + "name": "common" + } + } + ] +} diff --git a/salt/elasticsearch/files/ingest/rita.dns b/salt/elasticsearch/files/ingest/rita.dns new file mode 100644 index 000000000..7583bc320 --- /dev/null +++ b/salt/elasticsearch/files/ingest/rita.dns @@ -0,0 +1,39 @@ +{ + "description": "RITA DNS", + "processors": [ + { + "set": { + "field": "_index", + "value": "so-rita", + "override": true + } + }, + { + "csv": { + "field": "message", + "target_fields": [ + "dns.question.name", + "dns.question.subdomain_count", + "dns.question.count" + ] + } + }, + { + "convert": { + "field": "dns.question.subdomain_count", + "type": "integer" + } + }, + { + "convert": { + "field": "dns.question.count", + "type": "integer" + } + }, + { + "pipeline": { + "name": "common" + } + } + ] +} diff --git a/salt/elasticsearch/files/ingest/syslog b/salt/elasticsearch/files/ingest/syslog index bf40efec5..8919c3e1f 100644 --- a/salt/elasticsearch/files/ingest/syslog +++ b/salt/elasticsearch/files/ingest/syslog @@ -1,36 +1,157 @@ { - "description" : "syslog", + "description" : "syslog pipeline", "processors" : [ { - "dissect": { - "field": "message", - "pattern" : "%{message}", - "on_failure": [ { "drop" : { } } ] - }, - "remove": { - "field": [ "type", "agent" ], - "ignore_failure": true - } + "dissect": { + "field": "message", + "pattern" : "%{message}", + "on_failure": [ { "drop" : { } } ] + }, + "remove": { + "field": [ "type", "agent" ], + "ignore_failure": true + } + }, { + "grok": { + "field": "message", + "patterns": [ + "^<%{INT:syslog.priority:int}>%{TIMESTAMP_ISO8601:syslog.timestamp} +%{IPORHOST:syslog.host} +%{PROG:syslog.program}(?:\\[%{POSINT:syslog.pid:int}\\])?: %{GREEDYDATA:real_message}$", + + "^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$", + + "^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: 
CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$" + ], + "ignore_failure": true + } }, { - "grok": - { - "field": "message", - "patterns": [ - "^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$", - "^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$" - ], - "ignore_failure": true - } + "convert" : { + "if": "ctx?.syslog?.priority != null", + "field" : "syslog.priority", + "type": "integer" + } }, - { "set": { "if": "ctx.source?.application == 'filterlog'", "field": "dataset", "value": "firewall", "ignore_failure": true } }, - { "set": { "if": "ctx.vendor != null", "field": "module", "value": "{{ vendor }}", "ignore_failure": true } }, - { "set": { "if": "ctx.product != null", "field": "dataset", "value": "{{ product }}", "ignore_failure": true } }, - { "set": { "field": "event.ingested", "value": "{{ @timestamp }}" } }, - { "date": { "if": "ctx.syslog?.timestamp != null", "field": "syslog.timestamp", "target_field": "@timestamp", "formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"], "ignore_failure": true } }, - { "remove": { "field": ["pid", "program"], "ignore_missing": true, "ignore_failure": true } }, - { "pipeline": { "if": "ctx.vendor != null && ctx.product != null", "name": "{{ vendor }}.{{ product }}", "ignore_failure": true } }, - { "pipeline": { "if": "ctx.dataset == 'firewall'", "name": "filterlog", "ignore_failure": true } }, - { "pipeline": { "name": "common" } } + { + "script": { + "description": "Map syslog priority into facility and level", + "lang": "painless", + "params" : { + "level": [ + "emerg", + "alert", + "crit", + "err", + "warn", + "notice", + "info", + "debug" + ], + "facility" : [ + "kern", + "user", + "mail", + "daemon", + "auth", + "syslog", + "lpr", + "news", + "uucp", + "cron", + "authpriv", + "ftp", + "ntp", + "security", + "console", + "solaris-cron", + "local0", + "local1", + "local2", + "local3", + "local4", + "local5", + "local6", + "local7" + ] + }, + "source": "if (ctx['syslog'] != null && ctx['syslog']['priority'] != null) { int p = ctx['syslog']['priority']; int f = p / 8; int l = p - (f * 8); ctx['syslog']['facility_label'] = [ : ]; ctx['syslog']['severity_label'] = [ : ]; ctx['syslog'].put('severity', l); ctx['syslog'].put('severity_label', params.level[l].toUpperCase()); ctx['syslog'].put('facility', f); ctx['syslog'].put('facility_label', params.facility[f].toUpperCase()); }" + + } + }, + { + "set": { + "if": "ctx.syslog?.host != null", + "field": "host.name", + "value": "{{ syslog.host }}", + "ignore_failure": true + } + }, { + "set": { + "if": "ctx.syslog?.program != null", + "field": "process.name", + "value": "{{ syslog.program }}", + "ignore_failure": true + } + }, { + "set": { + "if": "ctx.syslog?.pid != null", + "field": "process.id", + "value": "{{ syslog.pid }}", + "ignore_failure": true + } + }, { + "set": { + "if": "ctx.source?.application == 'filterlog'", + "field": "dataset", + "value": "firewall", + "ignore_failure": true + } + }, { + "set": { + "if": "ctx.vendor != null", + "field": "module", + "value": "{{ vendor }}", + "ignore_failure": true + } + }, { + "set": { + "if": "ctx.product != null", + "field": "dataset", + "value": "{{ product }}", + "ignore_failure": true + } + }, { + "set": { + "field": "ingest.timestamp", + "value": "{{ @timestamp }}" + } + }, { + "date": { + "if": "ctx.syslog?.timestamp != null", + 
"field": "syslog.timestamp", + "target_field": "@timestamp", + "formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"], + "ignore_failure": true + } + }, { + "remove": { + "field": ["pid", "program"], + "ignore_missing": true, + "ignore_failure": true + } + }, { + "pipeline": { + "if": "ctx.vendor != null && ctx.product != null", + "name": "{{ vendor }}.{{ product }}", + "ignore_failure": true + } + }, { + "pipeline": { + "if": "ctx.dataset == 'firewall'", + "name": "filterlog", + "ignore_failure": true + } + }, { + "pipeline": { "name": "common" } + } ] } diff --git a/salt/elasticsearch/files/log4j2.properties b/salt/elasticsearch/files/log4j2.properties index 85cf5d8fb..014fa61a1 100644 --- a/salt/elasticsearch/files/log4j2.properties +++ b/salt/elasticsearch/files/log4j2.properties @@ -11,10 +11,17 @@ appender.rolling.name = rolling appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log appender.rolling.layout.type = PatternLayout appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n -appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log +appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log.gz appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy appender.rolling.policies.time.interval = 1 appender.rolling.policies.time.modulate = true +appender.rolling.strategy.type = DefaultRolloverStrategy +appender.rolling.strategy.action.type = Delete +appender.rolling.strategy.action.basepath = /var/log/elasticsearch +appender.rolling.strategy.action.condition.type = IfFileName +appender.rolling.strategy.action.condition.glob = *.gz +appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified +appender.rolling.strategy.action.condition.nested_condition.age = 7D rootLogger.level = info rootLogger.appenderRef.rolling.ref = rolling diff --git a/salt/elasticsearch/templates/component/so/dtc-process-mappings.json b/salt/elasticsearch/templates/component/so/dtc-process-mappings.json index 2b8d8abfb..d3d22139a 100644 --- a/salt/elasticsearch/templates/component/so/dtc-process-mappings.json +++ b/salt/elasticsearch/templates/component/so/dtc-process-mappings.json @@ -60,6 +60,32 @@ }, "type": "wildcard" }, + "entity_id": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "executable": { + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + }, + "ignore_above": 1024, + "type": "keyword" + }, "name": { "fields": { "keyword": { @@ -73,6 +99,133 @@ "ignore_above": 1024, "type": "keyword" }, + "parent": { + "properties": { + "command_line": { + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "text": { + "type": "match_only_text" + }, + "keyword": { + "type": "keyword" + } + }, + "type": "wildcard" + }, + "entity_id": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "executable": { + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + }, + "ignore_above": 1024, + 
"type": "keyword" + } + } + }, + "pe": { + "properties": { + "architecture": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "company": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "description": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "file_version": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "original_file_name": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + }, + "product": { + "ignore_above": 1024, + "type": "keyword", + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + } + } + } + }, "pid": { "type": "long", "fields": { @@ -88,6 +241,19 @@ "type": "keyword" } } + }, + "working_directory": { + "fields": { + "security": { + "type": "text", + "analyzer": "es_security_analyzer" + }, + "keyword": { + "type": "keyword" + } + }, + "ignore_above": 1024, + "type": "keyword" } } } diff --git a/salt/elasticsearch/tools/sbin/so-elasticsearch-roles-load b/salt/elasticsearch/tools/sbin/so-elasticsearch-roles-load index c407ac183..7ce907f87 100755 --- a/salt/elasticsearch/tools/sbin/so-elasticsearch-roles-load +++ b/salt/elasticsearch/tools/sbin/so-elasticsearch-roles-load @@ -33,6 +33,8 @@ while [[ "$COUNT" -le 240 ]]; do if [ $? -eq 0 ]; then ELASTICSEARCH_CONNECTED="yes" echo "connected!" 
+ # Check cluster health once connected + so-elasticsearch-query _cluster/health?wait_for_status=yellow > /dev/null 2>&1 break else ((COUNT+=1)) diff --git a/salt/filebeat/etc/filebeat.yml b/salt/filebeat/etc/filebeat.yml index 3ac570e89..fb8f3768c 100644 --- a/salt/filebeat/etc/filebeat.yml +++ b/salt/filebeat/etc/filebeat.yml @@ -10,6 +10,7 @@ {%- set ZEEKVER = salt['pillar.get']('global:mdengine', 'COMMUNITY') %} {%- set WAZUHENABLED = salt['pillar.get']('global:wazuh', '0') %} {%- set STRELKAENABLED = salt['pillar.get']('strelka:enabled', '0') %} +{%- set RITAENABLED = salt['pillar.get']('rita:enabled', False) -%} {%- set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) -%} {%- set FLEETNODE = salt['pillar.get']('global:fleet_node', False) -%} {%- set FBMEMEVENTS = salt['pillar.get']('filebeat:mem_events', 2048) -%} @@ -267,6 +268,54 @@ filebeat.inputs: {%- endif %} +{%- if RITAENABLED %} +- type: filestream + paths: + - /nsm/rita/beacons.csv + exclude_lines: ['^Score', '^Source', '^Domain', '^No results'] + fields: + module: rita + dataset: beacon + category: network + processors: + - drop_fields: + fields: ["source", "prospector", "input", "offset", "beat"] + fields_under_root: true + pipeline: "rita.beacon" + index: "so-rita" + +- type: filestream + paths: + - /nsm/rita/long-connections.csv + - /nsm/rita/open-connections.csv + exclude_lines: ['^Source', '^No results'] + fields: + module: rita + dataset: connection + category: network + processors: + - drop_fields: + fields: ["source", "prospector", "input", "offset", "beat"] + fields_under_root: true + pipeline: "rita.connection" + index: "so-rita" + +- type: filestream + paths: + - /nsm/rita/exploded-dns.csv + exclude_lines: ['^Domain', '^No results'] + fields: + module: rita + dataset: dns + category: network + processors: + - drop_fields: + fields: ["source", "prospector", "input", "offset", "beat"] + fields_under_root: true + pipeline: "rita.dns" + index: "so-rita" +{%- endif %} + {%- if grains['role'] in ['so-eval', 'so-standalone', 'so-manager', 'so-managersearch', 'so-import'] %} - type: filestream paths: diff --git a/salt/firewall/assigned_hostgroups.map.yaml b/salt/firewall/assigned_hostgroups.map.yaml index 9e105e567..257c45808 100644 --- a/salt/firewall/assigned_hostgroups.map.yaml +++ b/salt/firewall/assigned_hostgroups.map.yaml @@ -349,6 +349,9 @@ role: osquery_endpoint: portgroups: - {{ portgroups.fleet_api }} + strelka_frontend: + portgroups: + - {{ portgroups.strelka_frontend }} syslog: portgroups: - {{ portgroups.syslog }} @@ -482,6 +485,9 @@ role: self: portgroups: - {{ portgroups.syslog}} + strelka_frontend: + portgroups: + - {{ portgroups.strelka_frontend }} INPUT: hostgroups: anywhere: @@ -511,6 +517,9 @@ role: self: portgroups: - {{ portgroups.syslog}} + strelka_frontend: + portgroups: + - {{ portgroups.strelka_frontend }} INPUT: hostgroups: anywhere: diff --git a/salt/idh/init.sls b/salt/idh/init.sls index 089ecc4df..70a5d370d 100644 --- a/salt/idh/init.sls +++ b/salt/idh/init.sls @@ -19,11 +19,37 @@ {% set VERSION = salt['pillar.get']('global:soversion', 'HH1.2.2') %} {% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %} {% set MANAGER = salt['grains.get']('master') %} +{% set MAININT = salt['pillar.get']('host:mainint') %} +{% set MAINIP = salt['grains.get']('ip_interfaces').get(MAININT)[0] %} +{% set RESTRICTIDHSERVICES = salt['pillar.get']('idh:restrict_management_ip', False) %} include: - idh.openssh.config + - firewall -# IDH State + +# If True, block IDH Services from accepting 
connections on Management IP +{% if RESTRICTIDHSERVICES %} + {% from 'idh/opencanary_config.map.jinja' import OPENCANARYCONFIG %} + {% set idh_services = salt['pillar.get']('idh:services', []) %} + + {% for service in idh_services %} + {% if service in ["snmp","ntp", "tftp"] %} + {% set proto = 'udp' %} + {% else %} + {% set proto = 'tcp' %} + {% endif %} +block_mgt_ip_idh_services_{{ proto }}_{{ OPENCANARYCONFIG[service~'.port'] }} : + iptables.insert: + - table: filter + - chain: INPUT + - jump: DROP + - position: 1 + - proto: {{ proto }} + - dport: {{ OPENCANARYCONFIG[service~'.port'] }} + - destination: {{ MAINIP }} + {% endfor %} +{% endif %} # Create a config directory temp: diff --git a/salt/influxdb/defaults.yaml b/salt/influxdb/defaults.yaml index 205c2ba67..c89257c53 100644 --- a/salt/influxdb/defaults.yaml +++ b/salt/influxdb/defaults.yaml @@ -1,4 +1,4 @@ -{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log') %} +{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log', shell='/bin/bash') %} influxdb: retention_policies: diff --git a/salt/kibana/bin/so-kibana-config-load b/salt/kibana/bin/so-kibana-config-load index d1ac21533..4d7c69da1 100644 --- a/salt/kibana/bin/so-kibana-config-load +++ b/salt/kibana/bin/so-kibana-config-load @@ -59,7 +59,7 @@ update() { IFS=$'\r\n' GLOBIGNORE='*' command eval 'LINES=($(cat $1))' for i in "${LINES[@]}"; do - RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/8.1.0" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ") + RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/8.2.0" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ") echo $RESPONSE; if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi done diff --git a/salt/kibana/files/config_saved_objects.ndjson b/salt/kibana/files/config_saved_objects.ndjson index d844e2e44..a0765d57a 100644 --- a/salt/kibana/files/config_saved_objects.ndjson +++ b/salt/kibana/files/config_saved_objects.ndjson @@ -1 +1 @@ -{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "8.1.0","id": "8.1.0","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="} +{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "8.2.0","id": "8.2.0","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="} diff --git a/salt/logstash/etc/log4j2.properties b/salt/logstash/etc/log4j2.properties index 
73a646b8d..739756061 100644 --- a/salt/logstash/etc/log4j2.properties +++ b/salt/logstash/etc/log4j2.properties @@ -18,7 +18,7 @@ appender.rolling.name = rolling appender.rolling.fileName = /var/log/logstash/logstash.log appender.rolling.layout.type = PatternLayout appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n -appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log +appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log.gz appender.rolling.policies.type = Policies appender.rolling.policies.time.type = TimeBasedTriggeringPolicy appender.rolling.policies.time.interval = 1 @@ -27,7 +27,7 @@ appender.rolling.strategy.type = DefaultRolloverStrategy appender.rolling.strategy.action.type = Delete appender.rolling.strategy.action.basepath = /var/log/logstash appender.rolling.strategy.action.condition.type = IfFileName -appender.rolling.strategy.action.condition.glob = logstash-*.log +appender.rolling.strategy.action.condition.glob = *.gz appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified appender.rolling.strategy.action.condition.nested_condition.age = 7D rootLogger.level = info diff --git a/salt/logstash/pipelines/config/so/9801_output_rita.conf.jinja b/salt/logstash/pipelines/config/so/9801_output_rita.conf.jinja new file mode 100644 index 000000000..40c6ad33c --- /dev/null +++ b/salt/logstash/pipelines/config/so/9801_output_rita.conf.jinja @@ -0,0 +1,22 @@ +{%- if grains['role'] == 'so-eval' -%} +{%- set ES = salt['pillar.get']('manager:mainip', '') -%} +{%- else %} +{%- set ES = salt['pillar.get']('elasticsearch:mainip', '') -%} +{%- endif %} +{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %} +{%- set ES_PASS = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %} +output { + if [module] =~ "rita" and "import" not in [tags] { + elasticsearch { + pipeline => "%{module}.%{dataset}" + hosts => "{{ ES }}" +{% if salt['pillar.get']('elasticsearch:auth:enabled') is sameas true %} + user => "{{ ES_USER }}" + password => "{{ ES_PASS }}" +{% endif %} + index => "so-rita" + ssl => true + ssl_certificate_verification => false + } + } +} diff --git a/salt/repo/client/centos.sls b/salt/repo/client/centos.sls new file mode 100644 index 000000000..160782267 --- /dev/null +++ b/salt/repo/client/centos.sls @@ -0,0 +1,98 @@ +{% from 'repo/client/map.jinja' import ABSENTFILES with context %} +{% from 'repo/client/map.jinja' import REPOPATH with context %} +{% set ISAIRGAP = salt['pillar.get']('global:airgap', False) %} +{% set managerupdates = salt['pillar.get']('global:managerupdate', 0) %} +{% set role = grains.id.split('_') | last %} + +# from airgap state +{% if ISAIRGAP and grains.os == 'CentOS' %} +{% set MANAGER = salt['grains.get']('master') %} +airgapyum: + file.managed: + - name: /etc/yum/yum.conf + - source: salt://repo/client/files/centos/airgap/yum.conf + +airgap_repo: + pkgrepo.managed: + - humanname: Airgap Repo + - baseurl: https://{{ MANAGER }}/repo + - gpgcheck: 0 + - sslverify: 0 + +{% endif %} + +# from airgap and common +{% if ABSENTFILES|length > 0%} + {% for file in ABSENTFILES %} +{{ file }}: + file.absent: + - name: {{ REPOPATH }}{{ file }} + - onchanges_in: + - cmd: cleanyum + {% endfor %} +{% endif %} + +# from common state +# Remove default Repos +{% if grains['os'] == 'CentOS' %} +repair_yumdb: + cmd.run: + - name: 'mv -f /var/lib/rpm/__db* /tmp && yum clean all' + - onlyif: + - 'yum check-update 2>&1 | grep "Error: rpmdb 
open failed"' + +crsynckeys: + file.recurse: + - name: /etc/pki/rpm_gpg + - source: salt://repo/client/files/centos/keys/ + +{% if not ISAIRGAP %} + {% if role in ['eval', 'standalone', 'import', 'manager', 'managersearch'] or managerupdates == 0 %} +remove_securityonionrepocache: + file.absent: + - name: /etc/yum.repos.d/securityonioncache.repo + {% endif %} + + {% if role not in ['eval', 'standalone', 'import', 'manager', 'managersearch'] and managerupdates == 1 %} +remove_securityonionrepo: + file.absent: + - name: /etc/yum.repos.d/securityonion.repo + {% endif %} + +crsecurityonionrepo: + file.managed: + {% if role in ['eval', 'standalone', 'import', 'manager', 'managersearch'] or managerupdates == 0 %} + - name: /etc/yum.repos.d/securityonion.repo + - source: salt://repo/client/files/centos/securityonion.repo + {% else %} + - name: /etc/yum.repos.d/securityonioncache.repo + - source: salt://repo/client/files/centos/securityonioncache.repo + {% endif %} + - mode: 644 + +yumconf: + file.managed: + - name: /etc/yum.conf + - source: salt://repo/client/files/centos/yum.conf.jinja + - mode: 644 + - template: jinja + - show_changes: False + +cleanairgap: + file.absent: + - name: /etc/yum.repos.d/airgap_repo.repo +{% endif %} + +cleanyum: + cmd.run: + - name: 'yum clean metadata' + - onchanges: +{% if ISAIRGAP %} + - file: airgapyum + - pkgrepo: airgap_repo +{% else %} + - file: crsecurityonionrepo + - file: yumconf +{% endif %} + +{% endif %} diff --git a/salt/repo/client/init.sls b/salt/repo/client/init.sls index 160782267..154867caf 100644 --- a/salt/repo/client/init.sls +++ b/salt/repo/client/init.sls @@ -1,98 +1,2 @@ -{% from 'repo/client/map.jinja' import ABSENTFILES with context %} -{% from 'repo/client/map.jinja' import REPOPATH with context %} -{% set ISAIRGAP = salt['pillar.get']('global:airgap', False) %} -{% set managerupdates = salt['pillar.get']('global:managerupdate', 0) %} -{% set role = grains.id.split('_') | last %} - -# from airgap state -{% if ISAIRGAP and grains.os == 'CentOS' %} -{% set MANAGER = salt['grains.get']('master') %} -airgapyum: - file.managed: - - name: /etc/yum/yum.conf - - source: salt://repo/client/files/centos/airgap/yum.conf - -airgap_repo: - pkgrepo.managed: - - humanname: Airgap Repo - - baseurl: https://{{ MANAGER }}/repo - - gpgcheck: 0 - - sslverify: 0 - -{% endif %} - -# from airgap and common -{% if ABSENTFILES|length > 0%} - {% for file in ABSENTFILES %} -{{ file }}: - file.absent: - - name: {{ REPOPATH }}{{ file }} - - onchanges_in: - - cmd: cleanyum - {% endfor %} -{% endif %} - -# from common state -# Remove default Repos -{% if grains['os'] == 'CentOS' %} -repair_yumdb: - cmd.run: - - name: 'mv -f /var/lib/rpm/__db* /tmp && yum clean all' - - onlyif: - - 'yum check-update 2>&1 | grep "Error: rpmdb open failed"' - -crsynckeys: - file.recurse: - - name: /etc/pki/rpm_gpg - - source: salt://repo/client/files/centos/keys/ - -{% if not ISAIRGAP %} - {% if role in ['eval', 'standalone', 'import', 'manager', 'managersearch'] or managerupdates == 0 %} -remove_securityonionrepocache: - file.absent: - - name: /etc/yum.repos.d/securityonioncache.repo - {% endif %} - - {% if role not in ['eval', 'standalone', 'import', 'manager', 'managersearch'] and managerupdates == 1 %} -remove_securityonionrepo: - file.absent: - - name: /etc/yum.repos.d/securityonion.repo - {% endif %} - -crsecurityonionrepo: - file.managed: - {% if role in ['eval', 'standalone', 'import', 'manager', 'managersearch'] or managerupdates == 0 %} - - name: 
/etc/yum.repos.d/securityonion.repo - - source: salt://repo/client/files/centos/securityonion.repo - {% else %} - - name: /etc/yum.repos.d/securityonioncache.repo - - source: salt://repo/client/files/centos/securityonioncache.repo - {% endif %} - - mode: 644 - -yumconf: - file.managed: - - name: /etc/yum.conf - - source: salt://repo/client/files/centos/yum.conf.jinja - - mode: 644 - - template: jinja - - show_changes: False - -cleanairgap: - file.absent: - - name: /etc/yum.repos.d/airgap_repo.repo -{% endif %} - -cleanyum: - cmd.run: - - name: 'yum clean metadata' - - onchanges: -{% if ISAIRGAP %} - - file: airgapyum - - pkgrepo: airgap_repo -{% else %} - - file: crsecurityonionrepo - - file: yumconf -{% endif %} - -{% endif %} +include: + - repo.client.{{grains.os | lower}} diff --git a/salt/repo/client/ubuntu.sls b/salt/repo/client/ubuntu.sls new file mode 100644 index 000000000..301bdabae --- /dev/null +++ b/salt/repo/client/ubuntu.sls @@ -0,0 +1,20 @@ +# this removes the repo file left by bootstrap-salt.sh without -r +remove_salt.list: + file.absent: + - name: /etc/apt/sources.list.d/salt.list + +saltstack.list: + file.managed: + - name: /etc/apt/sources.list.d/saltstack.list + - contents: + - deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/{{grains.osrelease}}/amd64/salt/ {{grains.oscodename}} main + +apt_update: + cmd.run: + - name: apt-get update + - onchanges: + - file: saltstack.list + - timeout: 30 + - retry: + attempts: 5 + interval: 30 diff --git a/salt/salt/map.jinja b/salt/salt/map.jinja index b66ec23eb..389a95607 100644 --- a/salt/salt/map.jinja +++ b/salt/salt/map.jinja @@ -11,6 +11,7 @@ {% set PYTHON3INFLUX= 'influxdb == ' ~ PYTHONINFLUXVERSION %} {% set PYTHON3INFLUXDEPS= ['certifi', 'chardet', 'python-dateutil', 'pytz', 'requests'] %} {% set PYTHONINSTALLER = 'pip' %} + {% set SYSTEMD_UNIT_FILE = '/lib/systemd/system/salt-minion.service' %} {% else %} {% set SPLITCHAR = '-' %} {% set SALTNOTHELD = salt['cmd.run']('yum versionlock list | grep -q salt ; echo $?', python_shell=True) %} @@ -21,6 +22,7 @@ {% set PYTHON3INFLUX= 'securityonion-python3-influxdb' %} {% set PYTHON3INFLUXDEPS= ['python36-certifi', 'python36-chardet', 'python36-dateutil', 'python36-pytz', 'python36-requests'] %} {% set PYTHONINSTALLER = 'pkg' %} + {% set SYSTEMD_UNIT_FILE = '/usr/lib/systemd/system/salt-minion.service' %} {% endif %} {% set INSTALLEDSALTVERSION = salt['pkg.version']('salt-minion').split(SPLITCHAR)[0] %} @@ -29,7 +31,7 @@ {% if grains.os|lower in ['centos', 'redhat'] %} {% set UPGRADECOMMAND = 'yum clean all ; /usr/sbin/bootstrap-salt.sh -s 120 -r -F -x python3 stable ' ~ SALTVERSION %} {% elif grains.os|lower == 'ubuntu' %} - {% set UPGRADECOMMAND = '/usr/sbin/bootstrap-salt.sh -s 120 -F -x python3 stable ' ~ SALTVERSION %} + {% set UPGRADECOMMAND = '/usr/sbin/bootstrap-salt.sh -s 120 -r -F -x python3 stable ' ~ SALTVERSION %} {% endif %} {% else %} {% set UPGRADECOMMAND = 'echo Already running Salt Minion version ' ~ SALTVERSION %} diff --git a/salt/salt/master.defaults.yaml b/salt/salt/master.defaults.yaml index 8d5e85e15..a07f22865 100644 --- a/salt/salt/master.defaults.yaml +++ b/salt/salt/master.defaults.yaml @@ -2,4 +2,4 @@ # When updating the salt version, also update the version in securityonion-builds/images/iso-task/Dockerfile and saltify function in so-functions salt: master: - version: 3004 + version: 3004.1 diff --git a/salt/salt/minion.defaults.yaml b/salt/salt/minion.defaults.yaml index ef7bfe37c..68e044db8 100644 --- a/salt/salt/minion.defaults.yaml 
+++ b/salt/salt/minion.defaults.yaml @@ -2,6 +2,6 @@ # When updating the salt version, also update the version in securityonion-builds/images/iso-task/Dockerfile and saltify function in so-functions salt: minion: - version: 3004 + version: 3004.1 check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default service_start_delay: 30 # in seconds. diff --git a/salt/salt/minion.sls b/salt/salt/minion.sls index cf26c1249..15e203d82 100644 --- a/salt/salt/minion.sls +++ b/salt/salt/minion.sls @@ -3,6 +3,7 @@ {% from 'salt/map.jinja' import INSTALLEDSALTVERSION %} {% from 'salt/map.jinja' import SALTNOTHELD %} {% from 'salt/map.jinja' import SALTPACKAGES %} +{% from 'salt/map.jinja' import SYSTEMD_UNIT_FILE %} {% import_yaml 'salt/minion.defaults.yaml' as SALTMINION %} {% set service_start_delay = SALTMINION.salt.minion.service_start_delay %} @@ -31,6 +32,22 @@ install_salt_minion: exec 1>&- # close stdout exec 2>&- # close stderr nohup /bin/sh -c '{{ UPGRADECOMMAND }}' & + + {# if we are the salt master #} + {% if grains.id.split('_')|first == grains.master %} +remove_influxdb_continuous_query_state_file: + file.absent: + - name: /opt/so/state/influxdb_continuous_query.py.patched + +remove_influxdbmod_state_file: + file.absent: + - name: /opt/so/state/influxdbmod.py.patched + +remove_influxdb_retention_policy_state_file: + file.absent: + - name: /opt/so/state/influxdb_retention_policy.py.patched + {% endif %} + {% endif %} {% if INSTALLEDSALTVERSION|string == SALTVERSION|string %} @@ -66,7 +83,7 @@ set_log_levels: salt_minion_service_unit_file: file.managed: - - name: /etc/systemd/system/multi-user.target.wants/salt-minion.service + - name: {{ SYSTEMD_UNIT_FILE }} - source: salt://salt/service/salt-minion.service.jinja - template: jinja - defaults: @@ -94,6 +111,7 @@ salt_minion_service: - file: set_log_levels - file: salt_minion_service_unit_file {% endif %} + - order: last patch_pkg: diff --git a/salt/sensoroni/files/analyzers/README.md b/salt/sensoroni/files/analyzers/README.md new file mode 100644 index 000000000..fe311725a --- /dev/null +++ b/salt/sensoroni/files/analyzers/README.md @@ -0,0 +1,248 @@ +# Security Onion Analyzers + +Security Onion provides a means for performing data analysis on varying inputs. This data can be any data of interest sourced from event logs. Examples include hostnames, IP addresses, file hashes, URLs, etc. The analysis is conducted by one or more analyzers that understand that type of input. Analyzers come with the default installation of Security Onion. However, it is also possible to add additional analyzers to extend the analysis across additional areas or data types. 
+ +## Supported Observable Types +The built-in analyzers support the following observable types: + +| Name | Domain | Hash | IP | JA3 | Mail | Other | URI | URL | User Agent | +| ------------------------|--------|-------|-------|-------|-------|-------|-------|-------|------------ +| Alienvault OTX |✓ |✓|✓|✗|✗|✗|✗|✓|✗| +| EmailRep |✗ |✗|✗|✗|✓|✗|✗|✗|✗| +| Greynoise |✗ |✗|✓|✗|✗|✗|✗|✗|✗| +| JA3er |✗ |✗|✗|✓|✗|✗|✗|✗|✗| +| LocalFile |✓ |✓|✓|✓|✗|✓|✗|✓|✗| +| Pulsedive |✓ |✓|✓|✗|✗|✗|✓|✓|✓| +| Spamhaus |✗ |✗|✓|✗|✗|✗|✗|✗|✗| +| Urlhaus |✗ |✗|✗|✗|✗|✗|✗|✓|✗| +| Urlscan |✗ |✗|✗|✗|✗|✗|✗|✓|✗| +| Virustotal |✓ |✓|✓|✗|✗|✗|✗|✓|✗| + +## Developer Guide + +### Python + +Analyzers are Python modules, and can be made up of a single .py script, for simpler analyzers, or a complex set of scripts organized within nested directories. + +The Python language was chosen because of its wide adoption in the security industry, ease of development and testing, and the abundance of developers with Python skills. + +Specifically, analyzers must be compatible with Python 3.10. + +For more information about Python, see the [Python Documentation](https://docs.python.org). + +### Development + +Custom analyzers should be developed outside of the Security Onion cluster, in a proper software development environment, with version control or other backup mechanisms in place. The analyzer can be developed, unit tested, and integration tested without the need for a Security Onion installation. Once satisfied with the analyzer functionality, the analyzer directory should be copied to the Security Onion manager node. + +Developing an analyzer directly on a Security Onion manager node is strongly discouraged, as loss of source code (and time and effort) can occur, should the manager node suffer a catastrophic failure with disk storage loss. + +For best results, avoid long, complicated functions in favor of short, discrete functions. This has several benefits: + +- Easier to troubleshoot +- Easier to maintain +- Easier to unit test +- Easier for other developers to review + +### Linting + +Source code should adhere to the [PEP 8 - Style Guide for Python Code](https://peps.python.org/pep-0008/). Developers can use the default configuration of `flake8` to validate conformance, or run the included `build.sh` inside the analyzers directory. Note that linting conformance is mandatory for analyzers that are contributed back to the Security Onion project. + +### Testing + +Python's [unittest](https://docs.python.org/3/library/unittest.html) library can be used for covering analyzer code with unit tests. Unit tests are encouraged for custom analyzers, and mandatory for public analyzers submitted back to the Security Onion project. + +If you are new to unit testing, please see the included `urlhaus_test.py` as an example. + +Unit tests should be named following the pattern `<module>_test.py`. + + +### Analyzer Package Structure + +Deployment of a custom analyzer entails copying the analyzer source directory and dependency wheel archives to the Security Onion manager node. The destination locations can be found inside the `securityonion` salt source directory tree. Using the [Saltstack](https://github.com/saltstack/salt) directory pattern allows Security Onion developers to add their own analyzers with minimal additional effort needed to upgrade to newer versions of Security Onion. When the _sensoroni_ salt state executes, it will merge the default analyzers with any local analyzers, and copy the merged analyzers into the `/opt/so/conf/sensoroni` directory.
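+
+Before diving into the directory layout, the following sketch shows the overall shape of an analyzer module. It is a minimal, hypothetical example (the threat list and module name are made up), but it follows the conventions used by the built-in analyzers and detailed later in this guide: parse the artifact JSON argument, exit with code `126` when the input is unsupported, and print a JSON result to stdout.
+
+```python
+import argparse
+import json
+
+KNOWN_BAD = {"192.0.2.1"}  # hypothetical threat list, for illustration only
+
+
+def analyze(artifact):
+    data = json.loads(artifact)
+    if data.get("artifactType") != "ip":
+        raise SystemExit(126)  # signal unsupported input, per the Exit Code section below
+    if data["value"] in KNOWN_BAD:
+        return {"response": data, "summary": "malicious", "status": "threat"}
+    return {"response": data, "summary": "no_results", "status": "ok"}
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Minimal example analyzer')
+    parser.add_argument('artifact', help='the artifact represented in JSON format')
+    args = parser.parse_args()
+    print(json.dumps(analyze(args.artifact)))
+
+
+if __name__ == "__main__":
+    main()
+```
+
+Running `python myanalyzer.py '{"artifactType":"ip","value":"192.0.2.1"}'` would print a JSON result with a `threat` status.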
+ +Do not modify files in the `/opt/so/conf/sensoroni` directory! This is a generated directory, and changes made inside it are automatically erased at frequent intervals. + +On a Security Onion manager, custom analyzers should be placed inside the `/opt/so/saltstack/local/salt/sensoroni` directory, as described in the next section. + +#### Directory Tree + +From within the default saltstack directory, the following files and directories exist: + +``` +salt + |- sensoroni + |- files + |- analyzers + |- urlhaus <- Example of an existing analyzer + | |- source-packages <- Contains wheel package bundles for this analyzer's dependencies + | |- site-packages <- Auto-generated site-packages directory (or used for custom dependencies) + | |- requirements.txt <- List of all dependencies needed for this analyzer + | |- urlhaus.py <- Source code for the analyzer + | |- urlhaus_test.py <- Unit tests for the analyzer source code + | |- urlhaus.json <- Metadata for the analyzer + | |- __init__.py <- Package initialization file, often empty + | + |- build.sh <- Simple CI tool for validating linting and unit tests + |- helpers.py <- Common functions shared by many analyzers + |- helpers_test.py <- Unit tests for the shared source code + |- pytest.ini <- Configuration options for flake8 and pytest + |- README.md <- The file you are currently reading +``` + +Custom analyzers should conform to this same structure, but instead of being placed in the `/opt/so/saltstack/default` directory tree, they should be placed in the `/opt/so/saltstack/local` directory tree. This ensures future Security Onion upgrades will not overwrite customizations. Shared files like `build.sh` and `helpers.py` do not need to be duplicated; they can remain in the _default_ directory tree. Only new or modified files should exist in the _local_ directory tree. + +#### Metadata + +Each analyzer has certain metadata that helps describe the function of the analyzer, required inputs, artifact compatibility, optional configuration options, analyzer version, and other important details. This is a static file and is not intended to be used for dynamic or custom configuration options. It should only be modified by the author of the analyzer. + +The following example describes the urlhaus metadata content: + +``` +{ + "name": "Urlhaus", <- Unique human-friendly name of this analyzer + "version": "0.1", <- The version of the analyzer + "author": "Security Onion Solutions", <- Author's name, and/or email or other contact information + "description": "This analyzer queries URLHaus...", <- A brief, concise description of the analyzer + "supportedTypes" : ["url"], <- List of types that must match the SOC observable types + "baseUrl": "https://urlhaus-api.abuse.ch/v1/url/" <- Optional hardcoded data used by the analyzer +} +``` + +The `supportedTypes` values should only contain the types that this analyzer can work with. In the case of the URLHaus analyzer, we know that it works with URLs. So adding "hash" to this list wouldn't make sense, since URLHaus doesn't provide information about file hashes. If an analyzer does not support a particular type then it will not show up in the analyzer results in SOC for that observable being analyzed. This is intentional, to eliminate unnecessary screen clutter in SOC. To find a list of available values for the `supportedTypes` field, log in to SOC and, inside of a Case, click the + button on the Observables tab.
You will see a list of types, and each of those can be used in this metadata field, when applicable to the analyzer. + +#### Dependencies + +Analyzers will often require the use of third-party packages. For example, if an analyzer needs to make a request to a remote server via HTTPS, then the `requests` package will likely be used. Each analyzer will contain a `requirements.txt` file, in which all third-party dependencies can be specified, following the pip [Requirements File Specification](https://pip.pypa.io/en/stable/reference/requirements-file-format/). + +Additionally, to support airgapped users, the dependency packages themselves, and any transitive dependencies, should be placed inside the `source-packages` directory. To obtain the full hierarchy of dependencies, execute the following command from within the analyzer's directory: + +```bash +pip download -r requirements.txt -d source-packages +``` + + +### Analyzer Architecture + +The Sensoroni Docker container is responsible for executing analyzers. Only the manager's Sensoroni container will process analyzer jobs. Other nodes in the grid, such as sensors and search nodes, will not be assigned analyzer jobs. + +When the Sensoroni Docker container starts, the `/opt/so/conf/sensoroni/analyzer` directory is mapped into the container. The initialization of the Sensoroni Analyze module will scan that directory for any subdirectories, and each valid subdirectory will be added as an available analyzer. + +The analyzer itself will only run when a user in SOC enqueues an analyzer job, such as via the Cases -> Observables tab. When the Sensoroni node is ready to run the job, it will execute the python command interpreter separately for each loaded analyzer. The command line resembles the following: + +```bash +python -m urlhaus '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}' +``` + +It is up to each analyzer to determine whether the provided input is compatible with that analyzer. This is assisted by the analyzer metadata, as described earlier in this document, with the use of the `supportedTypes` list. + +Once the analyzer completes its work, it must terminate promptly. See the following sections for more details on the expected internal behavior of the analyzer. + +#### Configuration + +Analyzers may need dynamic configuration data, such as credentials or other secrets, in order to complete their function. Optional configuration files can provide this information, and are expected to reside in the analyzer's directory. Configuration files are typically written in YAML syntax for ease of modification. + +Configuration files for analyzers included with Security Onion will be pillarized, meaning they derive their custom values from the Saltstack pillar data. For example, an analyzer that requires a user-supplied credential might contain a config file resembling the following, where Jinja templating syntax is used to extract Salt pillar data: + +```yaml +username: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:username', '') }} +password: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:password', '') }} +``` + +Sensoroni will not provide any inputs to the analyzer during execution, other than the artifact input in JSON format. However, developers will likely need to test the analyzer outside of Sensoroni and without Jinja templating, therefore an alternate config file should normally be supplied as the configuration argument during testing, as in the sketch below.
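+
+The sketch below illustrates that pattern (the `myanalyzer.yaml` file name is hypothetical): the Salt-rendered YAML file shipped alongside the module is read by default, while the `-c` argument lets a developer point at an alternate, non-templated config file during development.
+
+```python
+import argparse
+import os
+
+import yaml  # PyYAML, declared in each analyzer's requirements.txt
+
+
+def loadConfig(path):
+    # On a deployed grid this file has already been rendered by Salt,
+    # so plain YAML parsing is sufficient.
+    with open(path, "r") as conffile:
+        return yaml.safe_load(conffile)
+
+
+def main():
+    moduledir = os.path.dirname(os.path.realpath(__file__))
+    parser = argparse.ArgumentParser(description='Minimal example analyzer')
+    parser.add_argument('artifact', help='the artifact represented in JSON format')
+    parser.add_argument('-c', '--config', metavar="CONFIG_FILE",
+                        default=moduledir + "/myanalyzer.yaml",
+                        help='optional config file to use instead of the default config file')
+    args = parser.parse_args()
+    conf = loadConfig(args.config)
+    print(sorted(conf))  # a real analyzer would pass conf into its analysis logic
+
+
+if __name__ == "__main__":
+    main()
+```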
Analyzers should allow for this additional command line argument, but by default should automatically read a configuration file stored in the analyzer's directory. + +#### Exit Code + +If an analyzer determines it cannot or should not operate on the input, then the analyzer should return an exit code of `126`. + +If an analyzer does attempt to operate against the input, then the exit code should be `0`, regardless of the outcome. The outcome, be it an error, a confirmed threat detection, or perhaps an unknown outcome, should be noted in the output of the analyzer. + +#### Output + +The outcome of the analyzer is reflected in the analyzer's output to `stdout`. The output must be JSON formatted, and should contain the following fields. + +`summary`: A very short summary of the outcome. This should be under 50 characters; otherwise it will be truncated when displayed on the Analyzer job list. + +`status`: Can be one of the following status values, whichever most appropriately reflects the outcome: +- `ok`: The analyzer has concluded that the provided input is not a known threat. +- `info`: This analyzer provides informative data, but does not attempt to conclude the input is a threat. +- `caution`: The data provided is inconclusive. Analysts should review this information further. This can be used in error scenarios, such as if the analyzer fails to complete, perhaps due to a remote service being offline. +- `threat`: The analyzer has detected that the input is likely related to a threat. + +`error`: [Optional] If the analyzer encounters an unrecoverable error, those details, useful for administrators to troubleshoot the problem, should be placed in this field. + +Additional fields are allowed, and should contain data that is specific to the analyzer. + +Below is an example of a _urlhaus_ analyzer output. Note that the urlhaus raw JSON is added to a custom field called "response". + +```json +{ + "response": { + "blacklists": { + "spamhaus_dbl": "not listed", + "surbl": "not listed" + }, + "date_added": "2022-04-07 12:39:14 UTC", + "host": "abeibaba.com", + "id": "2135795", + "larted": "false", + "last_online": null, + "payloads": null, + "query_status": "ok", + "reporter": "switchcert", + "tags": [ + "Flubot" + ], + "takedown_time_seconds": null, + "threat": "malware_download", + "url": "https://abeibaba.com/ian/?redacted", + "url_status": "offline", + "urlhaus_reference": "https://urlhaus.abuse.ch/url/2135795/" + }, + "status": "threat", + "summary": "malware_download" +} +``` + +Users in SOC will be able to view the entire JSON output, therefore it is important that sensitive information, such as credentials or other secrets, is excluded from the output. + +#### Internationalization + +Some of the built-in analyzers use snake_case summary values, instead of human-friendly words or phrases. These are identifiers that the SOC UI will use to look up a localized translation for the user. The use of these identifiers is not required for custom analyzers. In fact, in order for an identifier to be properly localized, the translations must exist in the SOC product, which is out of scope of this development guide.
That said, the following generic translations might be useful for custom analyzers: + +| Identifier | English | +| ------------------ | -------------------------- | +| `malicious` | Malicious | +| `suspicious` | Suspicious | +| `harmless` | Harmless | +| `internal_failure` | Analyzer Internal Failure | +| `timeout` | Remote Host Timed Out | + +#### Timeout + +It is expected that analyzers will finish quickly, but there is a default timeout in place that will abort the analyzer if the timeout is exceeded. By default that timeout is 15 minutes (900000 milliseconds), but can be customized via the `sensoroni:analyze_timeout_ms` salt pillar. + + +## Contributing + +Review the Security Onion project [contribution guidelines](https://github.com/Security-Onion-Solutions/securityonion/blob/master/CONTRIBUTING.md) if you are considering contributing an analyzer to the Security Onion project. + +#### Procedure + +In order to make a custom analyzer into a permanent Security Onion analyzer, the following steps need to be taken: + +1. Fork the [securityonion GitHub repository](https://github.com/Security-Onion-Solutions/securityonion) +2. Copy your custom analyzer directory to the forked project, under the `securityonion/salt/sensoroni/files/analyzers` directory. +3. Ensure the contribution requirements in the following section are met. +4. Submit a [pull request](https://github.com/Security-Onion-Solutions/securityonion/pulls) to merge your GitHub fork back into the `securityonion` _dev_ branch. + +#### Requirements + +The following requirements must be satisfied in order for analyzer pull requests to be accepted into the Security Onion GitHub project: + +- Analyzer contributions must not contain licensed dependencies or source code that is incompatible with the [GPLv2 licensing](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html). +- All source code must pass the `flake8` lint check. This ensures source code conforms to the same style guides as the other analyzers. The Security Onion project will automatically run the linter after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed lint checks will result in the submitter being sent an automated email message. +- All source code must include accompanying unit test coverage. The Security Onion project will automatically run the unit tests after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed unit tests, or insufficient unit test coverage, will result in the submitter being sent an automated email message. +- Documentation of the analyzer, its input requirements, conditions for operation, and other relevant information must be clearly written in an accompanying analyzer metadata file. This file is described in more detail earlier in this document. +- Source code must be well-written and be free of security defects that can put users or their data at unnecessary risk. + + diff --git a/salt/sensoroni/files/analyzers/build.sh b/salt/sensoroni/files/analyzers/build.sh new file mode 100755 index 000000000..17e53c6a7 --- /dev/null +++ b/salt/sensoroni/files/analyzers/build.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +HOME_DIR=$(dirname "$0") +TARGET_DIR=${1:-.} + +PATH=$PATH:/usr/local/bin + +if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then + echo "Missing dependencies. 
Consider running the following command:" + echo " python -m pip install flake8 pytest pytest-cov" + exit 1 +fi + +flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini" +pytest "$TARGET_DIR" "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100 diff --git a/salt/sensoroni/files/analyzers/emailrep/__init__.py b/salt/sensoroni/files/analyzers/emailrep/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep.json b/salt/sensoroni/files/analyzers/emailrep/emailrep.json new file mode 100644 index 000000000..cfd0656d8 --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep.json @@ -0,0 +1,7 @@ +{ + "name": "EmailRep", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries the EmailRep API for email address reputation information", + "supportedTypes" : ["email", "mail"] +} diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep.py b/salt/sensoroni/files/analyzers/emailrep/emailrep.py new file mode 100755 index 000000000..4e9a8fee9 --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep.py @@ -0,0 +1,67 @@ +import json +import os +import sys +import requests +import helpers +import argparse + + +def checkConfigRequirements(conf): + if "api_key" not in conf: + sys.exit(126) + else: + return True + + +def sendReq(conf, meta, email): + url = conf['base_url'] + email + headers = {"Key": conf['api_key']} + response = requests.request('GET', url=url, headers=headers) + return response.json() + + +def prepareResults(raw): + # Default to a cautious internal failure so status and summary are always bound, even for unexpected response shapes. + status = "caution" + summary = "internal_failure" + if "suspicious" in raw: + if raw['suspicious'] is True: + status = "caution" + summary = "suspicious" + elif raw['suspicious'] is False: + status = "ok" + summary = "harmless" + elif "status" in raw: + if raw["reason"] == "invalid email": + status = "caution" + summary = "Invalid email address." + elif "exceeded daily limit" in raw["reason"]: + status = "caution" + summary = "Exceeded daily request limit."
+ else: + status = "caution" + summary = "internal_failure" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = sendReq(conf, meta, data["value"]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search EmailRep for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/emailrep.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep.yaml b/salt/sensoroni/files/analyzers/emailrep/emailrep.yaml new file mode 100644 index 000000000..360fdc540 --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep.yaml @@ -0,0 +1,2 @@ +base_url: https://emailrep.io/ +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:emailrep:api_key', '') }}" diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py b/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py new file mode 100644 index 000000000..ecd6010b0 --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py @@ -0,0 +1,85 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from emailrep import emailrep +import unittest + + +class TestEmailRepMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + emailrep.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('emailrep.emailrep.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + emailrep.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements_not_present(self): + conf = {"not_a_file_path": "blahblah"} + with self.assertRaises(SystemExit) as cm: + emailrep.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"base_url": "https://myurl/", "api_key": "abcd1234"} + email = "test@abc.com" + response = emailrep.sendReq(conf=conf, meta=meta, email=email) + mock.assert_called_once_with("GET", headers={"Key": "abcd1234"}, url="https://myurl/test@abc.com") + self.assertIsNotNone(response) + + def test_prepareResults_invalidEmail(self): + raw = {"status": "fail", "reason": "invalid email"} + results = emailrep.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Invalid email address.") + self.assertEqual(results["status"], "caution") + + def
test_prepareResults_not_suspicious(self): + raw = {"email": "notsus@domain.com", "reputation": "high", "suspicious": False, "references": 21, "details": {"blacklisted": False, "malicious_activity": False, "profiles": ["twitter"]}} + results = emailrep.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_suspicious(self): + raw = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}} + results = emailrep.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_exceeded_limit(self): + raw = {"status": "fail", "reason": "exceeded daily limit. please wait 24 hrs or visit emailrep.io/key for an api key."} + results = emailrep.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Exceeded daily request limit.") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_error(self): + raw = {} + results = emailrep.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}} + artifactInput = '{"value":"sus@domain.com","artifactType":"email"}' + conf = {"base_url": "myurl/", "api_key": "abcd1234"} + with patch('emailrep.emailrep.sendReq', new=MagicMock(return_value=output)) as mock: + results = emailrep.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "suspicious") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/emailrep/requirements.txt b/salt/sensoroni/files/analyzers/emailrep/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null 
and b/salt/sensoroni/files/analyzers/emailrep/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/__init__.py b/salt/sensoroni/files/analyzers/greynoise/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise.json b/salt/sensoroni/files/analyzers/greynoise/greynoise.json new file mode 100644 index 000000000..76cef3324 --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise.json @@ -0,0 +1,7 @@ +{ + "name": "Greynoise IP Analyzer", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries Greynoise for context around an IP address", + "supportedTypes" : ["ip"] +} diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise.py b/salt/sensoroni/files/analyzers/greynoise/greynoise.py new file mode 100755 index 000000000..deeef5414 --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise.py @@ -0,0 +1,78 @@ +import json +import os +import sys +import requests +import helpers +import argparse + + +def checkConfigRequirements(conf): + if "api_key" not in conf or len(conf['api_key']) == 0: + sys.exit(126) + else: + return True + + +def sendReq(conf, meta, ip): + url = conf['base_url'] + if conf['api_version'] == 'community': + url = url + 'v3/community/' + ip + elif conf['api_version'] in ('investigate', 'automate'): + url = url + 'v2/noise/context/' + ip + headers = {"key": conf['api_key']} + response = requests.request('GET', url=url, headers=headers) + return response.json() + + +def prepareResults(raw): + # Default to a cautious internal failure so status and summary are always bound, even for unexpected response shapes. + status = "caution" + summary = "internal_failure" + if "message" in raw: + if "Success" in raw["message"]: + if "classification" in raw: + if "benign" in raw['classification']: + status = "ok" + summary = "harmless" + elif "malicious" in raw['classification']: + status = "threat" + summary = "malicious" + elif "unknown" in raw['classification']: + status = "caution" + summary = "Results found." + elif "IP not observed scanning the internet or contained in RIOT data set." in raw["message"]: + status = "ok" + summary = "no_results" + elif "Request is not a valid routable IPv4 address" in raw["message"]: + status = "caution" + summary = "Invalid IP address."
+ else: + status = "info" + summary = raw["message"] + else: + status = "caution" + summary = "internal_failure" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = sendReq(conf, meta, data["value"]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search Greynoise for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/greynoise.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise.yaml b/salt/sensoroni/files/analyzers/greynoise/greynoise.yaml new file mode 100644 index 000000000..aee4f961a --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise.yaml @@ -0,0 +1,3 @@ +base_url: https://api.greynoise.io/ +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_key', '') }}" +api_version: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_version', 'community') }}" diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py b/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py new file mode 100644 index 000000000..768107adb --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py @@ -0,0 +1,117 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from greynoise import greynoise +import unittest + + +class TestGreynoiseMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + greynoise.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('greynoise.greynoise.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + greynoise.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements_not_present(self): + conf = {"not_a_file_path": "blahblah"} + with self.assertRaises(SystemExit) as cm: + greynoise.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_sendReq_community(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "community"} + ip = "192.168.1.1" + response = greynoise.sendReq(conf=conf, meta=meta, ip=ip) + mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v3/community/192.168.1.1") + self.assertIsNotNone(response) + + def test_sendReq_investigate(self): + with patch('requests.request', 
new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "investigate"} + ip = "192.168.1.1" + response = greynoise.sendReq(conf=conf, meta=meta, ip=ip) + mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1") + self.assertIsNotNone(response) + + def test_sendReq_automate(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "automate"} + ip = "192.168.1.1" + response = greynoise.sendReq(conf=conf, meta=meta, ip=ip) + mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1") + self.assertIsNotNone(response) + + def test_prepareResults_invalidIP(self): + raw = {"message": "Request is not a valid routable IPv4 address"} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Invalid IP address.") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_not_found(self): + raw = {"ip": "192.190.1.1", "noise": "false", "riot": "false", "message": "IP not observed scanning the internet or contained in RIOT data set."} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_benign(self): + raw = {"ip": "8.8.8.8", "noise": "false", "riot": "true", "classification": "benign", "name": "Google Public DNS", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_malicious(self): + raw = {"ip": "121.142.87.218", "noise": "true", "riot": "false", "classification": "malicious", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_unknown(self): + raw = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Results found.") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_unknown_message(self): + raw = {"message": "unknown"} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "unknown") + self.assertEqual(results["status"], "info") + + def test_prepareResults_error(self): + raw = {} + results = greynoise.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"} + artifactInput = 
'{"value":"221.4.62.149","artifactType":"ip"}' + conf = {"base_url": "myurl/", "api_key": "abcd1234", "api_version": "community"} + with patch('greynoise.greynoise.sendReq', new=MagicMock(return_value=output)) as mock: + results = greynoise.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "Results found.") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/greynoise/requirements.txt b/salt/sensoroni/files/analyzers/greynoise/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/helpers.py b/salt/sensoroni/files/analyzers/helpers.py new file mode 100644 index 000000000..f4ef4a1e6 --- /dev/null +++ b/salt/sensoroni/files/analyzers/helpers.py @@ -0,0 +1,28 @@ +import json +import os +import sys +import yaml + + +def checkSupportedType(meta, artifact_type): + if artifact_type not in meta['supportedTypes']: + sys.exit(126) + else: + return 
True + + +def parseArtifact(artifact): + data = json.loads(artifact) + return data + + +def loadMetadata(file): + dir = os.path.dirname(os.path.realpath(file)) + filename = os.path.realpath(file).rsplit('/', 1)[1].split('.')[0] + with open(str(dir + "/" + filename + ".json"), "r") as metafile: + return json.load(metafile) + + +def loadConfig(path): + with open(str(path), "r") as conffile: + return yaml.safe_load(conffile) diff --git a/salt/sensoroni/files/analyzers/helpers_test.py b/salt/sensoroni/files/analyzers/helpers_test.py new file mode 100644 index 000000000..c10ff00d5 --- /dev/null +++ b/salt/sensoroni/files/analyzers/helpers_test.py @@ -0,0 +1,35 @@ +from unittest.mock import patch, MagicMock +import helpers +import os +import unittest + + +class TestHelpersMethods(unittest.TestCase): + + def test_checkSupportedType(self): + with patch('sys.exit', new=MagicMock()) as mock: + meta = {"supportedTypes": ["ip", "foo"]} + result = helpers.checkSupportedType(meta, "ip") + self.assertTrue(result) + mock.assert_not_called() + + result = helpers.checkSupportedType(meta, "bar") + self.assertFalse(result) + mock.assert_called_once_with(126) + + def test_loadMetadata(self): + dir = os.path.dirname(os.path.realpath(__file__)) + input = dir + '/urlhaus/urlhaus.py' + data = helpers.loadMetadata(input) + self.assertEqual(data["name"], "Urlhaus") + + def test_loadConfig(self): + dir = os.path.dirname(os.path.realpath(__file__)) + data = helpers.loadConfig(dir + "/virustotal/virustotal.yaml") + self.assertEqual(data["base_url"], "https://www.virustotal.com/api/v3/search?query=") + + def test_parseArtifact(self): + input = '{"value":"foo","artifactType":"bar"}' + data = helpers.parseArtifact(input) + self.assertEqual(data["artifactType"], "bar") + self.assertEqual(data["value"], "foo") diff --git a/salt/sensoroni/files/analyzers/ja3er/__init__.py b/salt/sensoroni/files/analyzers/ja3er/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er.json b/salt/sensoroni/files/analyzers/ja3er/ja3er.json new file mode 100644 index 000000000..de072d0b7 --- /dev/null +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er.json @@ -0,0 +1,7 @@ +{ + "name": "JA3er Hash Search", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries JA3er user agents and sightings", + "supportedTypes" : ["ja3"] +} diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er.py b/salt/sensoroni/files/analyzers/ja3er/ja3er.py new file mode 100755 index 000000000..330a8dd66 --- /dev/null +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er.py @@ -0,0 +1,53 @@ +import json +import os +import requests +import helpers +import argparse + + +def sendReq(conf, meta, hash): + url = conf['base_url'] + hash + response = requests.request('GET', url) + return response.json() + + +def prepareResults(raw): + if "error" in raw: + if "Sorry" in raw["error"]: + status = "ok" + summary = "No results found." + elif "Invalid hash" in raw["error"]: + status = "caution" + summary = "Invalid hash." + else: + status = "caution" + summary = "internal_failure" + else: + status = "info" + summary = "Results found." 
+ results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(conf, input): + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = sendReq(conf, meta, data["value"]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search JA3er for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/ja3er.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er.yaml b/salt/sensoroni/files/analyzers/ja3er/ja3er.yaml new file mode 100644 index 000000000..40d6f64dd --- /dev/null +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er.yaml @@ -0,0 +1 @@ +base_url: https://ja3er.com/search/ diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py b/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py new file mode 100644 index 000000000..8ad22ac69 --- /dev/null +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py @@ -0,0 +1,65 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from ja3er import ja3er +import unittest + + +class TestJa3erMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + ja3er.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('ja3er.ja3er.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + ja3er.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"base_url": "myurl/"} + hash = "abcd1234" + response = ja3er.sendReq(conf=conf, meta=meta, hash=hash) + mock.assert_called_once_with("GET", "myurl/abcd1234") + self.assertIsNotNone(response) + + def test_prepareResults_none(self): + raw = {"error": "Sorry no values found"} + results = ja3er.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "No results found.") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_invalidHash(self): + raw = {"error": "Invalid hash"} + results = ja3er.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Invalid hash.") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_info(self): + raw = [{"User-Agent": "Blah/5.0", "Count": 24874, "Last_seen": "2022-04-08 16:18:38"}, {"Comment": "Brave browser v1.36.122\n\n", "Reported": "2022-03-28 20:26:42"}] + results = ja3er.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], 
"Results found.") + self.assertEqual(results["status"], "info") + + def test_analyze(self): + output = {"info": "Results found."} + artifactInput = '{"value":"abcd1234","artifactType":"ja3"}' + conf = {"base_url": "myurl/"} + with patch('ja3er.ja3er.sendReq', new=MagicMock(return_value=output)) as mock: + results = ja3er.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "Results found.") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/ja3er/requirements.txt b/salt/sensoroni/files/analyzers/ja3er/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/ja3er/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/__init__.py b/salt/sensoroni/files/analyzers/localfile/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/localfile/localfile.json b/salt/sensoroni/files/analyzers/localfile/localfile.json new file mode 100644 index 000000000..5dd379ff4 --- /dev/null +++ 
b/salt/sensoroni/files/analyzers/localfile/localfile.json @@ -0,0 +1,7 @@ +{ + "name": "Local File Analyzer", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries one or more local CSV files for a value, then returns all columns within matching rows.", + "supportedTypes" : ["domain", "hash", "ip", "other", "url"] +} diff --git a/salt/sensoroni/files/analyzers/localfile/localfile.py b/salt/sensoroni/files/analyzers/localfile/localfile.py new file mode 100755 index 000000000..745c4b9b6 --- /dev/null +++ b/salt/sensoroni/files/analyzers/localfile/localfile.py @@ -0,0 +1,79 @@ +import json +import helpers +import os +import sys +import argparse +import csv + + +def checkConfigRequirements(conf): + if "file_path" not in conf or len(conf['file_path']) == 0: + sys.exit(126) + else: + return True + + +def searchFile(artifact, csvfiles): + dir = os.path.dirname(os.path.realpath(__file__)) + found = [] + for f in csvfiles: + filename = dir + "/" + f + with open(filename, "r") as csvfile: + csvdata = csv.DictReader(csvfile) + for row in csvdata: + first_key = list(row.keys())[0] + if artifact in row[first_key]: + row.update({"filename": filename}) + found.append(row) + if len(found) != 0: + if len(found) == 1: + results = found[0] + else: + results = found + else: + results = "No results" + + return results + + +def prepareResults(raw): + if len(raw) > 0: + if "No results" in raw: + status = "ok" + summary = "no_results" + else: + status = "info" + summary = "One or more matches found." + else: + raw = {} + status = "caution" + summary = "internal_failure" + response = raw + results = {'response': response, 'status': status, 'summary': summary} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + search = searchFile(data["value"], conf['file_path']) + results = prepareResults(search) + return results + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search CSV file for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/localfile.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/localfile/localfile.yaml b/salt/sensoroni/files/analyzers/localfile/localfile.yaml new file mode 100644 index 000000000..69740c379 --- /dev/null +++ b/salt/sensoroni/files/analyzers/localfile/localfile.yaml @@ -0,0 +1 @@ +file_path: [] diff --git a/salt/sensoroni/files/analyzers/localfile/localfile_test.csv b/salt/sensoroni/files/analyzers/localfile/localfile_test.csv new file mode 100644 index 000000000..bf16d3378 --- /dev/null +++ b/salt/sensoroni/files/analyzers/localfile/localfile_test.csv @@ -0,0 +1,4 @@ +indicator,description,reference +abcd1234,This is a test!,Testing +abcd1234,This is another test!,Testing +192.168.1.1,Yet another test!,Testing diff --git a/salt/sensoroni/files/analyzers/localfile/localfile_test.py b/salt/sensoroni/files/analyzers/localfile/localfile_test.py new file mode 100644 index 000000000..66e4820e1 --- /dev/null +++ 
b/salt/sensoroni/files/analyzers/localfile/localfile_test.py @@ -0,0 +1,119 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from localfile import localfile +import unittest + + +class TestLocalfileMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + localfile.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('localfile.localfile.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + localfile.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements_present(self): + conf = {"file_path": "['intel.csv']"} + self.assertTrue(localfile.checkConfigRequirements(conf)) + + def test_checkConfigRequirements_not_present(self): + conf = {"not_a_file_path": "blahblah"} + with self.assertRaises(SystemExit) as cm: + localfile.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_checkConfigRequirements_empty(self): + conf = {"file_path": ""} + with self.assertRaises(SystemExit) as cm: + localfile.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_searchFile_multiple_found(self): + artifact = "abcd1234" + results = localfile.searchFile(artifact, ["localfile_test.csv"]) + self.assertEqual(results[0]["indicator"], "abcd1234") + self.assertEqual(results[0]["description"], "This is a test!") + self.assertEqual(results[0]["reference"], "Testing") + self.assertEqual(results[1]["indicator"], "abcd1234") + self.assertEqual(results[1]["description"], "This is another test!") + + def test_searchFile_single_found(self): + artifact = "192.168.1.1" + results = localfile.searchFile(artifact, ["localfile_test.csv"]) + self.assertEqual(results["indicator"], "192.168.1.1") + self.assertEqual(results["description"], "Yet another test!") + self.assertEqual(results["reference"], "Testing") + + def test_searchFile_not_found(self): + artifact = "youcan'tfindme" + results = localfile.searchFile(artifact, ["localfile_test.csv"]) + self.assertEqual(results, "No results") + + def test_prepareResults_none(self): + raw = "No results" + results = localfile.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_ok(self): + raw = [ + { + "description": "This is one BAD piece of malware!", + "filename": "/opt/sensoroni/analyzers/localfile/intel.csv", + "indicator": "abc1234", + "reference": "https://myintelservice" + }, + { + "filename": "/opt/sensoroni/analyzers/localfile/random.csv", + "randomcol1": "myothervalue", + "randomcol2": "myotherothervalue", + "value": "abc1234" + } + ] + results = localfile.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "One or more matches found.") + self.assertEqual(results["status"], "info") + + def test_prepareResults_error(self): + raw = {} + results = localfile.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], 
"internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = [ + { + "description": "This is one BAD piece of malware!", + "filename": "/opt/sensoroni/analyzers/localfile/intel.csv", + "indicator": "abc1234", + "reference": "https://myintelservice" + }, + { + "filename": "/opt/sensoroni/analyzers/localfile/random.csv", + "randomcol1": "myothervalue", + "randomcol2": "myotherothervalue", + "value": "abc1234" + } + ] + artifactInput = '{"value":"foo","artifactType":"url"}' + conf = {"file_path": "/home/intel.csv"} + with patch('localfile.localfile.searchFile', new=MagicMock(return_value=output)) as mock: + results = localfile.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "One or more matches found.") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/localfile/requirements.txt b/salt/sensoroni/files/analyzers/localfile/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/localfile/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and 
b/salt/sensoroni/files/analyzers/localfile/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/__init__.py b/salt/sensoroni/files/analyzers/otx/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/otx/otx.json b/salt/sensoroni/files/analyzers/otx/otx.json new file mode 100644 index 000000000..f1cdea4c3 --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/otx.json @@ -0,0 +1,7 @@ +{ + "name": "Alienvault OTX", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries Alienvault OTX for a domain, hash, IP, or URL, then returns a report for it.", + "supportedTypes" : ["domain", "hash", "ip", "url"] +} diff --git a/salt/sensoroni/files/analyzers/otx/otx.py b/salt/sensoroni/files/analyzers/otx/otx.py new file mode 100755 index 000000000..2d4e8e592 --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/otx.py @@ -0,0 +1,88 @@ +import json +import requests +import helpers +import sys +import os +import argparse + + +def buildReq(conf, artifact_type, artifact_value): + headers = {"X-OTX-API-KEY": conf["api_key"]} + base_url = conf['base_url'] + if artifact_type == "ip": + uri = "indicators/IPv4/" + elif artifact_type == "url": + uri = "indicators/url/" + elif artifact_type == "domain": + uri = "indicators/domain/" + elif artifact_type == "hash": + uri = "indicators/file/" + section = "/general" + url = base_url + uri + artifact_value + section + return url, headers + + +def checkConfigRequirements(conf): + if "api_key" not in conf or len(conf['api_key']) == 0: + sys.exit(126) + else: + return True + + +def sendReq(url, headers): + response = requests.request('GET', url, headers=headers) + return response.json() + + +def prepareResults(response): + if len(response) != 0: + raw = response + if 'reputation' in raw: + reputation = raw["reputation"] + if reputation == 0: + status = "ok" + summaryinfo = "harmless" + elif reputation > 0 and reputation < 50: + status = "ok" + summaryinfo = "Likely Harmless" + elif reputation >= 50 and reputation < 75: + status = "caution" + summaryinfo = "suspicious" + elif reputation >= 75 and reputation <= 100: + status = "threat" + summaryinfo = "malicious" + else: + status = "info" + summaryinfo = "Analysis complete." 
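As an aside, the reputation thresholds in the branches above classify sample OTX scores as follows; a minimal sketch, assuming the package layout used by otx_test.py further below (`from otx import otx`):

```python
# Illustration only: expected verdicts for sample OTX reputation scores,
# per the threshold branches in prepareResults() above.
from otx import otx

for rep in (0, 25, 50, 75):
    r = otx.prepareResults({"reputation": rep})
    print(rep, r["status"], r["summary"])
# 0 -> ok/harmless, 25 -> ok/Likely Harmless, 50 -> caution/suspicious, 75 -> threat/malicious
```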
+ else: + raw = {} + status = "caution" + summaryinfo = "internal_failure" + results = {'response': raw, 'status': status, 'summary': summaryinfo} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + request = buildReq(conf, data["artifactType"], data["value"]) + response = sendReq(request[0], request[1]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search Alienvault OTX for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/otx.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/otx/otx.yaml b/salt/sensoroni/files/analyzers/otx/otx.yaml new file mode 100644 index 000000000..5b7dfa6a8 --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/otx.yaml @@ -0,0 +1,2 @@ +base_url: https://otx.alienvault.com/api/v1/ +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:otx:api_key', '') }}" diff --git a/salt/sensoroni/files/analyzers/otx/otx_test.py b/salt/sensoroni/files/analyzers/otx/otx_test.py new file mode 100644 index 000000000..6f5764ca4 --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/otx_test.py @@ -0,0 +1,250 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from otx import otx +import unittest + + +class TestOtxMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + otx.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('otx.otx.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + otx.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements(self): + conf = {"not_a_key": "abcd12345"} + with self.assertRaises(SystemExit) as cm: + otx.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_buildReq_domain(self): + conf = {'base_url': 'https://myurl/', 'api_key': 'abcd12345'} + artifact_type = "domain" + artifact_value = "abc.com" + result = otx.buildReq(conf, artifact_type, artifact_value) + self.assertEqual("https://myurl/indicators/domain/abc.com/general", result[0]) + self.assertEqual({'X-OTX-API-KEY': 'abcd12345'}, result[1]) + + def test_buildReq_hash(self): + conf = {'base_url': 'https://myurl/', 'api_key': 'abcd12345'} + artifact_type = "hash" + artifact_value = "abcd1234" + result = otx.buildReq(conf, artifact_type, artifact_value) + self.assertEqual("https://myurl/indicators/file/abcd1234/general", result[0]) + self.assertEqual({'X-OTX-API-KEY': 'abcd12345'}, result[1]) + + def test_buildReq_ip(self): + conf = {'base_url':
'https://myurl/', 'api_key': 'abcd12345'} + artifact_type = "ip" + artifact_value = "192.168.1.1" + result = otx.buildReq(conf, artifact_type, artifact_value) + self.assertEqual("https://myurl/indicators/IPv4/192.168.1.1/general", result[0]) + self.assertEqual({'X-OTX-API-KEY': 'abcd12345'}, result[1]) + + def test_buildReq_url(self): + conf = {'base_url': 'https://myurl/', 'api_key': 'abcd12345'} + artifact_type = "url" + artifact_value = "https://abc.com" + result = otx.buildReq(conf, artifact_type, artifact_value) + self.assertEqual("https://myurl/indicators/url/https://abc.com/general", result[0]) + self.assertEqual({'X-OTX-API-KEY': 'abcd12345'}, result[1]) + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + url = "https://myurl=" + response = otx.sendReq(url, headers={"x-apikey": "xyz"}) + mock.assert_called_once_with("GET", "https://myurl=", headers={"x-apikey": "xyz"}) + self.assertIsNotNone(response) + + def test_prepareResults_harmless(self): + raw = { + "whois": "http://whois.domaintools.com/192.168.1.1", + "reputation": 0, + "indicator": "192.168.1.1", + "type": "IPv4", + "pulse_info": { + "count": 0, + "pulses": [], + "related": { + "alienvault": { + "adversary": [], + "malware_families": [] + } + } + }, + "false_positive": [], + "sections": [ + "general" + ] + } + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_likely_harmless(self): + raw = { + "whois": "http://whois.domaintools.com/192.168.1.1", + "reputation": 49, + "indicator": "192.168.1.1", + "type": "IPv4", + "pulse_info": { + "count": 0, + "pulses": [], + "related": { + "alienvault": { + "adversary": [], + "malware_families": [] + } + } + }, + "false_positive": [], + "sections": [ + "general" + ] + } + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Likely Harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_suspicious(self): + raw = { + "whois": "http://whois.domaintools.com/192.168.1.1", + "reputation": 50, + "indicator": "192.168.1.1", + "type": "IPv4", + "pulse_info": { + "count": 0, + "pulses": [], + "related": { + "alienvault": { + "adversary": [], + "malware_families": [] + } + } + }, + "false_positive": [], + "sections": [ + "general" + ] + } + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_threat(self): + raw = { + "whois": "http://whois.domaintools.com/192.168.1.1", + "reputation": 75, + "indicator": "192.168.1.1", + "type": "IPv4", + "pulse_info": { + "count": 0, + "pulses": [], + "related": { + "alienvault": { + "adversary": [], + "malware_families": [] + } + } + }, + "false_positive": [], + "sections": [ + "general" + ] + } + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_undetermined(self): + raw = { + "alexa": "", + "base_indicator": {}, + "domain": "Unavailable", + "false_positive": [], + "hostname": "Unavailable", + "indicator": "http://192.168.1.1", + "pulse_info": { + "count": 0, + "pulses": [], + "references": [], + "related": { + "alienvault": { + "adversary": [], + 
"industries": [], + "malware_families": [], + "unique_indicators": 0 + }, + "other": { + "adversary": [], + "industries": [], + "malware_families": [], + "unique_indicators": 0 + } + } + }, + "sections": [ + "general" + ], + "type": "url", + "type_title": "URL", + "validation": [] + } + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Analysis complete.") + self.assertEqual(results["status"], "info") + + def test_prepareResults_error(self): + raw = {} + results = otx.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = { + "whois": "http://whois.domaintools.com/192.168.1.1", + "reputation": 0, + "indicator": "192.168.1.1", + "type": "IPv4", + "pulse_info": { + "count": 0, + "pulses": [], + "related": { + "alienvault": { + "adversary": [], + "malware_families": [] + } + } + }, + "false_positive": [], + "sections": [ + "general" + ] + } + + artifactInput = '{"value":"192.168.1.1","artifactType":"ip"}' + conf = {"base_url": "https://myurl/", "api_key": "xyz"} + with patch('otx.otx.sendReq', new=MagicMock(return_value=output)) as mock: + results = otx.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "harmless") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/otx/requirements.txt b/salt/sensoroni/files/analyzers/otx/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/otx/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/otx/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/otx/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/otx/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 
index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/otx/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/__init__.py b/salt/sensoroni/files/analyzers/pulsedive/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive.json b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.json new file mode 100644 index 000000000..87d4889e7 --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.json @@ -0,0 +1,7 @@ +{ + "name": "Pulsedive", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries Pulsedive for context around an observable", + "supportedTypes": ["domain", "ip", "hash", "uri_path", "url", "user-agent"] +} diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py new file mode 100644 index 000000000..719d760e6 --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py @@ -0,0 +1,107 @@ +import json +import requests +import argparse +import helpers +import os +import sys + + +def checkConfigRequirements(conf): + if "api_key" not in conf or len(conf['api_key']) == 0: + sys.exit(126) + else: + return True + + +def buildReq(conf, artifactType, artifactValue): + indicatorTypes = ["domain", "hash", "ip", "url"] + if artifactType in indicatorTypes: + url = conf['base_url'] + '/info.php' + params = {"key": conf["api_key"], "indicator": artifactValue} + else: + if artifactType == "uri_path": + query = "http.location=" + artifactValue + url = conf['base_url'] + '/explore.php' + elif artifactType == "user-agent": + query = "http.useragent_normaliser=" + artifactValue + url = conf['base_url'] + '/explore.php' + params = {"key": conf["api_key"], "q": query, "limit": 100} + + return url, params + + +def sendReq(url, params): + response = requests.request('GET', url, params=params) + return response.json() + + +def prepareResults(raw): + classified = [] + classification = { + "high": "malicious", + "medium": "suspicious", + "low": "harmless", + "none": "none", + "unknown": "unknown" + } + + if raw: + if 'results' in raw: + if raw['results'] == []: + classified.append("no_results") + else: + for r in raw['results']: + risk = r['risk'] + classified.append(classification.get(risk)) + else: + classified.append(classification.get(raw['risk'])) + + if classified.count('malicious') > 0: + summary = "malicious" + status = "threat" + elif classified.count('suspicious') > 0: + summary = "suspicious" + status = "caution" + elif classified.count('harmless') > 0: + summary = "harmless" + status = "ok" + elif classified.count('none') > 0: + summary = "harmless" + status = "ok" + elif classified.count('unknown') > 0: + summary = "" + status = "unknown" + elif classified.count('no_results') > 0: + summary = "no_results" + status = "ok" + else: + summary = "internal_failure" + status = "caution" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta =
helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + request = buildReq(conf, data["artifactType"], data["value"]) + response = sendReq(request[0], request[1]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search Pulsedive for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/pulsedive.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive.yaml b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.yaml new file mode 100644 index 000000000..c29f61ad8 --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.yaml @@ -0,0 +1,2 @@ +base_url: https://pulsedive.com/api/ +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:pulsedive:api_key', '') }}" diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py b/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py new file mode 100644 index 000000000..47b60efdd --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py @@ -0,0 +1,121 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from pulsedive import pulsedive +import unittest + + +class TestPulsediveMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + pulsedive.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('pulsedive.pulsedive.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + pulsedive.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements(self): + conf = {"not_a_key": "abcd12345"} + with self.assertRaises(SystemExit) as cm: + pulsedive.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_buildReq_domain(self): + conf = {"api_key": "xyz", "base_url": "https://myurl"} + artifactType = "domain" + artifactValue = "pulsedive.com" + result = pulsedive.buildReq(conf, artifactType, artifactValue) + self.assertEqual("https://myurl/info.php", result[0]) + self.assertEqual({"key": "xyz", "indicator": "pulsedive.com"}, result[1]) + + def test_buildReq_uri_path(self): + conf = {"api_key": "xyz", "base_url": "https://myurl"} + artifactType = "uri_path" + artifactValue = "/main.php" + result = pulsedive.buildReq(conf, artifactType, artifactValue) + self.assertEqual("https://myurl/explore.php", result[0]) + self.assertEqual({"key": "xyz", "q": "http.location=/main.php", "limit": 100}, result[1]) + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + url = 'https://myurl/api/' + params = {"key":
"abcd1234", "q": "http.location=/main.php", "limit": 100} + response = pulsedive.sendReq(url=url, params=params) + mock.assert_called_once_with("GET", "https://myurl/api/", params={"key": "abcd1234", "q": "http.location=/main.php", "limit": 100}) + self.assertIsNotNone(response) + + def test_prepareResults_risk_high(self): + raw = {"results": [{"risk": "high"}]} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_risk_med(self): + raw = {"results": [{"risk": "medium"}]} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_risk_low(self): + raw = {"results": [{"risk": "low"}]} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_risk_none(self): + raw = {"results": [{"risk": "none"}]} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_risk_unknown(self): + raw = {"results": [{"risk": "unknown"}]} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "") + self.assertEqual(results["status"], "unknown") + + def test_prepareResults_no_results(self): + raw = {"results": []} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_risk_none_indicator(self): + raw = {"iid": "1234", "risk": "none"} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_error(self): + raw = {} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"results": [{"risk": "low"}]} + artifactInput = '{"value":"chrome","artifactType":"user-agent"}' + conf = {"api_key": "xyz", "base_url": "https://myurl"} + with patch('pulsedive.pulsedive.sendReq', new=MagicMock(return_value=output)) as mock: + results = pulsedive.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "harmless") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/pulsedive/requirements.txt b/salt/sensoroni/files/analyzers/pulsedive/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..1dfb5c2d3 Binary files /dev/null and 
b/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/pytest.ini b/salt/sensoroni/files/analyzers/pytest.ini new file mode 100644 index 000000000..e74b52dd1 --- /dev/null +++ b/salt/sensoroni/files/analyzers/pytest.ini @@ -0,0 +1,27 @@ +[flake8] +exclude = + .venv + */site-packages/, + */source-packages/, + */__pycache__ +show_source = true +max_complexity = 12 +max_line_length = 200 +statistics = true +doctests = true + +[pytest] +python_files = *_test.py +python_classes = Test +python_functions = test_* +norecursedirs = site-packages + +[report] +exclude_lines = + if __name__ == .__main__.: + +show_missing = True +omit = + *_test.py, + */site-packages/*, + */source-packages/* diff --git a/salt/sensoroni/files/analyzers/spamhaus/__init__.py b/salt/sensoroni/files/analyzers/spamhaus/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/spamhaus/requirements.txt b/salt/sensoroni/files/analyzers/spamhaus/requirements.txt new file mode 100644 index 000000000..9ec0d5539 --- /dev/null +++ b/salt/sensoroni/files/analyzers/spamhaus/requirements.txt @@ -0,0 +1,2 @@ +dnspython>=2.2.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl b/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl new file mode 100644 index 000000000..b390ce52f Binary files /dev/null and 
b/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/spamhaus/source-packages/dnspython-2.2.1-py3-none-any.whl b/salt/sensoroni/files/analyzers/spamhaus/source-packages/dnspython-2.2.1-py3-none-any.whl new file mode 100644 index 000000000..645d5bb5b Binary files /dev/null and b/salt/sensoroni/files/analyzers/spamhaus/source-packages/dnspython-2.2.1-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/spamhaus/spamhaus.json b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.json new file mode 100644 index 000000000..abeced090 --- /dev/null +++ b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.json @@ -0,0 +1,7 @@ +{ + "name": "Spamhaus", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries Spamhaus to see if an IP is considered malicious.", + "supportedTypes" : ["ip"] +} diff --git a/salt/sensoroni/files/analyzers/spamhaus/spamhaus.py b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.py new file mode 100644 index 000000000..cf6bfa198 --- /dev/null +++ b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.py @@ -0,0 +1,83 @@ +import argparse +import dns.resolver +import dns.reversename +import json +import os +import helpers + + +def resolve(config, meta, ip): + value = str(dns.reversename.from_address(ip)).replace("in-addr.arpa.", config["lookup_host"] + ".") + resolver = dns.resolver.Resolver() + if len(config["nameservers"]) > 0 and len(config["nameservers"][0]) > 0: + resolver.nameservers = config["nameservers"] + try: + responses = resolver.resolve(value) + except dns.resolver.NXDOMAIN: + responses = [] + + return responses + + +def prepareResults(responses): + resultMap = { + "127.0.0.2": {'severity': 200, 'summary': 'spam', 'status': 'caution'}, + "127.0.0.3": {'severity': 200, 'summary': 'spam', 'status': 'caution'}, + "127.0.0.4": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.0.5": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.0.6": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.0.7": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.0.10": {'severity': 100, 'summary': 'suspicious', 'status': 'caution'}, + "127.0.0.11": {'severity': 100, 'summary': 'suspicious', 'status': 'caution'}, + + "127.0.1.2": {'severity': 200, 'summary': 'spam', 'status': 'caution'}, + "127.0.1.4": {'severity': 250, 'summary': 'phishing', 'status': 'threat'}, + "127.0.1.5": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.1.6": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.1.102": {'severity': 200, 'summary': 'spam', 'status': 'caution'}, + "127.0.1.103": {'severity': 200, 'summary': 'spam', 'status': 'caution'}, + "127.0.1.104": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.1.105": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.1.106": {'severity': 300, 'summary': 'malicious', 'status': 'threat'}, + "127.0.1.107": {'severity': 100, 'summary': 'suspicious', 'status': 'caution'}, + + "127.255.255.252": {'severity': 1, 'summary': 'internal_failure', 'status': 'caution'}, + "127.255.255.254": {'severity': 2, 'summary': 'internal_failure', 'status': 'caution'}, + "127.255.255.255": {'severity': 3, 'summary': 'excessive_usage', 'status': 'caution'}, + } + + raw = [] + currentResult = {'severity': 0, 'summary': 'harmless', 'status': 'ok'} + for response in 
responses: + raw.append(response.to_text()) + if response.address in resultMap: + result = resultMap[response.address] + if currentResult is None or currentResult['severity'] < result['severity']: + currentResult = result + + currentResult['response'] = raw + return currentResult + + +def analyze(config, input): + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = resolve(config, meta, data["value"]) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search Spamhaus for an IP') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/spamhaus.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/spamhaus/spamhaus.yaml b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.yaml new file mode 100644 index 000000000..271cf27a0 --- /dev/null +++ b/salt/sensoroni/files/analyzers/spamhaus/spamhaus.yaml @@ -0,0 +1,2 @@ +lookup_host: zen.spamhaus.org +nameservers: ["{{ salt['pillar.get']('sensoroni:analyzers:spamhaus:nameserver', '') }}"] \ No newline at end of file diff --git a/salt/sensoroni/files/analyzers/spamhaus/spamhaus_test.py b/salt/sensoroni/files/analyzers/spamhaus/spamhaus_test.py new file mode 100644 index 000000000..b62024444 --- /dev/null +++ b/salt/sensoroni/files/analyzers/spamhaus/spamhaus_test.py @@ -0,0 +1,126 @@ +from io import StringIO +import dns +import sys +from unittest.mock import patch, MagicMock +from spamhaus import spamhaus +import unittest + + +class FakeAnswer: + address = '' + + def __init__(self, ip='127.0.0.1'): + self.address = ip + + def to_text(self): + return str(self.address) + + +class TestSpamhausMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + spamhaus.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('spamhaus.spamhaus.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + spamhaus.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_resolve(self): + with patch('dns.resolver.Resolver.resolve', new=MagicMock(return_value=MagicMock())) as mock: + meta = {} + conf = {"nameservers": ["1.2.3.4"], "lookup_host": "some.host"} + response = spamhaus.resolve(config=conf, meta=meta, ip="127.0.0.1") + mock.assert_called_once_with("1.0.0.127.some.host.") + self.assertIsNotNone(response) + + def test_resolve_not_found(self): + mock = MagicMock() + mock.side_effect = dns.resolver.NXDOMAIN + with patch('dns.resolver.Resolver.resolve', new=mock): + meta = {} + conf = {"nameservers": ["1.2.3.4"], "lookup_host": "some.host"} + response = spamhaus.resolve(config=conf, meta=meta, ip="127.0.0.1") + 
mock.assert_called_once_with("1.0.0.127.some.host.") + self.assertIsNotNone(response) + + def test_prepareResults_ok_multiple(self): + raw = [FakeAnswer("127.0.0.0"), FakeAnswer("127.0.0.1")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.0.0.0', '127.0.0.1']) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_failure2(self): + raw = [FakeAnswer("127.255.255.252")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.255.255.252']) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_failure4(self): + raw = [FakeAnswer("127.255.255.254")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.255.255.254']) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_excessive(self): + raw = [FakeAnswer("127.255.255.255")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.255.255.255']) + self.assertEqual(results["summary"], "excessive_usage") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_sus_multiple(self): + raw = [FakeAnswer("127.0.0.10"), FakeAnswer("127.0.0.11")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.0.0.10', '127.0.0.11']) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_spam_multiple(self): + raw = [FakeAnswer("127.0.0.2")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.0.0.2']) + self.assertEqual(results["summary"], "spam") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_threat_multiple(self): + raw = [FakeAnswer("127.0.0.1"), FakeAnswer("127.0.0.4")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.0.0.1', '127.0.0.4']) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_threat(self): + raw = [FakeAnswer("127.0.0.4")] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], ['127.0.0.4']) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_error(self): + raw = [] + results = spamhaus.prepareResults(raw) + self.assertEqual(results["response"], []) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_analyze(self): + output = [FakeAnswer()] + artifactInput = '{"value":"1.2.3.4","artifactType":"ip"}' + with patch('spamhaus.spamhaus.resolve', new=MagicMock(return_value=output)) as mock: + results = spamhaus.analyze({}, artifactInput) + self.assertEqual(results["summary"], "harmless") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/urlhaus/__init__.py b/salt/sensoroni/files/analyzers/urlhaus/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/urlhaus/requirements.txt b/salt/sensoroni/files/analyzers/urlhaus/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlhaus/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git 
a/salt/sensoroni/files/analyzers/urlhaus/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlhaus/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlhaus/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlhaus/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlhaus/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlhaus/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/urlhaus.json b/salt/sensoroni/files/analyzers/urlhaus/urlhaus.json new file mode 100644 index 000000000..d9cf1dce0 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlhaus/urlhaus.json @@ -0,0 +1,8 @@ +{ + "name": "Urlhaus", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries URLHaus to see if a URL is considered malicious.", + "supportedTypes" : ["url"], + "baseUrl": "https://urlhaus-api.abuse.ch/v1/url/" +} diff --git a/salt/sensoroni/files/analyzers/urlhaus/urlhaus.py b/salt/sensoroni/files/analyzers/urlhaus/urlhaus.py new file mode 100644 index 000000000..3c326d3b0 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlhaus/urlhaus.py @@ -0,0 +1,52 @@ +import json +import requests +import sys +import helpers + + +def buildReq(artifact_value): + return {"url": artifact_value} + + +def sendReq(meta, payload): + url = meta['baseUrl'] + response = requests.request('POST', url, data=payload) + return response.json() + + +def prepareResults(raw): + if 'threat' in raw: + summary = raw['threat'] + status = "threat" + elif 'query_status' in raw: + summary = raw['query_status'] + if summary == 'no_results': + status = "ok" + else: + status = "caution" + else: + summary = "internal_failure" + status = "caution" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(input): + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + payload = buildReq(data["value"]) + response = sendReq(meta, payload) + return prepareResults(response) + + +def main(): + if 
len(sys.argv) == 2: + results = analyze(sys.argv[1]) + print(json.dumps(results)) + else: + print("ERROR: Missing input JSON") + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/urlhaus/urlhaus_test.py b/salt/sensoroni/files/analyzers/urlhaus/urlhaus_test.py new file mode 100644 index 000000000..ae4584ee5 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlhaus/urlhaus_test.py @@ -0,0 +1,72 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from urlhaus import urlhaus +import unittest + + +class TestUrlhausMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.stdout', new=StringIO()) as mock_stdout: + sys.argv = ["cmd"] + urlhaus.main() + self.assertEqual(mock_stdout.getvalue(), "ERROR: Missing input JSON\n") + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('urlhaus.urlhaus.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + urlhaus.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_buildReq(self): + result = urlhaus.buildReq("test") + self.assertEqual("test", result["url"]) + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + meta = {"baseUrl": "myurl"} + response = urlhaus.sendReq(meta, "mypayload") + mock.assert_called_once_with("POST", "myurl", data="mypayload") + self.assertIsNotNone(response) + + def test_prepareResults_none(self): + raw = {"query_status": "no_results"} + results = urlhaus.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_invalidUrl(self): + raw = {"query_status": "invalid_url"} + results = urlhaus.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "invalid_url") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_threat(self): + raw = {"query_status": "invalid_url"} # This is overridden in this scenario + raw["threat"] = "bad_actor" + results = urlhaus.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "bad_actor") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_error(self): + raw = {} + results = urlhaus.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"threat": "malware_download"} + artifactInput = '{"value":"foo","artifactType":"url"}' + with patch('urlhaus.urlhaus.sendReq', new=MagicMock(return_value=output)) as mock: + results = urlhaus.analyze(artifactInput) + self.assertEqual(results["summary"], "malware_download") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/urlscan/__init__.py b/salt/sensoroni/files/analyzers/urlscan/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/urlscan/requirements.txt b/salt/sensoroni/files/analyzers/urlscan/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git
a/salt/sensoroni/files/analyzers/urlscan/urlscan.json b/salt/sensoroni/files/analyzers/urlscan/urlscan.json new file mode 100644 index 000000000..75e85bdbf --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan.json @@ -0,0 +1,7 @@ +{ + "name": "Urlscan", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer submits a URL to Urlscan for context around an observable.", + "supportedTypes" : ["url"] +} diff --git a/salt/sensoroni/files/analyzers/urlscan/urlscan.py b/salt/sensoroni/files/analyzers/urlscan/urlscan.py new file mode 100755 index 000000000..94c3ec8db --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan.py @@ -0,0 +1,91 @@ +import json +import requests +import helpers +import sys +import os +import argparse +import time + + +def checkConfigRequirements(conf): + if "enabled" in conf: + if "api_key" not in conf or len(conf['api_key']) == 0: + sys.exit(126) + else: + return True + else: + sys.exit(126) + + +def buildReq(conf, artifact_type, artifact_value): + headers = {"API-Key": conf["api_key"]} + url = conf['base_url'] + 'scan/' + visibility = conf['visibility'] + data = {"url": artifact_value, "visibility": visibility} + return url, headers, data + + +def getReport(conf, report_url): + report = requests.request('GET', report_url) + timeout = int(conf.get('timeout', 300)) + counter = 0 + while report.status_code == 404: + time.sleep(2) + counter += 2 + if counter >= timeout: + break + report = requests.request('GET', report_url) + return report + + +def sendReq(url, headers, data): + submission = requests.request('POST', url=url, headers=headers, data=data).json() + report_url = submission['api'] + return report_url + + +def prepareResults(raw): + if raw and "verdicts" in raw: + if raw["verdicts"]["overall"]["malicious"] is True: + status = "threat" + summary = "malicious" + elif raw["verdicts"]["overall"]["score"] > 0: + status = "caution" + summary = "suspicious" + else: + status = "info" + summary = "Scan complete."
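The branches above map urlscan's overall verdict onto the analyzer's status values; a minimal illustration, assuming the import layout used by urlscan_test.py further below:

```python
# Illustration only: verdict mapping in urlscan.prepareResults().
from urlscan import urlscan

benign = {"verdicts": {"overall": {"score": 0, "malicious": False}}}
flagged = {"verdicts": {"overall": {"score": 100, "malicious": True}}}
print(urlscan.prepareResults(benign)["summary"])   # "Scan complete."
print(urlscan.prepareResults(flagged)["summary"])  # "malicious"
```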
+ else: + status = "caution" + summary = "internal_failure" + + results = {'response': raw, 'status': status, 'summary': summary} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + request = buildReq(conf, data["artifactType"], data["value"]) + report_url = sendReq(request[0], request[1], request[2]) + time.sleep(10) + report = getReport(conf, report_url) + return prepareResults(report.json()) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Submit a given URL artifact to urlscan.io for analysis') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/urlscan.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/urlscan/urlscan.yaml b/salt/sensoroni/files/analyzers/urlscan/urlscan.yaml new file mode 100644 index 000000000..986a61359 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan.yaml @@ -0,0 +1,5 @@ +base_url: https://urlscan.io/api/v1/ +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:urlscan:api_key', '') }}" +enabled: "{{ salt['pillar.get']('sensoroni:analyzers:urlscan:enabled', 'False') }}" +visibility: "{{ salt['pillar.get']('sensoroni:analyzers:urlscan:visibility', 'public') }}" +timeout: "{{ salt['pillar.get']('sensoroni:analyzers:urlscan:timeout', '180') }}" diff --git a/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py b/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py new file mode 100644 index 000000000..487e6dbe3 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py @@ -0,0 +1,121 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock, PropertyMock, call +from urlscan import urlscan +import unittest + + +class TestUrlScanMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + urlscan.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('urlscan.urlscan.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + urlscan.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_checkConfigRequirements_notEnabled(self): + conf = {"not_a_key": "abcd12345"} + with self.assertRaises(SystemExit) as cm: + urlscan.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_checkConfigRequirements_noApikey(self): + conf = {"enabled": True, "not_a_key": "abcd12345"} + with self.assertRaises(SystemExit) as cm: + urlscan.checkConfigRequirements(conf) + self.assertEqual(cm.exception.code, 126) + + def test_checkConfigRequirements_Exist(self): + conf = {"enabled": True, "api_key": "abcd12345"} +
config_exists = urlscan.checkConfigRequirements(conf) + self.assertTrue(config_exists) + + def test_buildReq(self): + conf = {'base_url': 'https://myurl/api/v1/', 'api_key': 'abcd12345', 'visibility': 'public'} + artifact_type = "url" + artifact_value = "https://abc.com" + result = urlscan.buildReq(conf, artifact_type, artifact_value) + self.assertEqual("https://myurl/api/v1/scan/", result[0]) + self.assertEqual({'API-Key': 'abcd12345'}, result[1]) + + def test_sendReq(self): + with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock: + headers = {"API-Key": "abcd1234"} + data = {"url": "https://urlscan.io", "visibility": "public"} + response = urlscan.sendReq("https://myurl", headers=headers, data=data) + mock.assert_called_once_with("POST", url="https://myurl", headers={"API-Key": "abcd1234"}, data={"url": "https://urlscan.io", "visibility": "public"}) + self.assertIsNotNone(response) + + def test_getReport_noRetry(self): + output_report = MagicMock() + type(output_report).status_code = PropertyMock(return_value=404) + output_report_body = {"requests": "body"} + output_report.json.return_value = output_report_body + with patch('requests.request', new=MagicMock(return_value=output_report)) as mock: + result = urlscan.getReport({'timeout': 0}, "https://abc.com/report") + self.assertEqual(404, result.status_code) + mock.assert_called_once() + + def test_getReport_withRetry(self): + output_report = MagicMock() + type(output_report).status_code = PropertyMock(return_value=404) + output_report_body = {"requests": "body"} + output_report.json.return_value = output_report_body + with patch('requests.request', new=MagicMock(return_value=output_report)) as mock: + result = urlscan.getReport({'timeout': 3}, "https://abc.com/report") + self.assertEqual(404, result.status_code) + mock.assert_has_calls([call('GET', 'https://abc.com/report'), call('GET', 'https://abc.com/report')]) + + def test_prepareResults_sus(self): + raw = {"requests": [{"request": {"requestId": "1"}}], "verdicts": {"overall": {"score": 50, "malicious": False, "hasVerdicts": False}}} + results = urlscan.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_mal(self): + raw = {"requests": [{"request": {"requestId": "2"}}], "verdicts": {"overall": {"score": 100, "malicious": True, "hasVerdicts": False}}} + results = urlscan.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_info(self): + raw = {"requests": [{"request": {"requestId": "3"}}], "verdicts": {"overall": {"score": 0, "malicious": False, "hasVerdicts": False}}} + results = urlscan.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "Scan complete.") + self.assertEqual(results["status"], "info") + + def test_prepareResults_error(self): + raw = {} + results = urlscan.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output_req = "https://myurl/report" + output_report = MagicMock() + output_report_body = {"requests": [{"request": {"requestId": "3"}}], "verdicts": {"overall": {"score": 0, "malicious": False, "hasVerdicts": False}}} + output_report.json.return_value = 
output_report_body + artifactInput = '{"value":"https://abc.com","artifactType":"url"}' + conf = {'enabled': True, 'base_url': 'https://myurl/api/v1/', 'api_key': 'abcd12345', 'visibility': 'public'} + with patch('urlscan.urlscan.sendReq', new=MagicMock(return_value=output_req)) as mock_req: + with patch('urlscan.urlscan.getReport', new=MagicMock(return_value=output_report)) as mock_report: + results = urlscan.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "Scan complete.") + mock_req.assert_called_once() + mock_report.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/virustotal/__init__.py b/salt/sensoroni/files/analyzers/virustotal/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/virustotal/requirements.txt b/salt/sensoroni/files/analyzers/virustotal/requirements.txt new file mode 100644 index 000000000..a8980057f --- /dev/null +++ b/salt/sensoroni/files/analyzers/virustotal/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +pyyaml>=6.0 diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl new file mode 100644 index 000000000..b390ce52f Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/virustotal.json b/salt/sensoroni/files/analyzers/virustotal/virustotal.json new file mode 100644 index 000000000..e26d67b4b --- /dev/null 
+++ b/salt/sensoroni/files/analyzers/virustotal/virustotal.json @@ -0,0 +1,7 @@ +{ + "name": "VirusTotal", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries VirusTotal to see if a string value is considered malicious.", + "supportedTypes" : ["domain", "hash", "ip", "url"] +} diff --git a/salt/sensoroni/files/analyzers/virustotal/virustotal.py b/salt/sensoroni/files/analyzers/virustotal/virustotal.py new file mode 100644 index 000000000..f06c5d14f --- /dev/null +++ b/salt/sensoroni/files/analyzers/virustotal/virustotal.py @@ -0,0 +1,91 @@ +import json +import requests +import argparse +import helpers +import os +import sys + + +def checkConfigRequirements(conf): + if "api_key" not in conf or len(conf['api_key']) == 0: + sys.exit(126) + else: + return True + + +def buildHeaders(conf): + headers = {"x-apikey": conf["api_key"]} + return headers + + +def sendReq(conf, meta, payload, headers): + url = conf['base_url'] + response = requests.request('GET', url + payload, headers=headers) + return response.json() + + +def prepareResults(raw): + malicious = 0 + harmless = 0 + undetected = 0 + suspicious = 0 + timeout = 0 + + if "data" in raw: + entries = raw["data"] + for data in entries: + if "attributes" in data: + attrs = data["attributes"] + if "last_analysis_stats" in attrs: + stats = attrs["last_analysis_stats"] + if len(stats) > 0: + suspicious += stats["suspicious"] + malicious += stats["malicious"] + harmless += stats["harmless"] + undetected += stats["undetected"] + timeout += stats["timeout"] + + if malicious > 0: + summary = "malicious" + status = "threat" + elif suspicious > 0: + summary = "suspicious" + status = "caution" + elif timeout > 0: + summary = "timeout" + status = "caution" + elif harmless > 0 or undetected > 0: + summary = "harmless" + status = "ok" + else: + summary = "internal_failure" + status = "caution" + + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(conf, input): + checkConfigRequirements(conf) + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + headers = buildHeaders(conf) + response = sendReq(conf, meta, data["value"], headers) + return prepareResults(response) + + +def main(): + dir = os.path.dirname(os.path.realpath(__file__)) + parser = argparse.ArgumentParser(description='Search VirusTotal for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/virustotal.yaml", help='optional config file to use instead of the default config file') + + args = parser.parse_args() + if args.artifact: + results = analyze(helpers.loadConfig(args.config), args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/virustotal/virustotal.yaml b/salt/sensoroni/files/analyzers/virustotal/virustotal.yaml new file mode 100644 index 000000000..0eb017842 --- /dev/null +++ b/salt/sensoroni/files/analyzers/virustotal/virustotal.yaml @@ -0,0 +1,2 @@ +base_url: https://www.virustotal.com/api/v3/search?query= +api_key: "{{ salt['pillar.get']('sensoroni:analyzers:virustotal:api_key', '') }}" \ No newline at end of file diff --git a/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py b/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py new file mode 100644 index 000000000..103e137ce --- /dev/null +++ 
b/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py @@ -0,0 +1,155 @@
+from io import StringIO
+import sys
+from unittest.mock import patch, MagicMock
+from virustotal import virustotal
+import unittest
+
+
+class TestVirusTotalMethods(unittest.TestCase):
+
+ def test_main_missing_input(self):
+ with patch('sys.exit', new=MagicMock()) as sysmock:
+ with patch('sys.stderr', new=StringIO()) as mock_stderr:
+ sys.argv = ["cmd"]
+ virustotal.main()
+ self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
+ sysmock.assert_called_once_with(2)
+
+ def test_main_success(self):
+ output = {"foo": "bar"}
+ with patch('sys.stdout', new=StringIO()) as mock_stdout:
+ with patch('virustotal.virustotal.analyze', new=MagicMock(return_value=output)) as mock:
+ sys.argv = ["cmd", "input"]
+ virustotal.main()
+ expected = '{"foo": "bar"}\n'
+ self.assertEqual(mock_stdout.getvalue(), expected)
+ mock.assert_called_once()
+
+ def test_checkConfigRequirements(self):
+ conf = {"not_a_key": "abcd12345"}
+ with self.assertRaises(SystemExit) as cm:
+ virustotal.checkConfigRequirements(conf)
+ self.assertEqual(cm.exception.code, 126)
+
+ def test_buildHeaders(self):
+ result = virustotal.buildHeaders({"api_key": "xyz"})
+ self.assertEqual("xyz", result["x-apikey"])
+
+ def test_sendReq(self):
+ with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
+ meta = {}
+ conf = {"base_url": "myurl="}
+ response = virustotal.sendReq(conf=conf, meta=meta, payload="mypayload", headers={"x-apikey": "xyz"})
+ mock.assert_called_once_with("GET", "myurl=mypayload", headers={"x-apikey": "xyz"})
+ self.assertIsNotNone(response)
+
+ def test_prepareResults_timeout(self):
+ raw = {"data": [{"attributes": {"last_analysis_stats": {
+ "harmless": 1,
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 1,
+ "timeout": 1
+ }}}, {"attributes": {"last_analysis_stats": {
+ "harmless": 7,
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 11,
+ "timeout": 0
+ }}}]}
+ results = virustotal.prepareResults(raw)
+ self.assertEqual(results["response"], raw)
+ self.assertEqual(results["summary"], "timeout")
+ self.assertEqual(results["status"], "caution")
+
+ def test_prepareResults_ok_multiple(self):
+ raw = {"data": [{"attributes": {"last_analysis_stats": {
+ "harmless": 1,
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 0,
+ "timeout": 0
+ }}}, {"attributes": {"last_analysis_stats": {
+ "harmless": 7,
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 11,
+ "timeout": 0
+ }}}]}
+ results = virustotal.prepareResults(raw)
+ self.assertEqual(results["response"], raw)
+ self.assertEqual(results["summary"], "harmless")
+ self.assertEqual(results["status"], "ok")
+
+ def test_prepareResults_sus_multiple(self):
+ raw = {"data": [{"attributes": {"last_analysis_stats": {
+ "harmless": 10,
+ "malicious": 0,
+ "suspicious": 2,
+ "undetected": 0,
+ "timeout": 0
+ }}}, {"attributes": {"last_analysis_stats": {
+ "harmless": 76,
+ "malicious": 0,
+ "suspicious": 1,
+ "undetected": 11,
+ "timeout": 0
+ }}}]}
+ results = virustotal.prepareResults(raw)
+ self.assertEqual(results["response"], raw)
+ self.assertEqual(results["summary"], "suspicious")
+ self.assertEqual(results["status"], "caution")
+
+ def test_prepareResults_threat_multiple(self):
+ raw = {"data": [{"attributes": {"last_analysis_stats": {
+ "harmless": 1,
+ "malicious": 0,
+ "suspicious": 0,
+ "undetected": 0,
+ "timeout": 0
+ }}}, {"attributes": 
{"last_analysis_stats": { + "harmless": 76, + "malicious": 5, + "suspicious": 1, + "undetected": 11, + "timeout": 0 + }}}]} + results = virustotal.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_threat(self): + raw = {"data": [{"attributes": {"last_analysis_stats": { + "harmless": 76, + "malicious": 5, + "suspicious": 1, + "undetected": 11, + "timeout": 0 + }}}]} + results = virustotal.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_error(self): + raw = {} + results = virustotal.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"data": [{"attributes": {"last_analysis_stats": { + "harmless": 0, + "malicious": 0, + "suspicious": 0, + "undetected": 1, + "timeout": 0 + }}}]} + artifactInput = '{"value":"foo","artifactType":"url"}' + conf = {"api_key": "xyz"} + with patch('virustotal.virustotal.sendReq', new=MagicMock(return_value=output)) as mock: + results = virustotal.analyze(conf, artifactInput) + self.assertEqual(results["summary"], "harmless") + mock.assert_called_once() diff --git a/salt/sensoroni/files/sensoroni.json b/salt/sensoroni/files/sensoroni.json index 743021a7d..04f2abf93 100644 --- a/salt/sensoroni/files/sensoroni.json +++ b/salt/sensoroni/files/sensoroni.json @@ -2,6 +2,8 @@ {%- set DESCRIPTION = salt['pillar.get']('sensoroni:node_description', '') %} {%- set MODEL = salt['grains.get']('sosmodel', '') %} {%- set ADDRESS = salt['pillar.get']('sensoroni:node_address') %} +{%- set ANALYZE_TIMEOUT_MS = salt['pillar.get']('sensoroni:analyze_timeout_ms', 900000) %} +{%- set ANALYZE_PARALLEL_LIMIT = salt['pillar.get']('sensoroni:analyze_parallel_limit', 5) %} {%- set SENSORONIKEY = salt['pillar.get']('global:sensoronikey', '') %} {%- set CHECKININTERVALMS = salt['pillar.get']('sensoroni:node_checkin_interval_ms', 10000) %} {%- set ROLE = grains.id.split('_') | last %} @@ -11,6 +13,12 @@ {%- set STENODEFAULT = False %} {%- endif %} {%- set STENOENABLED = salt['pillar.get']('steno:enabled', STENODEFAULT) %} +{%- if ROLE in ['eval', 'standalone', 'import', 'manager', 'managersearch'] %} +{%- set ANALYZEDEFAULT = True %} +{%- else %} +{%- set ANALYZEDEFAULT = False %} +{%- endif %} +{%- set ANALYZEENABLED = salt['pillar.get']('sensoroni:analyze_enabled', ANALYZEDEFAULT) %} { "logFilename": "/opt/sensoroni/logs/sensoroni.log", "logLevel":"info", @@ -24,6 +32,12 @@ "serverUrl": "https://{{ URLBASE }}/sensoroniagents", "verifyCert": false, "modules": { +{%- if ANALYZEENABLED %} + "analyze": { + "timeoutMs": {{ ANALYZE_TIMEOUT_MS }}, + "parallelLimit": {{ ANALYZE_PARALLEL_LIMIT }} + }, +{%- endif %} "importer": {}, "statickeyauth": { "apiKey": "{{ SENSORONIKEY }}" diff --git a/salt/sensoroni/init.sls b/salt/sensoroni/init.sls index 1405c72bf..6d49d33ab 100644 --- a/salt/sensoroni/init.sls +++ b/salt/sensoroni/init.sls @@ -18,6 +18,13 @@ sensoroniagentconf: - mode: 600 - template: jinja +analyzersdir: + file.directory: + - name: /opt/so/conf/sensoroni/analyzers + - user: 939 + - group: 939 + - makedirs: True + sensoronilog: file.directory: - name: /opt/so/log/sensoroni @@ -25,6 +32,15 @@ sensoronilog: - group: 939 - makedirs: True 
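The `analyzerscripts` state added below distributes everything under `salt://sensoroni/files/analyzers` to `/opt/so/conf/sensoroni/analyzers`, which the `so-sensoroni` container then mounts at `/opt/sensoroni/analyzers`. Every analyzer in that tree follows the same command-line contract as the urlscan.py and virustotal.py modules above: take an artifact as a JSON argument, load a YAML config, print a single JSON object with `response`, `status` (one of ok, info, caution, or threat), and `summary` keys, and exit 126 when required config is missing. A minimal sketch of that contract, assuming the shared `helpers` module behaves as it is used above (the `example` module name and `example.yaml` config filename are hypothetical):

```
import argparse
import json
import os
import sys

import helpers  # shared loader used by the analyzers in this directory


def checkConfigRequirements(conf):
    # mirror the existing analyzers: exit 126 when required config is missing
    if "api_key" not in conf or len(conf["api_key"]) == 0:
        sys.exit(126)
    return True


def analyze(conf, input):
    checkConfigRequirements(conf)
    data = helpers.parseArtifact(input)  # yields {"value": ..., "artifactType": ...}
    # ... query the upstream service with data["value"] here ...
    # every analyzer returns these three keys
    return {"response": {}, "status": "info", "summary": "Scan complete."}


def main():
    dir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser(description='Example analyzer skeleton')
    parser.add_argument('artifact', help='the artifact represented in JSON format')
    parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/example.yaml", help='optional config file to use instead of the default config file')
    args = parser.parse_args()
    if args.artifact:
        print(json.dumps(analyze(helpers.loadConfig(args.config), args.artifact)))


if __name__ == "__main__":
    main()
```

Run as `python3 example.py '{"value":"https://abc.com","artifactType":"url"}'`, it prints its result to stdout in the same shape the tests above assert against.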
+analyzerscripts: + file.recurse: + - name: /opt/so/conf/sensoroni/analyzers + - user: 939 + - group: 939 + - file_mode: 755 + - template: jinja + - source: salt://sensoroni/files/analyzers + so-sensoroni: docker_container.running: - image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-soc:{{ VERSION }} @@ -35,6 +51,7 @@ so-sensoroni: - /nsm/import:/nsm/import:rw - /nsm/pcapout:/nsm/pcapout:rw - /opt/so/conf/sensoroni/sensoroni.json:/opt/sensoroni/sensoroni.json:ro + - /opt/so/conf/sensoroni/analyzers:/opt/sensoroni/analyzers:rw - /opt/so/log/sensoroni:/opt/sensoroni/logs:rw - watch: - file: /opt/so/conf/sensoroni/sensoroni.json diff --git a/salt/soc/files/soc/dashboards.queries.json b/salt/soc/files/soc/dashboards.queries.json new file mode 100644 index 000000000..14247721c --- /dev/null +++ b/salt/soc/files/soc/dashboards.queries.json @@ -0,0 +1,45 @@ +[ + { "name": "Overview", "description": "Show all events grouped by the origin host", "query": "* | groupby event.dataset | groupby event.module | groupby event.category | groupby observer.name | groupby source.ip | groupby destination.ip"}, + { "name": "Elastalerts", "description": "", "query": "_type:elastalert | groupby rule.name"}, + { "name": "Alerts", "description": "Show all alerts", "query": "event.dataset: alert | groupby event.module | groupby rule.name | groupby event.severity | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "NIDS Alerts", "description": "NIDS alerts", "query": "event.category: network AND event.dataset: alert | groupby rule.category | groupby rule.gid | groupby rule.uuid | groupby rule.name | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Wazuh/OSSEC", "description": "Wazuh/OSSEC HIDS alerts and logs", "query": "event.module:ossec | groupby rule.category | groupby rule.uuid | groupby rule.name | groupby agent.id | groupby agent.name | groupby log.full"}, + { "name": "Sysmon", "description": "Sysmon logs", "query": "event.module:sysmon | groupby event.dataset | groupby user.name | groupby process.executable | groupby process.command_line | groupby process.parent.command_line"}, + { "name": "Strelka", "description": "Strelka logs", "query": "event.module:strelka | groupby file.mime_type | groupby file.name | groupby file.source"}, + { "name": "Zeek Notice", "description": "Zeek Notice logs", "query": "event.dataset:notice | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Connections", "description": "Connection logs", "query": "event.dataset:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name"}, + { "name": "DCE_RPC", "description": "DCE_RPC logs", "query": "event.dataset:dce_rpc | groupby dce_rpc.operation | groupby dce_rpc.endpoint | groupby dce_rpc.named_pipe | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "DHCP", "description": "Dynamic Host Configuration Protocol leases", "query": "event.dataset:dhcp | groupby host.hostname | groupby host.domain | groupby dhcp.message_types | groupby client.address | groupby server.address"}, + { "name": "DNP3", "description": "DNP3 logs", "query": "event.dataset:dnp3 | groupby dnp3.fc_request | groupby 
dnp3.fc_reply | groupby dnp3.iin | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "DNS", "description": "Domain Name System queries", "query": "event.dataset:dns | groupby dns.query.name | groupby dns.query.type_name | groupby dns.response.code_name | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "DPD", "description": "Dynamic Protocol Detection errors", "query": "event.dataset:dpd | groupby error.reason | groupby source.ip | groupby destination.ip | groupby destination.port | groupby network.protocol"}, + { "name": "Files", "description": "Files seen in network traffic", "query": "event.dataset:file | groupby file.mime_type | groupby file.source | groupby file.bytes.total | groupby source.ip | groupby destination.ip"}, + { "name": "FTP", "description": "File Transfer Protocol logs", "query": "event.dataset:ftp | groupby ftp.command | groupby ftp.argument | groupby ftp.user | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "HTTP", "description": "Hyper Text Transport Protocol logs", "query": "event.dataset:http | groupby http.method | groupby http.status_code | groupby http.status_message | groupby http.useragent | groupby http.virtual_host | groupby http.uri | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Intel", "description": "Zeek Intel framework hits", "query": "event.dataset:intel | groupby intel.indicator | groupby intel.indicator_type | groupby intel.seen_where | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "IRC", "description": "Internet Relay Chat logs", "query": "event.dataset:irc | groupby irc.command.type | groupby irc.username | groupby irc.nickname | groupby irc.command.value | groupby irc.command.info | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "KERBEROS", "description": "KERBEROS logs", "query": "event.dataset:kerberos | groupby kerberos.service | groupby kerberos.client | groupby kerberos.request_type | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "MODBUS", "description": "MODBUS logs", "query": "event.dataset:modbus | groupby modbus.function | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "MYSQL", "description": "MYSQL logs", "query": "event.dataset:mysql | groupby mysql.command | groupby mysql.argument | groupby mysql.success | groupby mysql.response | groupby mysql.rows | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "NOTICE", "description": "Zeek notice logs", "query": "event.dataset:notice | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "NTLM", "description": "NTLM logs", "query": "event.dataset:ntlm | groupby ntlm.server.dns.name | groupby ntlm.server.nb.name | groupby ntlm.server.tree.name | groupby ntlm.success | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Osquery Live Queries", "description": "Osquery Live Query results", "query": "event.dataset:live_query | groupby host.hostname"}, + { "name": "PE", "description": "PE files list", "query": "event.dataset:pe | groupby file.machine | groupby file.os | groupby file.subsystem | groupby file.section_names | groupby file.is_exe | groupby file.is_64bit"}, + { "name": "RADIUS", "description": "RADIUS logs", "query": 
"event.dataset:radius | groupby user.name.keyword | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "RDP", "description": "RDP logs", "query": "event.dataset:rdp | groupby client.name | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "RFB", "description": "RFB logs", "query": "event.dataset:rfb | groupby rfb.desktop.name.keyword | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Signatures", "description": "Zeek signatures", "query": "event.dataset:signatures | groupby signature_id"}, + { "name": "SIP", "description": "SIP logs", "query": "event.dataset:sip | groupby client.user_agent | groupby sip.method | groupby sip.uri | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SMB_Files", "description": "SMB files", "query": "event.dataset:smb_files | groupby file.action | groupby file.path | groupby file.name | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SMB_Mapping", "description": "SMB mapping logs", "query": "event.dataset:smb_mapping | groupby smb.share_type | groupby smb.path | groupby smb.service | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SMTP", "description": "SMTP logs", "query": "event.dataset:smtp | groupby smtp.from | groupby smtp.recipient_to | groupby smtp.subject | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SNMP", "description": "SNMP logs", "query": "event.dataset:snmp | groupby snmp.community | groupby snmp.version | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Software", "description": "List of software seen on the network by Zeek", "query": "event.dataset:software | groupby software.type | groupby software.name | groupby source.ip"}, + { "name": "SSH", "description": "SSH connections seen by Zeek", "query": "event.dataset:ssh | groupby ssh.client | groupby ssh.server | groupby ssh.direction | groupby ssh.version | groupby ssh.hassh_version | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SSL", "description": "SSL logs", "query": "event.dataset:ssl | groupby ssl.version | groupby ssl.validation_status | groupby ssl.server_name | groupby ssl.certificate.issuer | groupby ssl.certificate.subject | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SYSLOG", "description": "SYSLOG logs", "query": "event.dataset:syslog | groupby syslog.severity_label | groupby syslog.facility_label | groupby network.protocol | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Tunnel", "description": "Tunnels seen by Zeek", "query": "event.dataset:tunnel | groupby tunnel.type | groupby event.action | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Weird", "description": "Weird network traffic seen by Zeek", "query": "event.dataset:weird | groupby weird.name | groupby weird.additional_info | groupby source.ip | groupby destination.ip | groupby destination.port "}, + { "name": "x509", "description": "x.509 certificates seen by Zeek", "query": "event.dataset:x509 | groupby x509.certificate.key.length | groupby x509.san_dns | groupby x509.certificate.key.type | groupby x509.certificate.subject | groupby x509.certificate.issuer"}, + { "name": "Firewall", "description": "Firewall logs", "query": "event.dataset:firewall | groupby rule.action | 
groupby interface.name | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port"} +] \ No newline at end of file diff --git a/salt/soc/files/soc/hunt.queries.json b/salt/soc/files/soc/hunt.queries.json index 5a76e0fa1..01692ad0b 100644 --- a/salt/soc/files/soc/hunt.queries.json +++ b/salt/soc/files/soc/hunt.queries.json @@ -1,67 +1,67 @@ [ - { "name": "Default Query", "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name"}, - { "name": "Log Type", "description": "Show all events grouped by module and dataset", "query": "* | groupby event.module event.dataset"}, - { "name": "SOC Auth", "description": "Users authenticated to SOC grouped by IP address and identity", "query": "event.module:kratos AND event.dataset:audit AND msg:authenticated | groupby http_request.headers.x-real-ip identity_id"}, - { "name": "Elastalerts", "description": "", "query": "_type:elastalert | groupby rule.name"}, - { "name": "Alerts", "description": "Show all alerts grouped by alert source", "query": "event.dataset: alert | groupby event.module"}, - { "name": "NIDS Alerts", "description": "Show all NIDS alerts grouped by alert", "query": "event.category: network AND event.dataset: alert | groupby rule.category rule.gid rule.uuid rule.name"}, - { "name": "Wazuh/OSSEC Alerts", "description": "Show all Wazuh alerts at Level 5 or higher grouped by category", "query": "event.module:ossec AND event.dataset:alert AND rule.level:>4 | groupby rule.category rule.name"}, - { "name": "Wazuh/OSSEC Alerts", "description": "Show all Wazuh alerts at Level 4 or lower grouped by category", "query": "event.module:ossec AND event.dataset:alert AND rule.level:<5 | groupby rule.category rule.name"}, - { "name": "Wazuh/OSSEC Users and Commands", "description": "Show all Wazuh alerts grouped by username and command line", "query": "event.module:ossec AND event.dataset:alert | groupby user.escalated.keyword process.command_line"}, - { "name": "Wazuh/OSSEC Processes", "description": "Show all Wazuh alerts grouped by process name", "query": "event.module:ossec AND event.dataset:alert | groupby process.name"}, - { "name": "Sysmon Events", "description": "Show all Sysmon logs grouped by event type", "query": "event.module:sysmon | groupby event.dataset"}, - { "name": "Sysmon Usernames", "description": "Show all Sysmon logs grouped by username", "query": "event.module:sysmon | groupby event.dataset, user.name.keyword"}, - { "name": "Strelka", "description": "Show all Strelka logs grouped by file type", "query": "event.module:strelka | groupby file.mime_type"}, - { "name": "Zeek Notice", "description": "Show notices from Zeek", "query": "event.dataset:notice | groupby notice.note notice.message"}, - { "name": "Connections", "description": "Connections grouped by IP and Port", "query": "event.dataset:conn | groupby source.ip destination.ip network.protocol destination.port"}, - { "name": "Connections", "description": "Connections grouped by Service", "query": "event.dataset:conn | groupby network.protocol destination.port"}, - { "name": "Connections", "description": "Connections grouped by destination country", "query": "event.dataset:conn | groupby destination.geo.country_name"}, - { "name": "Connections", "description": "Connections grouped by source country", "query": "event.dataset:conn | groupby source.geo.country_name"}, - { "name": "DCE_RPC", "description": "DCE_RPC grouped by operation", "query": "event.dataset:dce_rpc | groupby dce_rpc.operation"}, - { 
"name": "DHCP", "description": "DHCP leases", "query": "event.dataset:dhcp | groupby host.hostname client.address"}, - { "name": "DHCP", "description": "DHCP grouped by message type", "query": "event.dataset:dhcp | groupby dhcp.message_types"}, - { "name": "DNP3", "description": "DNP3 grouped by reply", "query": "event.dataset:dnp3 | groupby dnp3.fc_reply"}, - { "name": "DNS", "description": "DNS queries grouped by port", "query": "event.dataset:dns | groupby dns.query.name destination.port"}, - { "name": "DNS", "description": "DNS queries grouped by type", "query": "event.dataset:dns | groupby dns.query.type_name destination.port"}, - { "name": "DNS", "description": "DNS queries grouped by response code", "query": "event.dataset:dns | groupby dns.response.code_name destination.port"}, - { "name": "DNS", "description": "DNS highest registered domain", "query": "event.dataset:dns | groupby dns.highest_registered_domain.keyword destination.port"}, - { "name": "DNS", "description": "DNS grouped by parent domain", "query": "event.dataset:dns | groupby dns.parent_domain.keyword destination.port"}, - { "name": "DPD", "description": "Dynamic Protocol Detection errors", "query": "event.dataset:dpd | groupby error.reason"}, - { "name": "Files", "description": "Files grouped by mimetype", "query": "event.dataset:file | groupby file.mime_type source.ip"}, - { "name": "Files", "description": "Files grouped by source", "query": "event.dataset:file | groupby file.source source.ip"}, - { "name": "FTP", "description": "FTP grouped by command and argument", "query": "event.dataset:ftp | groupby ftp.command ftp.argument"}, - { "name": "FTP", "description": "FTP grouped by username and argument", "query": "event.dataset:ftp | groupby ftp.user ftp.argument"}, - { "name": "HTTP", "description": "HTTP grouped by destination port", "query": "event.dataset:http | groupby destination.port"}, - { "name": "HTTP", "description": "HTTP grouped by status code and message", "query": "event.dataset:http | groupby http.status_code http.status_message"}, - { "name": "HTTP", "description": "HTTP grouped by method and user agent", "query": "event.dataset:http | groupby http.method http.useragent"}, - { "name": "HTTP", "description": "HTTP grouped by virtual host", "query": "event.dataset:http | groupby http.virtual_host"}, - { "name": "HTTP", "description": "HTTP with exe downloads", "query": "event.dataset:http AND (file.resp_mime_types:dosexec OR file.resp_mime_types:executable) | groupby http.virtual_host"}, - { "name": "Intel", "description": "Intel framework hits grouped by indicator", "query": "event.dataset:intel | groupby intel.indicator.keyword"}, - { "name": "IRC", "description": "IRC grouped by command", "query": "event.dataset:irc | groupby irc.command.type"}, - { "name": "KERBEROS", "description": "KERBEROS grouped by service", "query": "event.dataset:kerberos | groupby kerberos.service"}, - { "name": "MODBUS", "description": "MODBUS grouped by function", "query": "event.dataset:modbus | groupby modbus.function"}, - { "name": "MYSQL", "description": "MYSQL grouped by command", "query": "event.dataset:mysql | groupby mysql.command"}, - { "name": "NOTICE", "description": "Zeek notice logs grouped by note and message", "query": "event.dataset:notice | groupby notice.note notice.message"}, - { "name": "NTLM", "description": "NTLM grouped by computer name", "query": "event.dataset:ntlm | groupby ntlm.server.dns.name"}, - { "name": "Osquery Live Queries", "description": "Osquery Live Query results grouped by computer 
name", "query": "event.dataset:live_query | groupby host.hostname"},
- { "name": "PE", "description": "PE files list", "query": "event.dataset:pe | groupby file.machine file.os file.subsystem"},
- { "name": "RADIUS", "description": "RADIUS grouped by username", "query": "event.dataset:radius | groupby user.name.keyword"},
- { "name": "RDP", "description": "RDP grouped by client name", "query": "event.dataset:rdp | groupby client.name"},
- { "name": "RFB", "description": "RFB grouped by desktop name", "query": "event.dataset:rfb | groupby rfb.desktop.name.keyword"},
- { "name": "Signatures", "description": "Zeek signatures grouped by signature id", "query": "event.dataset:signatures | groupby signature_id"},
- { "name": "SIP", "description": "SIP grouped by user agent", "query": "event.dataset:sip | groupby client.user_agent"},
- { "name": "SMB_Files", "description": "SMB files grouped by action", "query": "event.dataset:smb_files | groupby file.action"},
- { "name": "SMB_Mapping", "description": "SMB mapping grouped by path", "query": "event.dataset:smb_mapping | groupby smb.path"},
- { "name": "SMTP", "description": "SMTP grouped by subject", "query": "event.dataset:smtp | groupby smtp.subject"},
- { "name": "SNMP", "description": "SNMP grouped by version and string", "query": "event.dataset:snmp | groupby snmp.community snmp.version"},
- { "name": "Software", "description": "List of software seen on the network", "query": "event.dataset:software | groupby software.type software.name"},
- { "name": "SSH", "description": "SSH grouped by version and client", "query": "event.dataset:ssh | groupby ssh.version ssh.client"},
- { "name": "SSL", "description": "SSL grouped by version and server name", "query": "event.dataset:ssl | groupby ssl.version ssl.server_name"},
- { "name": "SYSLOG", "description": "SYSLOG grouped by severity and facility ", "query": "event.dataset:syslog | groupby syslog.severity_label syslog.facility_label"},
- { "name": "Tunnel", "description": "Tunnels grouped by type and action", "query": "event.dataset:tunnel | groupby tunnel.type event.action"},
- { "name": "Weird", "description": "Zeek weird log grouped by name", "query": "event.dataset:weird | groupby weird.name"},
- { "name": "x509", "description": "x.509 grouped by key length and name", "query": "event.dataset:x509 | groupby x509.certificate.key.length x509.san_dns"},
- { "name": "x509", "description": "x.509 grouped by name and issuer", "query": "event.dataset:x509 | groupby x509.san_dns x509.certificate.issuer"},
- { "name": "x509", "description": "x.509 grouped by name and subject", "query": "event.dataset:x509 | groupby x509.san_dns x509.certificate.subject"},
- { "name": "Firewall", "description": "Firewall events grouped by action", "query": "event.dataset:firewall | groupby rule.action"}
+ { "name": "Default Query", "showSubtitle": true, "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name"},
+ { "name": "Log Type", "showSubtitle": true, "description": "Show all events grouped by module and dataset", "query": "* | groupby event.module event.dataset"},
+ { "name": "SOC Auth", "showSubtitle": true, "description": "Users authenticated to SOC grouped by IP address and identity", "query": "event.module:kratos AND event.dataset:audit AND msg:authenticated | groupby http_request.headers.x-real-ip identity_id"},
+ { "name": "Elastalerts", "showSubtitle": true, "description": "", "query": "_type:elastalert | groupby rule.name"},
+ { "name": 
"Alerts", "showSubtitle": true, "description": "Show all alerts grouped by alert source", "query": "event.dataset: alert | groupby event.module"}, + { "name": "NIDS Alerts", "showSubtitle": true, "description": "Show all NIDS alerts grouped by alert", "query": "event.category: network AND event.dataset: alert | groupby rule.category rule.gid rule.uuid rule.name"}, + { "name": "Wazuh/OSSEC Alerts", "showSubtitle": true, "description": "Show all Wazuh alerts at Level 5 or higher grouped by category", "query": "event.module:ossec AND event.dataset:alert AND rule.level:>4 | groupby rule.category rule.name"}, + { "name": "Wazuh/OSSEC Alerts", "showSubtitle": true, "description": "Show all Wazuh alerts at Level 4 or lower grouped by category", "query": "event.module:ossec AND event.dataset:alert AND rule.level:<5 | groupby rule.category rule.name"}, + { "name": "Wazuh/OSSEC Users and Commands", "showSubtitle": true, "description": "Show all Wazuh alerts grouped by username and command line", "query": "event.module:ossec AND event.dataset:alert | groupby user.escalated.keyword process.command_line"}, + { "name": "Wazuh/OSSEC Processes", "showSubtitle": true, "description": "Show all Wazuh alerts grouped by process name", "query": "event.module:ossec AND event.dataset:alert | groupby process.name"}, + { "name": "Sysmon Events", "showSubtitle": true, "description": "Show all Sysmon logs grouped by event type", "query": "event.module:sysmon | groupby event.dataset"}, + { "name": "Sysmon Usernames", "showSubtitle": true, "description": "Show all Sysmon logs grouped by username", "query": "event.module:sysmon | groupby event.dataset, user.name.keyword"}, + { "name": "Strelka", "showSubtitle": true, "description": "Show all Strelka logs grouped by file type", "query": "event.module:strelka | groupby file.mime_type"}, + { "name": "Zeek Notice", "showSubtitle": true, "description": "Show notices from Zeek", "query": "event.dataset:notice | groupby notice.note notice.message"}, + { "name": "Connections", "showSubtitle": true, "description": "Connections grouped by IP and Port", "query": "event.dataset:conn | groupby source.ip destination.ip network.protocol destination.port"}, + { "name": "Connections", "showSubtitle": true, "description": "Connections grouped by Service", "query": "event.dataset:conn | groupby network.protocol destination.port"}, + { "name": "Connections", "showSubtitle": true, "description": "Connections grouped by destination country", "query": "event.dataset:conn | groupby destination.geo.country_name"}, + { "name": "Connections", "showSubtitle": true, "description": "Connections grouped by source country", "query": "event.dataset:conn | groupby source.geo.country_name"}, + { "name": "DCE_RPC", "showSubtitle": true, "description": "DCE_RPC grouped by operation", "query": "event.dataset:dce_rpc | groupby dce_rpc.operation"}, + { "name": "DHCP", "showSubtitle": true, "description": "DHCP leases", "query": "event.dataset:dhcp | groupby host.hostname client.address"}, + { "name": "DHCP", "showSubtitle": true, "description": "DHCP grouped by message type", "query": "event.dataset:dhcp | groupby dhcp.message_types"}, + { "name": "DNP3", "showSubtitle": true, "description": "DNP3 grouped by reply", "query": "event.dataset:dnp3 | groupby dnp3.fc_reply"}, + { "name": "DNS", "showSubtitle": true, "description": "DNS queries grouped by port", "query": "event.dataset:dns | groupby dns.query.name destination.port"}, + { "name": "DNS", "showSubtitle": true, "description": "DNS queries grouped by 
type", "query": "event.dataset:dns | groupby dns.query.type_name destination.port"}, + { "name": "DNS", "showSubtitle": true, "description": "DNS queries grouped by response code", "query": "event.dataset:dns | groupby dns.response.code_name destination.port"}, + { "name": "DNS", "showSubtitle": true, "description": "DNS highest registered domain", "query": "event.dataset:dns | groupby dns.highest_registered_domain.keyword destination.port"}, + { "name": "DNS", "showSubtitle": true, "description": "DNS grouped by parent domain", "query": "event.dataset:dns | groupby dns.parent_domain.keyword destination.port"}, + { "name": "DPD", "showSubtitle": true, "description": "Dynamic Protocol Detection errors", "query": "event.dataset:dpd | groupby error.reason"}, + { "name": "Files", "showSubtitle": true, "description": "Files grouped by mimetype", "query": "event.dataset:file | groupby file.mime_type source.ip"}, + { "name": "Files", "showSubtitle": true, "description": "Files grouped by source", "query": "event.dataset:file | groupby file.source source.ip"}, + { "name": "FTP", "showSubtitle": true, "description": "FTP grouped by command and argument", "query": "event.dataset:ftp | groupby ftp.command ftp.argument"}, + { "name": "FTP", "showSubtitle": true, "description": "FTP grouped by username and argument", "query": "event.dataset:ftp | groupby ftp.user ftp.argument"}, + { "name": "HTTP", "showSubtitle": true, "description": "HTTP grouped by destination port", "query": "event.dataset:http | groupby destination.port"}, + { "name": "HTTP", "showSubtitle": true, "description": "HTTP grouped by status code and message", "query": "event.dataset:http | groupby http.status_code http.status_message"}, + { "name": "HTTP", "showSubtitle": true, "description": "HTTP grouped by method and user agent", "query": "event.dataset:http | groupby http.method http.useragent"}, + { "name": "HTTP", "showSubtitle": true, "description": "HTTP grouped by virtual host", "query": "event.dataset:http | groupby http.virtual_host"}, + { "name": "HTTP", "showSubtitle": true, "description": "HTTP with exe downloads", "query": "event.dataset:http AND (file.resp_mime_types:dosexec OR file.resp_mime_types:executable) | groupby http.virtual_host"}, + { "name": "Intel", "showSubtitle": true, "description": "Intel framework hits grouped by indicator", "query": "event.dataset:intel | groupby intel.indicator.keyword"}, + { "name": "IRC", "showSubtitle": true, "description": "IRC grouped by command", "query": "event.dataset:irc | groupby irc.command.type"}, + { "name": "KERBEROS", "showSubtitle": true, "description": "KERBEROS grouped by service", "query": "event.dataset:kerberos | groupby kerberos.service"}, + { "name": "MODBUS", "showSubtitle": true, "description": "MODBUS grouped by function", "query": "event.dataset:modbus | groupby modbus.function"}, + { "name": "MYSQL", "showSubtitle": true, "description": "MYSQL grouped by command", "query": "event.dataset:mysql | groupby mysql.command"}, + { "name": "NOTICE", "showSubtitle": true, "description": "Zeek notice logs grouped by note and message", "query": "event.dataset:notice | groupby notice.note notice.message"}, + { "name": "NTLM", "showSubtitle": true, "description": "NTLM grouped by computer name", "query": "event.dataset:ntlm | groupby ntlm.server.dns.name"}, + { "name": "Osquery Live Queries", "showSubtitle": true, "description": "Osquery Live Query results grouped by computer name", "query": "event.dataset:live_query | groupby host.hostname"}, + { "name": "PE", 
"showSubtitle": true, "description": "PE files list", "query": "event.dataset:pe | groupby file.machine file.os file.subsystem"}, + { "name": "RADIUS", "showSubtitle": true, "description": "RADIUS grouped by username", "query": "event.dataset:radius | groupby user.name.keyword"}, + { "name": "RDP", "showSubtitle": true, "description": "RDP grouped by client name", "query": "event.dataset:rdp | groupby client.name"}, + { "name": "RFB", "showSubtitle": true, "description": "RFB grouped by desktop name", "query": "event.dataset:rfb | groupby rfb.desktop.name.keyword"}, + { "name": "Signatures", "showSubtitle": true, "description": "Zeek signatures grouped by signature id", "query": "event.dataset:signatures | groupby signature_id"}, + { "name": "SIP", "showSubtitle": true, "description": "SIP grouped by user agent", "query": "event.dataset:sip | groupby client.user_agent"}, + { "name": "SMB_Files", "showSubtitle": true, "description": "SMB files grouped by action", "query": "event.dataset:smb_files | groupby file.action"}, + { "name": "SMB_Mapping", "showSubtitle": true, "description": "SMB mapping grouped by path", "query": "event.dataset:smb_mapping | groupby smb.path"}, + { "name": "SMTP", "showSubtitle": true, "description": "SMTP grouped by subject", "query": "event.dataset:smtp | groupby smtp.subject"}, + { "name": "SNMP", "showSubtitle": true, "description": "SNMP grouped by version and string", "query": "event.dataset:snmp | groupby snmp.community snmp.version"}, + { "name": "Software", "showSubtitle": true, "description": "List of software seen on the network", "query": "event.dataset:software | groupby software.type software.name"}, + { "name": "SSH", "showSubtitle": true, "description": "SSH grouped by version and client", "query": "event.dataset:ssh | groupby ssh.version ssh.client"}, + { "name": "SSL", "showSubtitle": true, "description": "SSL grouped by version and server name", "query": "event.dataset:ssl | groupby ssl.version ssl.server_name"}, + { "name": "SYSLOG", "showSubtitle": true, "description": "SYSLOG grouped by severity and facility ", "query": "event.dataset:syslog | groupby syslog.severity_label syslog.facility_label"}, + { "name": "Tunnel", "showSubtitle": true, "description": "Tunnels grouped by type and action", "query": "event.dataset:tunnel | groupby tunnel.type event.action"}, + { "name": "Weird", "showSubtitle": true, "description": "Zeek weird log grouped by name", "query": "event.dataset:weird | groupby weird.name"}, + { "name": "x509", "showSubtitle": true, "description": "x.509 grouped by key length and name", "query": "event.dataset:x509 | groupby x509.certificate.key.length x509.san_dns"}, + { "name": "x509", "showSubtitle": true, "description": "x.509 grouped by name and issuer", "query": "event.dataset:x509 | groupby x509.san_dns x509.certificate.issuer"}, + { "name": "x509", "showSubtitle": true, "description": "x.509 grouped by name and subject", "query": "event.dataset:x509 | groupby x509.san_dns x509.certificate.subject"}, + { "name": "Firewall", "showSubtitle": true, "description": "Firewall events grouped by action", "query": "event.dataset:firewall | groupby rule.action"} ] diff --git a/salt/soc/files/soc/soc.json b/salt/soc/files/soc/soc.json index 9b034ad57..4eabb7c48 100644 --- a/salt/soc/files/soc/soc.json +++ b/salt/soc/files/soc/soc.json @@ -16,6 +16,7 @@ {%- import_json "soc/files/soc/alerts.eventfields.json" as alerts_eventfields %} {%- import_json "soc/files/soc/hunt.queries.json" as hunt_queries %} {%- import_json 
"soc/files/soc/hunt.eventfields.json" as hunt_eventfields %} +{%- import_json "soc/files/soc/dashboards.queries.json" as dashboards_queries %} {%- import_json "soc/files/soc/cases.queries.json" as cases_queries %} {%- import_json "soc/files/soc/cases.eventfields.json" as cases_eventfields %} {%- import_json "soc/files/soc/menu.actions.json" as menu_actions %} @@ -169,6 +170,26 @@ "queries": {{ hunt_queries | json }}, "actions": {{ menu_actions | json }} }, + "dashboards": { + "advanced": true, + "groupItemsPerPage": 10, + "groupFetchLimit": 10, + "eventItemsPerPage": 10, + "eventFetchLimit": 100, + "relativeTimeValue": 24, + "relativeTimeUnit": 30, + "mostRecentlyUsedLimit": 5, + "ackEnabled": false, + "escalateEnabled": true, + "escalateRelatedEventsEnabled": {{ 'true' if CASE_MODULE == 'soc' else 'false' }}, + "eventFields": {{ hunt_eventfields | json }}, + "queryBaseFilter": "", + "queryToggleFilters": [ + { "name": "caseExcludeToggle", "filter": "NOT _index:\"*:so-case*\"", "enabled": true } + ], + "queries": {{ dashboards_queries | json }}, + "actions": {{ menu_actions | json }} + }, "job": { "actions": {{ menu_actions | json }} }, @@ -217,6 +238,7 @@ "case": { "mostRecentlyUsedLimit": 5, "renderAbbreviatedCount": 30, + "analyzerNodeId": "{{ grains.host | lower }}", "presets": { "artifactType": {{ presets_artifacttype | json }}, "category": {{ presets_category | json }}, diff --git a/salt/telegraf/init.sls b/salt/telegraf/init.sls index e7b550259..3c46b4956 100644 --- a/salt/telegraf/init.sls +++ b/salt/telegraf/init.sls @@ -13,7 +13,12 @@ tgraflogdir: file.directory: - name: /opt/so/log/telegraf - makedirs: True - + - user: 939 + - group: 939 + - recurse: + - user + - group + tgrafetcdir: file.directory: - name: /opt/so/conf/telegraf/etc @@ -29,7 +34,7 @@ tgrafsyncscripts: - name: /opt/so/conf/telegraf/scripts - user: root - group: 939 - - file_mode: 700 + - file_mode: 770 - template: jinja - source: salt://telegraf/scripts {% if salt['pillar.get']('global:mdengine', 'ZEEK') == 'SURICATA' %} @@ -57,6 +62,8 @@ node_config: so-telegraf: docker_container.running: - image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-telegraf:{{ VERSION }} + - user: 939 + - group_add: 939,920 - environment: - HOST_PROC=/host/proc - HOST_ETC=/host/etc diff --git a/salt/telegraf/scripts/influxdbsize.sh b/salt/telegraf/scripts/influxdbsize.sh index 87571629d..bf4431a10 100644 --- a/salt/telegraf/scripts/influxdbsize.sh +++ b/salt/telegraf/scripts/influxdbsize.sh @@ -18,9 +18,12 @@ # if this script isn't already running if [[ ! 
"`pidof -x $(basename $0) -o %PPID`" ]]; then - INFLUXSIZE=$(du -s -k /host/nsm/influxdb | awk {'print $1'}) - echo "influxsize kbytes=$INFLUXSIZE" - + INFLUXLOG=/var/log/telegraf/influxdb_size.log + + if [ -f "$INFLUXLOG" ]; then + INFLUXSTATUS=$(cat $INFLUXLOG) + echo "influxsize kbytes=$INFLUXSTATUS" + fi fi exit 0 diff --git a/salt/top.sls b/salt/top.sls index 83c911992..87f96143f 100644 --- a/salt/top.sls +++ b/salt/top.sls @@ -20,26 +20,28 @@ base: '*': - cron.running + - repo.client 'not G@saltversion:{{saltversion}}': - match: compound - salt.minion-state-apply-test - - repo.client - salt.minion 'G@os:CentOS and G@saltversion:{{saltversion}}': - match: compound - - repo.client - yum.packages '* and G@saltversion:{{saltversion}}': - match: compound - salt.minion - - common - patch.os.schedule - motd - salt.minion-check - salt.lasthighstate + + 'not *_workstation and G@saltversion:{{saltversion}}': + - match: compound + - common '*_helixsensor and G@saltversion:{{saltversion}}': - match: compound @@ -507,3 +509,11 @@ base: - docker_clean - filebeat - idh + + 'J@workstation:gui:enabled:^[Tt][Rr][Uu][Ee]$ and ( G@saltversion:{{saltversion}} and G@os:CentOS )': + - match: compound + - workstation + + 'J@workstation:gui:enabled:^[Ff][Aa][Ll][Ss][Ee]$ and ( G@saltversion:{{saltversion}} and G@os:CentOS )': + - match: compound + - workstation.remove_gui diff --git a/salt/workstation/init.sls b/salt/workstation/init.sls new file mode 100644 index 000000000..e270c0430 --- /dev/null +++ b/salt/workstation/init.sls @@ -0,0 +1,7 @@ +include: + - workstation.xwindows +{# If the master is 'salt' then the minion hasn't been configured and isn't connected to the grid. #} +{# We need this since the trusted-ca state uses mine data. #} +{% if grains.master != 'salt' %} + - workstation.trusted-ca +{% endif %} diff --git a/salt/workstation/packages.sls b/salt/workstation/packages.sls new file mode 100644 index 000000000..59b24ec2a --- /dev/null +++ b/salt/workstation/packages.sls @@ -0,0 +1,60 @@ +{# we only want this state to run it is CentOS #} +{% if grains.os == 'CentOS' %} + +xwindows_group: + pkg.group_installed: + - name: X Window System + +graphical_extras: + pkg.installed: + - pkgs: + - gnome-classic-session + - gnome-terminal + - gnome-terminal-nautilus + - control-center + - liberation-mono-fonts + - file-roller + +workstation_packages: + pkg.installed: + - pkgs: + - wget + - curl + - unzip + - gedit + - mono-core + - mono-basic + - mono-winforms + - expect + - wireshark-gnome + - dsniff + - hping3 + - netsed + - ngrep + - python36-scapy + - ssldump + - tcpdump + - tcpflow + - whois + - chromium + - libevent + - sslsplit + - perl-IO-Compress + - perl-Net-DNS + - securityonion-networkminer + - securityonion-chaosreader + - securityonion-analyst-extras + - securityonion-bittwist + - securityonion-tcpstat + - securityonion-tcptrace + - securityonion-foremost + - securityonion-strelka-oneshot + - securityonion-strelka-fileshot + +{% else %} + +workstation_packages_os_fail: + test.fail_without_changes: + - comment: 'SO Analyst Workstation can only be installed on CentOS' + +{% endif %} diff --git a/salt/workstation/remove_gui.sls b/salt/workstation/remove_gui.sls new file mode 100644 index 000000000..097e23151 --- /dev/null +++ b/salt/workstation/remove_gui.sls @@ -0,0 +1,15 @@ +{# we only want this state to run it is CentOS #} +{% if grains.os == 'CentOS' %} + +remove_graphical_target: + file.symlink: + - name: /etc/systemd/system/default.target + - target: /lib/systemd/system/multi-user.target + 
- force: True
+
+{% else %}
+workstation_remove_gui_os_fail:
+ test.fail_without_changes:
+ - comment: 'SO Analyst Workstation can only be installed on CentOS'
+
+{% endif %}
diff --git a/salt/workstation/trusted-ca.sls b/salt/workstation/trusted-ca.sls new file mode 100644 index 000000000..008d3573f --- /dev/null +++ b/salt/workstation/trusted-ca.sls @@ -0,0 +1,35 @@
+
+{# we only want this state to run if it is CentOS #}
+{% if grains.os == 'CentOS' %}
+
+ {% set global_ca_text = [] %}
+ {% set global_ca_server = [] %}
+ {% set manager = salt['grains.get']('master') %}
+ {% set x509dict = salt['mine.get'](manager | lower~'*', 'x509.get_pem_entries') %}
+ {% for host in x509dict %}
+ {% if host.split('_')|last in ['manager', 'managersearch', 'standalone', 'import', 'eval'] %}
+ {% do global_ca_text.append(x509dict[host].get('/etc/pki/ca.crt')|replace('\n', '')) %}
+ {% do global_ca_server.append(host) %}
+ {% endif %}
+ {% endfor %}
+ {% set trusttheca_text = global_ca_text[0] %}
+ {% set ca_server = global_ca_server[0] %}
+
+trusted_ca:
+ x509.pem_managed:
+ - name: /etc/pki/ca-trust/source/anchors/ca.crt
+ - text: {{ trusttheca_text }}
+
+update_ca_certs:
+ cmd.run:
+ - name: update-ca-trust
+ - onchanges:
+ - x509: trusted_ca
+
+{% else %}
+
+workstation_trusted-ca_os_fail:
+ test.fail_without_changes:
+ - comment: 'SO Analyst Workstation can only be installed on CentOS'
+
+{% endif %}
diff --git a/salt/workstation/xwindows.sls b/salt/workstation/xwindows.sls new file mode 100644 index 000000000..015fb0d3c --- /dev/null +++ b/salt/workstation/xwindows.sls @@ -0,0 +1,23 @@
+
+{# we only want this state to run if it is CentOS #}
+{% if grains.os == 'CentOS' %}
+
+include:
+ - workstation.packages
+
+graphical_target:
+ file.symlink:
+ - name: /etc/systemd/system/default.target
+ - target: /lib/systemd/system/graphical.target
+ - force: True
+ - require:
+ - pkg: X Window System
+ - pkg: graphical_extras
+
+{% else %}
+
+workstation_xwindows_os_fail:
+ test.fail_without_changes:
+ - comment: 'SO Analyst Workstation can only be installed on CentOS'
+
+{% endif %}
diff --git a/salt/zeek/policy/securityonion/file-extraction/extract.zeek b/salt/zeek/policy/securityonion/file-extraction/extract.zeek index e5b7db864..8cdaf42dd 100644 --- a/salt/zeek/policy/securityonion/file-extraction/extract.zeek +++ b/salt/zeek/policy/securityonion/file-extraction/extract.zeek @@ -38,12 +38,12 @@ event file_state_remove(f: fa_file) if ( !f$info?$extracted || FileExtract::prefix == "" ) { return; }
- # Check some conditions so we know the file is intact:
- # Check for MD5
- # Check for total_bytes
- # Check for missing bytes
- # Check if timed out
- if ( !f$info?$md5 || !f?$total_bytes || f$missing_bytes > 0 || f$info$timedout) {
+ # Check if any of the following conditions exist:
+ # - missing MD5
+ # - total_bytes exists (some protocols aren't populating this field) but is 0
+ # - missing bytes
+ # - timed out
+ if ( !f$info?$md5 || (f?$total_bytes && f$total_bytes == 0) || f$missing_bytes > 0 || f$info$timedout) { # Delete the file if it didn't pass our requirements check. 
diff --git a/setup/so-functions b/setup/so-functions
index 5b4914941..61c3985e3 100755
--- a/setup/so-functions
+++ b/setup/so-functions
@@ -49,14 +49,14 @@ airgap_repo() {
   rm -rf /etc/yum.repos.d/*
   echo "[airgap_repo]" > /etc/yum.repos.d/airgap_repo.repo
   if $is_manager; then
-      echo "baseurl=https://$HOSTNAME/repo" >> /etc/yum.repos.d/airgap_repo.repo
+    echo "baseurl=https://$HOSTNAME/repo" >> /etc/yum.repos.d/airgap_repo.repo
   else
-      echo "baseurl=https://$MSRV/repo" >> /etc/yum.repos.d/airgap_repo.repo
+    echo "baseurl=https://$MSRV/repo" >> /etc/yum.repos.d/airgap_repo.repo
   fi
-    echo "gpgcheck=1" >> /etc/yum.repos.d/airgap_repo.repo
-    echo "sslverify=0" >> /etc/yum.repos.d/airgap_repo.repo
-    echo "name=Airgap Repo" >> /etc/yum.repos.d/airgap_repo.repo
-    echo "enabled=1" >> /etc/yum.repos.d/airgap_repo.repo
+  echo "gpgcheck=1" >> /etc/yum.repos.d/airgap_repo.repo
+  echo "sslverify=0" >> /etc/yum.repos.d/airgap_repo.repo
+  echo "name=Airgap Repo" >> /etc/yum.repos.d/airgap_repo.repo
+  echo "enabled=1" >> /etc/yum.repos.d/airgap_repo.repo
 }

 airgap_rules() {
@@ -138,6 +138,45 @@ analyze_system() {
   logCmd "ip a"
 }

+analyst_salt_local() {
+
+  # Install everything using local salt
+  # Set the repo
+  securityonion_repo
+  gpg_rpm_import
+  # Install salt
+  logCmd "yum -y install salt-minion-3004.1 httpd-tools python3 python36-docker python36-dateutil python36-m2crypto python36-mysql python36-packaging python36-lxml yum-utils device-mapper-persistent-data lvm2 openssl jq"
+  logCmd "yum -y update --exclude=salt*"
+
+  salt-call state.apply workstation --local --file-root=../salt/ -l info 2>&1 | tee -a outfile
+  read -r -d '' message <<- EOM
+	Finished Analyst workstation installation.
+
+	Press ENTER to reboot.
+	EOM
+
+  whiptail --title "$whiptail_title" --msgbox "$message" 12 75
+  reboot
+  exit 0
+
+}
+
+
+analyst_workstation_pillar() {
+
+  local pillar_file=$temp_install_dir/pillar/minions/$MINION_ID.sls
+
+  # Create the analyst workstation pillar
+  printf '%s\n'\
+    "host:"\
+    "  mainint: '$MNIC'"\
+    "workstation:"\
+    "  gui:"\
+    "    enabled: true"\
+    "sensoroni:"\
+    "  node_description: '${NODE_DESCRIPTION//\'/''}'" > "$pillar_file"
+}
+
 calculate_useable_cores() {

   # Calculate reasonable core usage
@@ -217,36 +256,6 @@ check_service_status() {

 }

-check_salt_master_status() {
-  local timeout=$1
-  echo "Checking if we can talk to the salt master" >> "$setup_log" 2>&1
-  salt-call saltutil.kill_all_jobs > /dev/null 2>&1
-  salt-call state.show_top -t $timeout > /dev/null 2>&1
-  local status=$?
-  if [ $status -gt 0 ]; then
-    echo "  Could not talk to salt master" >> "$setup_log" 2>&1
-    return 1;
-  else
-    echo "  Can talk to salt master" >> "$setup_log" 2>&1
-    return 0;
-  fi
-
-}
-
-check_salt_minion_status() {
-  local timeout=$1
-  echo "Checking if the salt minion will respond to jobs" >> "$setup_log" 2>&1
-  salt "$MINION_ID" test.ping -t $timeout > /dev/null 2>&1
-  local status=$?
-  if [ $status -gt 0 ]; then
-    echo "  Minion did not respond" >> "$setup_log" 2>&1
-    return 1;
-  else
-    echo "  Received job response from salt minion" >> "$setup_log" 2>&1
-    return 0;
-  fi
-}
-
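
With the redirection in analyst_workstation_pillar reduced to a single > at the end of the printf, the function writes exactly one pillar file per workstation minion under $temp_install_dir/pillar/minions/. Assuming $MNIC is eth0 and a free-text node description, the generated file would look roughly like this (file name and values illustrative):

    host:
      mainint: 'eth0'
    workstation:
      gui:
        enabled: true
    sensoroni:
      node_description: 'SOC analyst VM'
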
 check_soremote_pass() {
   check_pass_match "$SOREMOTEPASS1" "$SOREMOTEPASS2" "SCMATCH"
 }

@@ -435,6 +444,13 @@ collect_hostname_validate() {
   done
 }

+collect_idh_preferences() {
+  IDHMGTRESTRICT='False'
+  whiptail_idh_preferences
+
+  if [[ "$idh_preferences" != "" ]]; then IDHMGTRESTRICT='True'; fi
+}
+
 collect_idh_services() {

   whiptail_idh_services
@@ -766,6 +782,9 @@ collect_zeek() {

 configure_minion() {
   local minion_type=$1
+  if [[ $is_analyst ]]; then
+    minion_type=workstation
+  fi
   echo "Configuring minion type as $minion_type" >> "$setup_log" 2>&1

   echo "role: so-$minion_type" > /etc/salt/grains
@@ -774,6 +793,9 @@ configure_minion() {
   echo "id: '$MINION_ID'" > "$minion_config"

   case "$minion_type" in
+    'workstation')
+      echo "master: '$MSRV'" >> "$minion_config"
+      ;;
     'helix')
       cp -f ../salt/ca/files/signing_policies.conf /etc/salt/minion.d/signing_policies.conf
       echo "master: '$HOSTNAME'" >> "$minion_config"
@@ -882,6 +904,8 @@ check_requirements() {
     if [[ -n $nsm_mount ]]; then
       if [[ "$standalone_or_dist" == 'import' ]]; then
         req_storage=50
+      elif [[ "$node_type" == 'idh' ]]; then
+        req_storage=12
       else
         req_storage=100
       fi
@@ -894,6 +918,8 @@ check_requirements() {
     else
       if [[ "$standalone_or_dist" == 'import' ]]; then
         req_storage=50
+      elif [[ "$node_type" == 'idh' ]]; then
+        req_storage=12
       else
         req_storage=200
       fi
@@ -1108,6 +1134,7 @@ detect_os() {
   echo "Detecting Base OS" >> "$log" 2>&1
   if [ -f /etc/redhat-release ]; then
     OS=centos
+    is_centos=true
     if grep -q "CentOS Linux release 7" /etc/redhat-release; then
       OSVER=7
     elif grep -q "CentOS Linux release 8" /etc/redhat-release; then
@@ -1207,7 +1234,7 @@ disable_ipv6() {

 docker_install() {

-  if [ $OS = 'centos' ]; then
+  if [[ $is_centos ]]; then
     logCmd "yum clean expire-cache"
     if [[ !
$is_iso ]]; then logCmd "yum -y install docker-ce-20.10.5-3.el7 docker-ce-cli-20.10.5-3.el7 docker-ce-rootless-extras-20.10.5-3.el7 containerd.io-1.4.4-3.1.el7" @@ -1229,15 +1256,15 @@ docker_install() { ;; esac if [ $OSVER == "bionic" ]; then - service docker stop + service docker stop apt -y purge docker-ce docker-ce-cli docker-ce-rootless-extras retry 50 10 "apt-get -y install --allow-downgrades docker-ce=5:20.10.5~3-0~ubuntu-bionic docker-ce-cli=5:20.10.5~3-0~ubuntu-bionic docker-ce-rootless-extras=5:20.10.5~3-0~ubuntu-bionic python3-docker" >> "$setup_log" 2>&1 || exit 1 - apt-mark hold docker-ce docker-ce-cli docker-ce-rootless-extras + apt-mark hold docker-ce docker-ce-cli docker-ce-rootless-extras elif [ $OSVER == "focal" ]; then - service docker stop + service docker stop apt -y purge docker-ce docker-ce-cli docker-ce-rootless-extras retry 50 10 "apt-get -y install --allow-downgrades docker-ce=5:20.10.8~3-0~ubuntu-focal docker-ce-cli=5:20.10.8~3-0~ubuntu-focal docker-ce-rootless-extras=5:20.10.8~3-0~ubuntu-focal python3-docker" >> "$setup_log" 2>&1 || exit 1 - apt-mark hold docker-ce docker-ce-cli docker-ce-rootless-extras + apt-mark hold docker-ce docker-ce-cli docker-ce-rootless-extras fi fi docker_registry @@ -1368,9 +1395,9 @@ es_heapsize() { # Set heap size to 33% of available memory ES_HEAP_SIZE=$(( total_mem / 3 )) if [ "$ES_HEAP_SIZE" -ge 25001 ] ; then - ES_HEAP_SIZE="25000m" + ES_HEAP_SIZE="25000m" else - ES_HEAP_SIZE=$ES_HEAP_SIZE"m" + ES_HEAP_SIZE=$ES_HEAP_SIZE"m" fi fi export ES_HEAP_SIZE @@ -1492,7 +1519,7 @@ generate_passwords(){ } generate_repo_tarball() { - mkdir /opt/so/repo + mkdir -p /opt/so/repo tar -czf /opt/so/repo/"$SOVERSION".tar.gz -C "$(pwd)/.." . } @@ -1728,7 +1755,7 @@ manager_global() { " managerip: '$MAINIP'" > "$global_pillar" if [[ $HIGHLANDER == 'True' ]]; then - printf '%s\n'\ + printf '%s\n'\ " highlander: True"\ >> "$global_pillar" fi if [[ $is_airgap ]]; then @@ -1758,14 +1785,15 @@ manager_global() { " enabled: $STRELKA"\ " rules: 1" >> "$global_pillar" if [[ $is_airgap ]]; then - printf '%s\n'\ - " repos:"\ - " - 'https://$HOSTNAME/repo/rules/strelka'" >> "$global_pillar" - else - printf '%s\n'\ - " repos:"\ - " - 'https://github.com/Neo23x0/signature-base'" >> "$global_pillar" - fi + printf '%s\n'\ + " repos:"\ + " - 'https://$HOSTNAME/repo/rules/strelka'" >> "$global_pillar" + else + printf '%s\n'\ + " repos:"\ + " - 'https://github.com/Neo23x0/signature-base'" >> "$global_pillar" + fi + printf '%s\n'\ "curator:"\ " hot_warm: False"\ @@ -1793,101 +1821,101 @@ manager_global() { " cluster_routing_allocation_disk_watermark_flood_stage: '98%'"\ " index_settings:"\ " so-beats:"\ - " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " index_template:"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-endgame:"\ - " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " index_template:"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-firewall:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-flow:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " 
close: 30"\ " delete: 365"\ " so-ids:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-import:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 73000"\ " delete: 73001"\ " so-osquery:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-ossec:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-strelka:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-syslog:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 1"\ " warm: 7"\ " close: 30"\ " delete: 365"\ " so-zeek:"\ " index_template:"\ - " template:"\ - " settings:"\ - " index:"\ - " number_of_shards: 1"\ + " template:"\ + " settings:"\ + " index:"\ + " number_of_shards: 2"\ " warm: 7"\ " close: 45"\ " delete: 365"\ @@ -1933,7 +1961,7 @@ minio_generate_keys() { network_init() { disable_ipv6 set_hostname - if [[ "$setup_type" == 'iso' ]]; then + if [[ ( $is_iso || $is_analyst_iso ) ]]; then set_management_interface fi } @@ -2178,7 +2206,7 @@ reset_proxy() { [[ -f /etc/gitconfig ]] && rm -f /etc/gitconfig - if [[ $OS == 'centos' ]]; then + if [[ $is_centos ]]; then sed -i "/proxy=/d" /etc/yum.conf else [[ -f /etc/apt/apt.conf.d/00-proxy.conf ]] && rm -f /etc/apt/apt.conf.d/00-proxy.conf @@ -2206,7 +2234,7 @@ backup_dir() { remove_package() { local package_name=$1 - if [ $OS = 'centos' ]; then + if [[ $is_centos ]]; then if rpm -qa | grep -q "$package_name"; then logCmd "yum remove -y $package_name" fi @@ -2227,39 +2255,42 @@ remove_package() { saltify() { # Install updates and Salt - if [ $OS = 'centos' ]; then + if [[ $is_centos ]]; then set_progress_str 6 'Installing various dependencies' - if [[ ! $is_iso ]]; then + if [[ ! ( $is_iso || $is_analyst_iso ) ]]; then logCmd "yum -y install wget nmap-ncat" - fi - case "$install_type" in - 'MANAGER' | 'EVAL' | 'MANAGERSEARCH' | 'FLEET' | 'HELIXSENSOR' | 'STANDALONE'| 'IMPORT') - reserve_group_ids - if [[ ! $is_iso ]]; then - logCmd "yum -y install sqlite curl mariadb-devel" - fi - # Download Ubuntu Keys in case manager updates = 1 - logCmd "mkdir -vp /opt/so/gpg" - if [[ ! $is_airgap ]]; then - logCmd "wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.saltstack.com/py3/ubuntu/18.04/amd64/archive/3004/SALTSTACK-GPG-KEY.pub" - logCmd "wget -q --inet4-only -O /opt/so/gpg/docker.pub https://download.docker.com/linux/ubuntu/gpg" - logCmd "wget -q --inet4-only -O /opt/so/gpg/GPG-KEY-WAZUH https://packages.wazuh.com/key/GPG-KEY-WAZUH" - fi - set_progress_str 7 'Installing salt-master' - if [[ ! $is_iso ]]; then - logCmd "yum -y install salt-master-3004" - fi - logCmd "systemctl enable salt-master" - ;; - *) - ;; - esac + fi + + if [[ ! 
$is_analyst ]]; then + case "$install_type" in + 'MANAGER' | 'EVAL' | 'MANAGERSEARCH' | 'FLEET' | 'HELIXSENSOR' | 'STANDALONE'| 'IMPORT') + reserve_group_ids + if [[ ! $is_iso ]]; then + logCmd "yum -y install sqlite curl mariadb-devel" + fi + # Download Ubuntu Keys in case manager updates = 1 + logCmd "mkdir -vp /opt/so/gpg" + if [[ ! $is_airgap ]]; then + logCmd "wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt/SALTSTACK-GPG-KEY.pub" + logCmd "wget -q --inet4-only -O /opt/so/gpg/docker.pub https://download.docker.com/linux/ubuntu/gpg" + logCmd "wget -q --inet4-only -O /opt/so/gpg/GPG-KEY-WAZUH https://packages.wazuh.com/key/GPG-KEY-WAZUH" + fi + set_progress_str 7 'Installing salt-master' + if [[ ! $is_iso ]]; then + logCmd "yum -y install salt-master-3004.1" + fi + logCmd "systemctl enable salt-master" + ;; + *) + ;; + esac + fi if [[ ! $is_airgap ]]; then logCmd "yum clean expire-cache" fi set_progress_str 8 'Installing salt-minion & python modules' - if [[ ! $is_iso ]]; then - logCmd "yum -y install salt-minion-3004 httpd-tools python3 python36-docker python36-dateutil python36-m2crypto python36-mysql python36-packaging python36-lxml yum-utils device-mapper-persistent-data lvm2 openssl jq" + if [[ ! ( $is_iso || $is_analyst_iso ) ]]; then + logCmd "yum -y install salt-minion-3004.1 httpd-tools python3 python36-docker python36-dateutil python36-m2crypto python36-mysql python36-packaging python36-lxml yum-utils device-mapper-persistent-data lvm2 openssl jq" logCmd "yum -y update --exclude=salt*" fi logCmd "systemctl enable salt-minion" @@ -2298,8 +2329,8 @@ saltify() { 'MANAGER' | 'EVAL' | 'MANAGERSEARCH' | 'STANDALONE' | 'IMPORT' | 'HELIXSENSOR') # Add saltstack repo(s) - wget -q --inet4-only -O - https://repo.saltstack.com/py3/ubuntu/"$ubuntu_version"/amd64/archive/3004/SALTSTACK-GPG-KEY.pub | apt-key add - >> "$setup_log" 2>&1 - echo "deb http://repo.saltstack.com/py3/ubuntu/$ubuntu_version/amd64/archive/3004 $OSVER main" > /etc/apt/sources.list.d/saltstack.list 2>> "$setup_log" + wget -q --inet4-only -O - https://repo.securityonion.net/file/securityonion-repo/ubuntu/"$ubuntu_version"/amd64/salt/SALTSTACK-GPG-KEY.pub | apt-key add - >> "$setup_log" 2>&1 + echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt/ $OSVER main" > /etc/apt/sources.list.d/saltstack.list 2>> "$setup_log" # Add Docker repo curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - >> "$setup_log" 2>&1 @@ -2307,7 +2338,7 @@ saltify() { # Get gpg keys mkdir -p /opt/so/gpg >> "$setup_log" 2>&1 - wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.saltstack.com/py3/ubuntu/"$ubuntu_version"/amd64/archive/3004/SALTSTACK-GPG-KEY.pub >> "$setup_log" 2>&1 + wget -q --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.securityonion.net/file/securityonion-repo/ubuntu/"$ubuntu_version"/amd64/salt/SALTSTACK-GPG-KEY.pub >> "$setup_log" 2>&1 wget -q --inet4-only -O /opt/so/gpg/docker.pub https://download.docker.com/linux/ubuntu/gpg >> "$setup_log" 2>&1 wget -q --inet4-only -O /opt/so/gpg/GPG-KEY-WAZUH https://packages.wazuh.com/key/GPG-KEY-WAZUH >> "$setup_log" 2>&1 @@ -2320,7 +2351,7 @@ saltify() { set_progress_str 6 'Installing various dependencies' retry 50 10 "apt-get -y install sqlite3 libssl-dev" >> "$setup_log" 2>&1 || exit 1 set_progress_str 7 'Installing salt-master' - retry 50 10 "apt-get -y install salt-master=3004+ds-1" >> 
"$setup_log" 2>&1 || exit 1 + retry 50 10 "apt-get -y install salt-master=3004.1+ds-1" >> "$setup_log" 2>&1 || exit 1 retry 50 10 "apt-mark hold salt-master" >> "$setup_log" 2>&1 || exit 1 ;; *) @@ -2331,14 +2362,14 @@ saltify() { echo "Using apt-key add to add SALTSTACK-GPG-KEY.pub and GPG-KEY-WAZUH" >> "$setup_log" 2>&1 apt-key add "$temp_install_dir"/gpg/SALTSTACK-GPG-KEY.pub >> "$setup_log" 2>&1 apt-key add "$temp_install_dir"/gpg/GPG-KEY-WAZUH >> "$setup_log" 2>&1 - echo "deb http://repo.saltstack.com/py3/ubuntu/$ubuntu_version/amd64/archive/3004/ $OSVER main" > /etc/apt/sources.list.d/saltstack.list 2>> "$setup_log" + echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt/ $OSVER main" > /etc/apt/sources.list.d/saltstack.list 2>> "$setup_log" echo "deb https://packages.wazuh.com/3.x/apt/ stable main" > /etc/apt/sources.list.d/wazuh.list 2>> "$setup_log" ;; esac retry 50 10 "apt-get update" "" "Err:" >> "$setup_log" 2>&1 || exit 1 set_progress_str 8 'Installing salt-minion & python modules' - retry 50 10 "apt-get -y install salt-minion=3004+ds-1 salt-common=3004+ds-1" >> "$setup_log" 2>&1 || exit 1 + retry 50 10 "apt-get -y install salt-minion=3004.1+ds-1 salt-common=3004.1+ds-1" >> "$setup_log" 2>&1 || exit 1 retry 50 10 "apt-mark hold salt-minion salt-common" >> "$setup_log" 2>&1 || exit 1 retry 50 10 "apt-get -y install python3-pip python3-dateutil python3-m2crypto python3-mysqldb python3-packaging python3-influxdb python3-lxml" >> "$setup_log" 2>&1 || exit 1 fi @@ -2371,7 +2402,7 @@ secrets_pillar(){ securityonion_repo() { # Remove all the current repos - if [[ "$OS" == "centos" ]]; then + if [[ $is_centos ]]; then if [[ "$INTERWEBS" == "AIRGAP" ]]; then echo "This is airgap I don't need to add this repo" else @@ -2390,8 +2421,8 @@ securityonion_repo() { # update this package because the repo config files get added back # if the package is updated when the update_packages function is called logCmd "yum -v -y update centos-release" - echo "Move the .repo files that were added by the centos-release package." - find /etc/yum.repos.d/ -type f -not -name 'securityonion*repo' -print0 | xargs -0 -I {} mv -bvf {} /root/oldrepos/ + echo "Backing up the .repo files that were added by the centos-release package." + logCmd "find /etc/yum.repos.d/ -type f -not -name 'securityonion*repo' -print0 | xargs -0 -I {} mv -bvf {} /root/oldrepos/" logCmd "yum repolist all" fi else @@ -2405,8 +2436,28 @@ set_network_dev_status_list() { } set_main_ip() { - MAINIP=$(ip route get 1 | awk '{print $7;exit}') - MNIC_IP=$(ip a s "$MNIC" | grep -oE 'inet [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | cut -d' ' -f2) + local count=0 + local progress='.' + local c=0 + local m=3.3 + local max_attempts=30 + echo "Gathering the management IP. " + while ! valid_ip4 "$MAINIP" || ! valid_ip4 "$MNIC_IP"; do + MAINIP=$(ip route get 1 | awk '{print $7;exit}') + MNIC_IP=$(ip a s "$MNIC" | grep -oE 'inet [0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' | cut -d' ' -f2) + ((count=count+1)) + p=$(awk -vp=$m -vq=$count 'BEGIN{printf "%.0f" ,p * q}') + printf "%-*s" $((count+1)) '[' | tr ' ' '#' + printf "%*s%3d%%\r" $((max_attempts-count)) "]" "$p" + if [ $count = $max_attempts ]; then + echo "ERROR: Could not determine MAINIP or MNIC_IP." >> "$setup_log" 2>&1 + echo "MAINIP=$MAINIP" >> "$setup_log" 2>&1 + echo "MNIC_IP=$MNIC_IP" >> "$setup_log" 2>&1 + whiptail_error_message "The management IP could not be determined. 
Please check the log at /root/sosetup.log and verify the network configuration. Press OK to exit." + exit 1 + fi + sleep 1 + done } # Add /usr/sbin to everyone's path @@ -2462,7 +2513,7 @@ set_proxy() { "}" > /root/.docker/config.json # Set proxy for package manager - if [ "$OS" = 'centos' ]; then + if [[ $is_centos ]]; then echo "proxy=$so_proxy" >> /etc/yum.conf else # Set it up so the updates roll through the manager @@ -2633,8 +2684,8 @@ set_initial_firewall_policy() { 'EVAL' | 'MANAGERSEARCH' | 'STANDALONE' | 'IMPORT') $default_salt_dir/salt/common/tools/sbin/so-firewall includehost manager "$MAINIP" $default_salt_dir/salt/common/tools/sbin/so-firewall includehost minion "$MAINIP" - $default_salt_dir/salt/common/tools/sbin/so-firewall includehost sensor "$MAINIP" - $default_salt_dir/salt/common/tools/sbin/so-firewall --apply includehost search_node "$MAINIP" + $default_salt_dir/salt/common/tools/sbin/so-firewall includehost sensor "$MAINIP" + $default_salt_dir/salt/common/tools/sbin/so-firewall --apply includehost search_node "$MAINIP" case "$install_type" in 'EVAL') $default_salt_dir/pillar/data/addtotab.sh evaltab "$MINION_ID" "$MAINIP" "$num_cpu_cores" "$random_uid" "$MNIC" "$filesystem_root" "$filesystem_nsm" "$INTERFACE" True @@ -2650,7 +2701,7 @@ set_initial_firewall_policy() { 'HELIXSENSOR') $default_salt_dir/salt/common/tools/sbin/so-firewall includehost manager "$MAINIP" $default_salt_dir/salt/common/tools/sbin/so-firewall includehost minion "$MAINIP" - $default_salt_dir/salt/common/tools/sbin/so-firewall --apply includehost sensor "$MAINIP" + $default_salt_dir/salt/common/tools/sbin/so-firewall --apply includehost sensor "$MAINIP" ;; 'SENSOR' | 'SEARCHNODE' | 'HEAVYNODE' | 'FLEET' | 'IDH' | 'RECEIVER') $sshcmd -i /root/.ssh/so.key soremote@"$MSRV" sudo $default_salt_dir/salt/common/tools/sbin/so-firewall includehost minion "$MAINIP" @@ -2690,6 +2741,13 @@ set_initial_firewall_policy() { # TODO: implement ;; esac + + # Add some firewall rules for analyst workstations that get added to the grid + if [[ $is_analyst ]]; then + $sshcmd -i /root/.ssh/so.key soremote@"$MSRV" sudo $default_salt_dir/salt/common/tools/sbin/so-firewall includehost minion "$MAINIP" + $sshcmd -i /root/.ssh/so.key soremote@"$MSRV" sudo $default_salt_dir/salt/common/tools/sbin/so-firewall --apply includehost analyst "$MAINIP" + fi + } # Set up the management interface on the ISO @@ -2741,7 +2799,7 @@ set_redirect() { set_updates() { if [ "$MANAGERUPDATES" = '1' ]; then - if [ "$OS" = 'centos' ]; then + if [[ $is_centos ]]; then if [[ ! $is_airgap ]] && ! 
( grep -q "$MSRV" /etc/yum.conf); then if grep -q "proxy=" /etc/yum.conf; then sed -i "s/proxy=.*/proxy=http:\/\/$MSRV:3142/" /etc/yum.conf @@ -2808,9 +2866,9 @@ update_sudoers() { } update_packages() { - if [ "$OS" = 'centos' ]; then - logCmd "yum repolist" - logCmd "yum -y update --exclude=salt*,wazuh*,docker*,containerd*" + if [[ $is_centos ]]; then + logCmd "yum repolist" + logCmd "yum -y update --exclude=salt*,wazuh*,docker*,containerd*" else retry 50 10 "apt-get -y update" >> "$setup_log" 2>&1 || exit 1 retry 50 10 "apt-get -y upgrade" >> "$setup_log" 2>&1 || exit 1 @@ -2867,10 +2925,11 @@ write_out_idh_services() { printf '%s\n'\ "idh:"\ + " restrict_management_ip: $IDHMGTRESTRICT"\ " services:" >> "$pillar_file" for service in ${idh_services[@]}; do echo " - $service" | tr '[:upper:]' '[:lower:]' >> "$pillar_file" - done + done } # Enable Zeek Logs diff --git a/setup/so-preflight b/setup/so-preflight index cdbcf6bfe..d1fd89b6e 100755 --- a/setup/so-preflight +++ b/setup/so-preflight @@ -96,7 +96,7 @@ check_new_repos() { local repo_arr=( "https://download.docker.com/linux/ubuntu/gpg" "https://download.docker.com/linux/ubuntu" - "https://repo.saltstack.com/py3/ubuntu/$ubuntu_version/amd64/archive/3003/SALTSTACK-GPG-KEY.pub" + "https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt/SALTSTACK-GPG-KEY.pub" "https://packages.wazuh.com/key/GPG-KEY-WAZUH" "https://packages.wazuh.com" ) diff --git a/setup/so-setup b/setup/so-setup index e8709b668..e06b5ded9 100755 --- a/setup/so-setup +++ b/setup/so-setup @@ -70,10 +70,71 @@ while [[ $# -gt 0 ]]; do esac done -if [[ "$setup_type" == 'iso' ]]; then - is_iso=true +detect_os +is_analyst= +if [ "$setup_type" = 'analyst' ]; then + is_analyst=true + # Check to see if this is an ISO + if [ -d /root/SecurityOnion ]; then + is_analyst_iso=true + fi fi +if [[ "$setup_type" == 'iso' ]]; then + if [[ $is_centos ]]; then + is_iso=true + else + echo "Only use 'so-setup iso' for an ISO install on CentOS. Please run 'so-setup network' instead." + exit 1 + fi +fi + +# Check to see if this is an analyst install. If it is let's run things differently + +if [[ $is_analyst ]]; then + + # Make sure it's CentOS + if [[ ! $is_centos ]]; then + echo "Analyst Workstation is only supported on CentOS 7" + exit 1 + fi + + if ! whiptail_analyst_install; then + if [[ $is_analyst_iso ]]; then + if whiptail_analyst_nongrid_iso; then + # Remove setup from auto launching + parse_install_username + sed -i '$ d' /home/$INSTALLUSERNAME/.bash_profile >> "$setup_log" 2>&1 + echo "Enabling graphical interface and setting it to load at boot" + systemctl set-default graphical.target + startx + exit 0 + else + # Abort! + exit 0 + fi + else + if whiptail_analyst_nongrid_network; then + echo "" + echo "" + echo "Kicking off the automated setup of the analyst workstation. This can take a while depending on your network connection." + echo "" + echo "" + analyst_salt_local + else + # Abort! + exit 0 + fi + fi + fi + + # If you got this far then you want to join the grid + is_minion=true + +fi + + + if ! 
[ -f $install_opt_file ] && [ -d /root/manager_setup/securityonion ] && [[ $(pwd) != /root/manager_setup/securityonion/setup ]]; then exec bash /root/manager_setup/securityonion/setup/so-setup "${original_args[@]}" fi @@ -105,7 +166,6 @@ catch() { whiptail_setup_failed exit 1 } - automated=no progress() { local msg=${1:-'Please wait while installing...'} @@ -117,8 +177,6 @@ progress() { fi } -detect_os - if [[ -f automation/$automation && $(basename $automation) == $automation ]]; then echo "Preselecting variable values based on automated setup: $automation" >> $setup_log 2>&1 source automation/$automation @@ -151,11 +209,11 @@ if [[ -f automation/$automation && $(basename $automation) == $automation ]]; th fi case "$setup_type" in - iso | network) # Accepted values + iso | network | analyst) # Accepted values echo "Beginning Security Onion $setup_type install" >> $setup_log 2>&1 ;; *) - echo "Invalid install type, must be 'iso' or 'network'" | tee -a $setup_log + echo "Invalid install type, must be 'iso', 'network' or 'analyst'." | tee -a $setup_log exit 1 ;; esac @@ -197,6 +255,37 @@ if ! [[ -f $install_opt_file ]]; then echo "User cancelled setup." | tee -a "$setup_log" whiptail_cancel fi + if [[ $is_analyst ]]; then + collect_hostname + if [[ $is_analyst_iso ]]; then + # Prompt Network Setup + whiptail_management_nic + whiptail_dhcp_or_static + + if [ "$address_type" != 'DHCP' ]; then + collect_int_ip_mask + collect_gateway + collect_dns + collect_dns_domain + fi + + fi + if [[ ! $is_analyst_iso ]]; then + # This should be a network install + whiptail_network_notice + whiptail_dhcp_warn + whiptail_management_nic + fi + whiptail_network_init_notice + network_init + printf '%s\n' \ + "MNIC=$MNIC" \ + "HOSTNAME=$HOSTNAME" > "$net_init_file" + set_main_ip + compare_main_nic_ip + + fi + if [[ $setup_type == 'iso' ]] && [ "$automated" == no ]; then whiptail_first_menu_iso if [[ $option == "CONFIGURENETWORK" ]]; then @@ -207,14 +296,16 @@ if ! [[ -f $install_opt_file ]]; then printf '%s\n' \ "MNIC=$MNIC" \ "HOSTNAME=$HOSTNAME" > "$net_init_file" - set_main_ip >> $setup_log 2>&1 + set_main_ip compare_main_nic_ip whiptail_net_setup_complete else true fi fi - whiptail_install_type + if [[ ! $is_analyst ]]; then + whiptail_install_type + fi else source $install_opt_file fi @@ -263,18 +354,15 @@ elif [ "$install_type" = 'RECEIVER' ]; then is_minion=true is_receiver=true elif [ "$install_type" = 'ANALYST' ]; then - cd .. || exit 255 - exec bash so-analyst-install + if [ "$setup_type" != 'analyst' ]; then + exec bash so-setup analyst + fi fi if [[ $is_manager || $is_import ]]; then check_elastic_license fi -if [[ $is_idh ]]; then - collect_idh_services -fi - if ! [[ -f $install_opt_file ]]; then if [[ $is_manager && $is_sensor ]]; then check_requirements "standalone" @@ -284,7 +372,7 @@ if ! [[ -f $install_opt_file ]]; then check_requirements "dist" "idh" elif [[ $is_sensor && ! $is_eval ]]; then check_requirements "dist" "sensor" - elif [[ $is_distmanager || $is_minion ]] && [[ ! $is_import ]]; then + elif [[ $is_distmanager || $is_minion ]] && [[ ! ( $is_import || $is_analyst ) ]]; then check_requirements "dist" elif [[ $is_import ]]; then check_requirements "import" @@ -309,26 +397,28 @@ if ! 
[[ -f $install_opt_file ]]; then network_init fi - set_main_ip >> $setup_log 2>&1 + set_main_ip compare_main_nic_ip if [[ $is_minion ]]; then collect_mngr_hostname add_mngr_ip_to_hosts - fi - - if [[ $is_minion ]]; then whiptail_ssh_key_copy_notice copy_ssh_key >> $setup_log 2>&1 fi + if [[ $is_idh ]]; then + collect_idh_services + collect_idh_preferences + fi + # Check if this is an airgap install if [[ ( $is_manager || $is_import) && $is_iso ]]; then whiptail_airgap if [[ "$INTERWEBS" == 'AIRGAP' ]]; then is_airgap=true fi - elif [[ $is_minion && $is_iso ]]; then + elif [[ $is_minion && ( $is_iso || $is_analyst ) ]]; then $sshcmd -i /root/.ssh/so.key soremote@"$MSRV" [[ -f /etc/yum.repos.d/airgap_repo.repo ]] >> $setup_log 2>&1 airgap_check=$? [[ $airgap_check == 0 ]] && is_airgap=true >> $setup_log 2>&1 @@ -394,7 +484,12 @@ detect_cloud short_name=$(echo "$HOSTNAME" | awk -F. '{print $1}') -MINION_ID=$(echo "${short_name}_${install_type}" | tr '[:upper:]' '[:lower:]') +if [[ $is_analyst ]]; then + MINION_ID=$(echo "${short_name}_workstation" | tr '[:upper:]' '[:lower:]') +fi +if [[ ! $is_analyst ]]; then + MINION_ID=$(echo "${short_name}_${install_type}" | tr '[:upper:]' '[:lower:]') +fi export MINION_ID echo "MINION_ID = $MINION_ID" >> $setup_log 2>&1 @@ -557,7 +652,7 @@ if [[ $is_sensor && ! $is_eval ]]; then fi fi -[[ $is_iso ]] && collect_ntp_servers +[[ ( $is_iso || $is_analyst ) ]] && collect_ntp_servers if [[ ($is_node || $is_receiver) && ! $is_eval ]]; then whiptail_node_advanced @@ -615,7 +710,9 @@ echo "1" > /root/accept_changes [[ ${#ntp_servers[@]} -gt 0 ]] && configure_ntp >> $setup_log 2>&1 - reserve_ports + if [[ ! $is_analyst ]]; then + reserve_ports + fi set_path @@ -645,8 +742,12 @@ echo "1" > /root/accept_changes if [[ $is_manager && ! $is_eval ]]; then add_soremote_user_manager >> $setup_log 2>&1 fi - - host_pillar >> $setup_log 2>&1 + if [[ ! $is_analyst ]]; then + host_pillar >> $setup_log 2>&1 + fi + if [[ $is_analyst ]]; then + analyst_workstation_pillar + fi ntp_pillar >> $setup_log 2>&1 @@ -669,12 +770,12 @@ echo "1" > /root/accept_changes # Import the gpg keys gpg_rpm_import >> $setup_log 2>&1 info "Disabling fastestmirror" - [[ $OS == 'centos' ]] && disable_fastestmirror + [[ $is_centos ]] && disable_fastestmirror if [[ ! $is_airgap ]]; then - securityonion_repo >> $setup_log 2>&1 - update_packages >> $setup_log 2>&1 + securityonion_repo >> $setup_log 2>&1 + update_packages >> $setup_log 2>&1 else - airgap_repo >> $setup_log 2>&1 + airgap_repo >> $setup_log 2>&1 fi if [[ $is_sensor || $is_helix || $is_import ]]; then @@ -693,17 +794,22 @@ echo "1" > /root/accept_changes set_progress_str 5 'Installing Salt and dependencies' saltify 2>> $setup_log + + if [[ ! $is_analyst ]]; then + set_progress_str 6 'Installing Docker and dependencies' + docker_install >> $setup_log 2>&1 + fi - set_progress_str 6 'Installing Docker and dependencies' - docker_install >> $setup_log 2>&1 - set_progress_str 7 'Generating patch pillar' patch_pillar >> $setup_log 2>&1 set_progress_str 8 'Initializing Salt minion' configure_minion "$minion_type" >> $setup_log 2>&1 - check_sos_appliance >> $setup_log 2>&1 + if [[ ! $is_analyst ]]; then + check_sos_appliance >> $setup_log 2>&1 + fi + update_sudoers_for_testing >> $setup_log 2>&1 if [[ $is_manager || $is_helix || $is_import ]]; then @@ -781,8 +887,10 @@ echo "1" > /root/accept_changes generate_ca >> $setup_log 2>&1 fi - set_progress_str 24 'Generating SSL' - generate_ssl >> $setup_log 2>&1 + if [[ ! 
$is_analyst ]]; then + set_progress_str 24 'Generating SSL' + generate_ssl >> $setup_log 2>&1 + fi if [[ $is_manager || $is_helix || $is_import ]]; then set_progress_str 25 'Configuring firewall' @@ -809,18 +917,22 @@ echo "1" > /root/accept_changes echo "Finished so-elastic-auth..." >> $setup_log 2>&1 fi - set_progress_str 61 "$(print_salt_state_apply 'firewall')" - salt-call state.apply -l info firewall >> $setup_log 2>&1 + if [[ ! $is_analyst ]]; then + set_progress_str 61 "$(print_salt_state_apply 'firewall')" + salt-call state.apply -l info firewall >> $setup_log 2>&1 + fi - if [ $OS = 'centos' ]; then + if [[ $is_centos ]]; then set_progress_str 61 'Installing Yum utilities' salt-call state.apply -l info yum.packages >> $setup_log 2>&1 fi - set_progress_str 62 "$(print_salt_state_apply 'common')" - salt-call state.apply -l info common >> $setup_log 2>&1 + if [[ ! $is_analyst ]]; then + set_progress_str 62 "$(print_salt_state_apply 'common')" + salt-call state.apply -l info common >> $setup_log 2>&1 + fi - if [[ ! $is_helix && ! $is_receiver && ! $is_idh ]]; then + if [[ ! $is_helix && ! $is_receiver && ! $is_idh && ! $is_analyst ]]; then set_progress_str 62 "$(print_salt_state_apply 'nginx')" salt-call state.apply -l info nginx >> $setup_log 2>&1 fi @@ -918,6 +1030,8 @@ echo "1" > /root/accept_changes set_progress_str 77 "$(print_salt_state_apply 'fleet.event_update-custom-hostname')" pillar_override="{\"global\":{\"fleet_custom_hostname\": \"$FLEETCUSTOMHOSTNAME\"}}" salt-call state.apply -l info fleet.event_update-custom-hostname pillar="$pillar_override" >> $setup_log 2>&1 + rm -f /etc/pki/managerssl.crt + salt-call state.apply -l info ssl >> $setup_log 2>&1 fi set_progress_str 78 "$(print_salt_state_apply 'so-fleet-setup')" @@ -961,15 +1075,22 @@ echo "1" > /root/accept_changes salt-call state.apply -l info filebeat >> $setup_log 2>&1 fi - set_progress_str 85 'Applying finishing touches' - filter_unused_nics >> $setup_log 2>&1 - network_setup >> $setup_log 2>&1 - so-ssh-harden >> $setup_log 2>&1 - + if [[ ! $is_analyst ]]; then + set_progress_str 85 'Applying finishing touches' + filter_unused_nics >> $setup_log 2>&1 + network_setup >> $setup_log 2>&1 + so-ssh-harden >> $setup_log 2>&1 + fi + if [[ $is_manager || $is_import ]]; then set_progress_str 87 'Adding user to SOC' add_web_user >> $setup_log 2>&1 fi + + if [[ $is_analyst ]]; then + # Remove access to the manager from the analyst workstation + rm -rf /root/.ssh/so.key* + fi set_progress_str 90 'Enabling checkin at boot' checkin_at_boot >> $setup_log 2>&1 @@ -1018,9 +1139,9 @@ else fi if [[ -n $ENDGAMEHOST ]]; then - set_progress_str 99 'Configuring firewall for Endgame SMP' - so-firewall --apply includehost endgame $ENDGAMEHOST >> $setup_log 2>&1 - fi + set_progress_str 99 'Configuring firewall for Endgame SMP' + so-firewall --apply includehost endgame $ENDGAMEHOST >> $setup_log 2>&1 + fi } | whiptail_gauge_post_setup "Running post-installation steps..." diff --git a/setup/so-whiptail b/setup/so-whiptail index c39ec4ff7..2c60b7e3e 100755 --- a/setup/so-whiptail +++ b/setup/so-whiptail @@ -33,6 +33,58 @@ whiptail_airgap() { INTERWEBS=$(echo "${INTERWEBS^^}" | tr -d ' ') } +whiptail_analyst_install() { + + [ -n "$TESTING" ] && return + + read -r -d '' message <<- EOM + + Welcome to the Security Onion Analyst Workstation install! + + Would you like to join this workstation to an existing grid? 
+ + EOM + whiptail --title "$whiptail_title" \ + --yesno "$message" 11 75 --defaultno + +} + +whiptail_analyst_nongrid_iso() { + + [ -n "$TESTING" ] && return + + read -r -d '' message <<- EOM + + You have selected this Analyst workstation to be independent. + + Would you still like to have the graphical interface loaded at boot? + + NOTE: Selecting no will exit without making changes. + + EOM + whiptail --title "$whiptail_title" \ + --yesno "$message" 11 75 --defaultno + +} + +whiptail_analyst_nongrid_network() { + + [ -n "$TESTING" ] && return + + read -r -d '' message <<- EOM + + You have selected this Analyst workstation to be independent. + + Would you still like to install and load the graphical interface? + + NOTE: Selecting no will exit without making changes. + + EOM + whiptail --title "$whiptail_title" \ + --yesno "$message" 11 75 --defaultno + +} + whiptail_avoid_default_hostname() { [ -n "$TESTING" ] && return @@ -434,9 +486,9 @@ whiptail_end_settings() { if [[ $is_idh ]]; then __append_end_msg "IDH Services Enabled:" - for service in ${idh_services[@]}; do + for service in ${idh_services[@]}; do __append_end_msg "- $service" - done + done fi @@ -578,14 +630,16 @@ whiptail_end_settings() { __append_end_msg " Logstash Batch Size: $LSPIPELINEBATCH" __append_end_msg " Logstash Input Threads: $LSINPUTTHREADS" else - if [[ ! $is_receiver ]]; then - __append_end_msg "Elasticsearch Heap Size: $NODE_ES_HEAP_SIZE" - __append_end_msg "Elasticsearch Storage Space: ${log_size_limit}GB" + if [[ ! $is_analyst ]]; then + if [[ ! $is_receiver ]]; then + __append_end_msg "Elasticsearch Heap Size: $NODE_ES_HEAP_SIZE" + __append_end_msg "Elasticsearch Storage Space: ${log_size_limit}GB" + fi + __append_end_msg "Logstash Heap Size: $NODE_LS_HEAP_SIZE" + __append_end_msg "Logstash Worker Count: $LSPIPELINEWORKERS" + __append_end_msg "Logstash Batch Size: $LSPIPELINEBATCH" + __append_end_msg "Logstash Input Threads: $LSINPUTTHREADS" fi - __append_end_msg "Logstash Heap Size: $NODE_LS_HEAP_SIZE" - __append_end_msg "Logstash Worker Count: $LSPIPELINEWORKERS" - __append_end_msg "Logstash Batch Size: $LSPIPELINEBATCH" - __append_end_msg "Logstash Input Threads: $LSINPUTTHREADS" fi @@ -725,6 +779,17 @@ whiptail_homenet_sensor() { export HNSENSOR } + whiptail_idh_preferences() { + + [ -n "$TESTING" ] && return + + idh_preferences=$(whiptail --title "$whiptail_title" --radiolist \ + "\nBy default, the IDH services selected in the previous screen will be bound to all interfaces and IP addresses on this system.\n\nIf you would like to prevent IDH services from being published on this system's management IP, you can select the option below." 20 75 5 \ + "$MAINIP" "Disable IDH services on this management IP " OFF 3>&1 1>&2 2>&3 ) + + local exitstatus=$? 
+ whiptail_check_exitstatus $exitstatus +} whiptail_idh_services() { @@ -867,23 +932,13 @@ whiptail_install_type_other() { [ -n "$TESTING" ] && return - # so-analyst-install will only work with a working network connection - # so only show it on network installs for now - if [[ $setup_type == 'network' ]]; then - install_type=$(whiptail --title "$whiptail_title" --radiolist \ - "Choose distributed node type:" 9 65 2 \ - "ANALYST" "Quit setup and run so-analyst-install " ON \ - "HELIXSENSOR" "Create a Helix sensor " OFF \ - 3>&1 1>&2 2>&3 + install_type=$(whiptail --title "$whiptail_title" --radiolist \ + "Choose node type:" 9 65 2 \ + "ANALYST" "Setup will run 'so-setup analyst' " ON \ + "HELIXSENSOR" "Create a Helix sensor " OFF \ + 3>&1 1>&2 2>&3 ) - else - install_type=$(whiptail --title "$whiptail_title" --radiolist \ - "Choose distributed node type:" 8 65 1 \ - "HELIXSENSOR" "Create a Helix sensor " ON \ - 3>&1 1>&2 2>&3 - ) - fi - + local exitstatus=$? whiptail_check_exitstatus $exitstatus @@ -1473,6 +1528,14 @@ whiptail_oinkcode() { #TODO: helper function to display error message or exit if batch mode # exit_if_batch <"Error string"> +whiptail_error_message() { + + local error_message=$1 # message to be displayed + + whiptail --title "$whiptail_title" --msgbox "$error_message" 10 75 + +} + whiptail_passwords_dont_match() { whiptail --title "$whiptail_title" --msgbox "Passwords don't match. Please re-enter." 8 75 @@ -1545,40 +1608,37 @@ whiptail_patch_schedule_select_hours() { [ -n "$TESTING" ] && return - # Select the hours to patch - PATCHSCHEDULEHOURS=$(whiptail --title "$whiptail_title" --checklist \ - "At which time, UTC, do you want to apply OS patches on the selected days?" 22 75 13 \ - 00:00 "" OFF \ - 01:00 "" OFF \ - 02:00 "" ON \ - 03:00 "" OFF \ - 04:00 "" OFF \ - 05:00 "" OFF \ - 06:00 "" OFF \ - 07:00 "" OFF \ - 08:00 "" OFF \ - 09:00 "" OFF \ - 10:00 "" OFF \ - 11:00 "" OFF \ - 12:00 "" OFF \ - 13:00 "" OFF \ - 14:00 "" OFF \ - 15:00 "" OFF \ - 16:00 "" OFF \ - 17:00 "" OFF \ - 18:00 "" OFF \ - 19:00 "" OFF \ - 20:00 "" OFF \ - 21:00 "" OFF \ - 22:00 "" OFF \ - 23:00 "" OFF 3>&1 1>&2 2>&3) - - local exitstatus=$? - whiptail_check_exitstatus $exitstatus - - PATCHSCHEDULEHOURS=$(echo "$PATCHSCHEDULEHOURS" | tr -d '"') - - IFS=' ' read -ra PATCHSCHEDULEHOURS <<< "$PATCHSCHEDULEHOURS" + # Select the hours to patch + PATCHSCHEDULEHOURS=$(whiptail --title "$whiptail_title" --checklist \ + "At which time, UTC, do you want to apply OS patches on the selected days?" 22 75 13 \ + 00:00 "" OFF \ + 01:00 "" OFF \ + 02:00 "" ON \ + 03:00 "" OFF \ + 04:00 "" OFF \ + 05:00 "" OFF \ + 06:00 "" OFF \ + 07:00 "" OFF \ + 08:00 "" OFF \ + 09:00 "" OFF \ + 10:00 "" OFF \ + 11:00 "" OFF \ + 12:00 "" OFF \ + 13:00 "" OFF \ + 14:00 "" OFF \ + 15:00 "" OFF \ + 16:00 "" OFF \ + 17:00 "" OFF \ + 18:00 "" OFF \ + 19:00 "" OFF \ + 20:00 "" OFF \ + 21:00 "" OFF \ + 22:00 "" OFF \ + 23:00 "" OFF 3>&1 1>&2 2>&3) + local exitstatus=$? + whiptail_check_exitstatus $exitstatus + PATCHSCHEDULEHOURS=$(echo "$PATCHSCHEDULEHOURS" | tr -d '"') + IFS=' ' read -ra PATCHSCHEDULEHOURS <<< "$PATCHSCHEDULEHOURS" } @@ -1923,10 +1983,10 @@ whiptail_suricata_pins() { done if [[ $is_node && $is_sensor && ! 
$is_eval ]]; then - local PROCS=$(expr $lb_procs / 2) - if [ "$PROCS" -lt 1 ]; then PROCS=1; else PROCS=$PROCS; fi + local PROCS=$(expr $lb_procs / 2) + if [ "$PROCS" -lt 1 ]; then PROCS=1; else PROCS=$PROCS; fi else - local PROCS=$lb_procs + local PROCS=$lb_procs fi SURIPINS=$(whiptail --noitem --title "$whiptail_title" --checklist "Please select $PROCS cores to pin Suricata to:" 20 75 12 "${filtered_core_str[@]}" 3>&1 1>&2 2>&3 ) @@ -1978,7 +2038,7 @@ whiptail_you_sure() { read -r -d '' you_sure_text <<- EOM Welcome to Security Onion Setup! - You can use Setup for lots of different use cases from a small standalone installation to a large distributed deployment for your enterprise. Don't forget to review the documentation at: + You can use Setup for several different use cases, from a small standalone installation to a large distributed deployment for your enterprise. Don't forget to review the documentation at: https://docs.securityonion.net Setup uses keyboard navigation and you can use arrow keys to move around. Certain screens may provide a list and ask you to select one or more items from that list. You can use [SPACE] to select items and [ENTER] to proceed to the next screen. @@ -2006,10 +2066,10 @@ whiptail_zeek_pins() { done if [[ $is_smooshed ]]; then - local PROCS=$(expr $lb_procs / 2) - if [ "$PROCS" -lt 1 ]; then PROCS=1; else PROCS=$PROCS; fi + local PROCS=$(expr $lb_procs / 2) + if [ "$PROCS" -lt 1 ]; then PROCS=1; else PROCS=$PROCS; fi else - local PROCS=$lb_procs + local PROCS=$lb_procs fi ZEEKPINS=$(whiptail --noitem --title "$whiptail_title" --checklist "Please select $PROCS cores to pin Zeek to:" 20 75 12 "${cpu_core_list_whiptail[@]}" 3>&1 1>&2 2>&3 ) diff --git a/sigs/securityonion-2.3.110-20220404.iso.sig b/sigs/securityonion-2.3.110-20220404.iso.sig new file mode 100644 index 000000000..bd8215953 Binary files /dev/null and b/sigs/securityonion-2.3.110-20220404.iso.sig differ diff --git a/sigs/securityonion-2.3.110-20220405.iso.sig b/sigs/securityonion-2.3.110-20220405.iso.sig new file mode 100644 index 000000000..bc4648f17 Binary files /dev/null and b/sigs/securityonion-2.3.110-20220405.iso.sig differ diff --git a/sigs/securityonion-2.3.110-20220407.iso.sig b/sigs/securityonion-2.3.110-20220407.iso.sig new file mode 100644 index 000000000..2ea694428 Binary files /dev/null and b/sigs/securityonion-2.3.110-20220407.iso.sig differ diff --git a/sigs/securityonion-2.3.120-20220425.iso.sig b/sigs/securityonion-2.3.120-20220425.iso.sig new file mode 100644 index 000000000..ba8743ad3 Binary files /dev/null and b/sigs/securityonion-2.3.120-20220425.iso.sig differ diff --git a/so-analyst-install b/so-analyst-install index 50417c23d..ac92afd77 100755 --- a/so-analyst-install +++ b/so-analyst-install @@ -15,6 +15,6 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -cd "$(dirname "$0")/salt/common/tools/sbin" || exit 255 +cd "$(dirname "$0")/setup" || exit 255 -./so-analyst-install +./so-setup analyst "$@"
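
so-analyst-install is now a thin wrapper around the unified setup script, so the invocations below are equivalent. A short usage sketch, assuming a checkout of this repo on a CentOS 7 host:

    # From the repo root:
    sudo ./so-analyst-install            # cd's into setup/ and runs: ./so-setup analyst "$@"
    # Equivalent direct form:
    cd setup && sudo ./so-setup analyst
    # A standalone (non-grid) workstation is built masterless, which is what analyst_salt_local does:
    sudo salt-call state.apply workstation --local --file-root=../salt/ -l info
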