diff --git a/.github/workflows/pythontest.yml b/.github/workflows/pythontest.yml index c0e692730..241245044 100644 --- a/.github/workflows/pythontest.yml +++ b/.github/workflows/pythontest.yml @@ -28,4 +28,4 @@ jobs: flake8 ${{ matrix.python-code-path }} --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics - name: Test with pytest run: | - pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=90 --cov-config=${{ matrix.python-code-path }}/pytest.ini + pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=100 --cov-config=${{ matrix.python-code-path }}/pytest.ini diff --git a/README.md b/README.md index 0662e05be..b10115e77 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,20 @@ -## Security Onion 2.3.120 +## Security Onion 2.3.130 -Security Onion 2.3.120 is here! +Security Onion 2.3.130 is here! ## Screenshots Alerts -![Alerts](./assets/images/screenshots/alerts-1.png) +![Alerts](./assets/images/screenshots/alerts.png) + +Dashboards +![Dashboards](./assets/images/screenshots/dashboards.png) Hunt -![Hunt](./assets/images/screenshots/hunt-1.png) +![Hunt](./assets/images/screenshots/hunt.png) + +Cases +![Cases](./assets/images/screenshots/cases-comments.png) ### Release Notes diff --git a/VERIFY_ISO.md b/VERIFY_ISO.md index ce56cd48c..09f6c368d 100644 --- a/VERIFY_ISO.md +++ b/VERIFY_ISO.md @@ -1,18 +1,18 @@ -### 2.3.120-20220425 ISO image built on 2022/04/25 +### 2.3.130-20220607 ISO image built on 2022/06/07 ### Download and Verify -2.3.120-20220425 ISO image: -https://download.securityonion.net/file/securityonion/securityonion-2.3.120-20220425.iso +2.3.130-20220607 ISO image: +https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso -MD5: C99729E452B064C471BEF04532F28556 -SHA1: 60BF07D5347C24568C7B793BFA9792E98479CFBF -SHA256: 
CD17D0D7CABE21D45FA45E1CF91C5F24EB9608C79FF88480134E5592AFDD696E +MD5: 0034D6A9461C04357AFF512875408A4C +SHA1: BF80EEB101C583153CAD8E185A7DB3173FD5FFE8 +SHA256: 15943623B96D8BB4A204A78668447F36B54A63ABA5F8467FBDF0B25C5E4E6078 Signature for ISO image: -https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.120-20220425.iso.sig +https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig Signing key: https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS @@ -26,22 +26,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/ma Download the signature file for the ISO: ``` -wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.120-20220425.iso.sig +wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig ``` Download the ISO image: ``` -wget https://download.securityonion.net/file/securityonion/securityonion-2.3.120-20220425.iso +wget https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso ``` Verify the downloaded ISO image using the signature file: ``` -gpg --verify securityonion-2.3.120-20220425.iso.sig securityonion-2.3.120-20220425.iso +gpg --verify securityonion-2.3.130-20220607.iso.sig securityonion-2.3.130-20220607.iso ``` The output should show "Good signature" and the Primary key fingerprint should match what's shown below: ``` -gpg: Signature made Mon 25 Apr 2022 08:20:40 AM EDT using RSA key ID FE507013 +gpg: Signature made Tue 07 Jun 2022 01:27:20 PM EDT using RSA key ID FE507013 gpg: Good signature from "Security Onion Solutions, LLC " gpg: WARNING: This key is not certified with a trusted signature! gpg: There is no indication that the signature belongs to the owner. 
diff --git a/VERSION b/VERSION index ef4e79170..3994a975c 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2.3.130 +2.3.140 diff --git a/assets/images/screenshots/alerts-1.png b/assets/images/screenshots/alerts-1.png deleted file mode 100644 index 099710f4f..000000000 Binary files a/assets/images/screenshots/alerts-1.png and /dev/null differ diff --git a/assets/images/screenshots/alerts.png b/assets/images/screenshots/alerts.png new file mode 100644 index 000000000..ac8a1c828 Binary files /dev/null and b/assets/images/screenshots/alerts.png differ diff --git a/assets/images/screenshots/cases-comments.png b/assets/images/screenshots/cases-comments.png new file mode 100644 index 000000000..23bc00f95 Binary files /dev/null and b/assets/images/screenshots/cases-comments.png differ diff --git a/assets/images/screenshots/dashboards.png b/assets/images/screenshots/dashboards.png new file mode 100644 index 000000000..9f07adedc Binary files /dev/null and b/assets/images/screenshots/dashboards.png differ diff --git a/assets/images/screenshots/hunt-1.png b/assets/images/screenshots/hunt-1.png deleted file mode 100644 index 089713847..000000000 Binary files a/assets/images/screenshots/hunt-1.png and /dev/null differ diff --git a/assets/images/screenshots/hunt.png b/assets/images/screenshots/hunt.png new file mode 100644 index 000000000..f4c4347a8 Binary files /dev/null and b/assets/images/screenshots/hunt.png differ diff --git a/pillar/logstash/nodes.sls b/pillar/logstash/nodes.sls index 18c4b39bf..935574ff9 100644 --- a/pillar/logstash/nodes.sls +++ b/pillar/logstash/nodes.sls @@ -2,7 +2,7 @@ {% set cached_grains = salt.saltutil.runner('cache.grains', tgt='*') %} {% for minionid, ip in salt.saltutil.runner( 'mine.get', - tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix ', + tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or 
G@role:so-heavynode or G@role:so-receiver or G@role:so-helix', fun='network.ip_addrs', tgt_type='compound') | dictsort() %} diff --git a/salt/common/tools/sbin/so-user b/salt/common/tools/sbin/so-user index 2d4e1076d..05a44a19d 100755 --- a/salt/common/tools/sbin/so-user +++ b/salt/common/tools/sbin/so-user @@ -44,7 +44,7 @@ operation=$1 email=$2 role=$3 -kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434} +kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434/admin} databasePath=${KRATOS_DB_PATH:-/opt/so/conf/kratos/db/db.sqlite} databaseTimeout=${KRATOS_DB_TIMEOUT:-5000} bcryptRounds=${BCRYPT_ROUNDS:-12} @@ -408,7 +408,7 @@ function migrateLockedUsers() { # This is a migration function to convert locked users from prior to 2.3.90 # to inactive users using the newer Kratos functionality. This should only # find locked users once. - lockedEmails=$(curl -s http://localhost:4434/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email') + lockedEmails=$(curl -s ${kratosUrl}/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email') if [[ -n "$lockedEmails" ]]; then echo "Disabling locked users..." 
for email in $lockedEmails; do diff --git a/salt/common/tools/sbin/soup b/salt/common/tools/sbin/soup index fe84bf383..2e76bd9ef 100755 --- a/salt/common/tools/sbin/soup +++ b/salt/common/tools/sbin/soup @@ -423,6 +423,7 @@ preupgrade_changes() { [[ "$INSTALLEDVERSION" == 2.3.90 || "$INSTALLEDVERSION" == 2.3.91 ]] && up_to_2.3.100 [[ "$INSTALLEDVERSION" == 2.3.100 ]] && up_to_2.3.110 [[ "$INSTALLEDVERISON" == 2.3.110 ]] && up_to_2.3.120 + [[ "$INSTALLEDVERSION" == 2.3.120 ]] && up_to_2.3.130 true } @@ -437,6 +438,8 @@ postupgrade_changes() { [[ "$POSTVERSION" == 2.3.90 || "$POSTVERSION" == 2.3.91 ]] && post_to_2.3.100 [[ "$POSTVERSION" == 2.3.100 ]] && post_to_2.3.110 [[ "$POSTVERSION" == 2.3.110 ]] && post_to_2.3.120 + [[ "$POSTVERSION" == 2.3.120 ]] && post_to_2.3.130 + true } @@ -507,6 +510,11 @@ post_to_2.3.120() { sed -i '/so-thehive-es/d;/so-thehive/d;/so-cortex/d' /opt/so/conf/so-status/so-status.conf } +post_to_2.3.130() { + echo "Post Processing for 2.3.130" + POSTVERSION=2.3.130 +} + stop_salt_master() { @@ -765,7 +773,12 @@ up_to_2.3.120() { so-thehive-stop so-thehive-es-stop so-cortex-stop - } +} + +up_to_2.3.130() { + # Remove file for nav update + rm -f /opt/so/conf/navigator/layers/nav_layer_playbook.json +} verify_upgradespace() { CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//') diff --git a/salt/filebeat/etc/filebeat.yml b/salt/filebeat/etc/filebeat.yml index fb8f3768c..04a3351a3 100644 --- a/salt/filebeat/etc/filebeat.yml +++ b/salt/filebeat/etc/filebeat.yml @@ -270,6 +270,7 @@ filebeat.inputs: {%- if RITAENABLED %} - type: filestream + id: rita-beacon paths: - /nsm/rita/beacons.csv exclude_lines: ['^Score', '^Source', '^Domain', '^No results'] @@ -285,6 +286,7 @@ filebeat.inputs: index: "so-rita" - type: filestream + id: rita-connection paths: - /nsm/rita/long-connections.csv - /nsm/rita/open-connections.csv @@ -301,6 +303,7 @@ filebeat.inputs: index: "so-rita" - type: filestream + id: rita-dns paths: - 
/nsm/rita/exploded-dns.csv exclude_lines: ['^Domain', '^No results'] @@ -446,6 +449,13 @@ output.logstash: # The Logstash hosts hosts: +{# dont let filebeat send to a node designated as dmz #} +{% import_yaml 'logstash/dmz_nodes.yaml' as dmz_nodes -%} +{% if dmz_nodes.logstash.dmz_nodes -%} +{% set dmz_nodes = dmz_nodes.logstash.dmz_nodes -%} +{% else -%} +{% set dmz_nodes = [] -%} +{% endif -%} {%- if grains.role in ['so-sensor', 'so-fleet', 'so-node', 'so-idh'] %} {%- set LOGSTASH = namespace() %} {%- set LOGSTASH.count = 0 %} @@ -454,8 +464,10 @@ output.logstash: {%- for node_type, node_details in node_data.items() | sort -%} {%- if node_type in ['manager', 'managersearch', 'standalone', 'receiver' ] %} {%- for hostname in node_data[node_type].keys() %} -{%- set LOGSTASH.count = LOGSTASH.count + 1 %} +{%- if hostname not in dmz_nodes %} +{%- set LOGSTASH.count = LOGSTASH.count + 1 %} - "{{ hostname }}:5644" #{{ node_details[hostname].ip }} +{%- endif %} {%- endfor %} {%- endif %} {%- if LOGSTASH.count > 1 %} diff --git a/salt/kratos/files/kratos.yaml b/salt/kratos/files/kratos.yaml index d10cdd1e5..b300eac48 100644 --- a/salt/kratos/files/kratos.yaml +++ b/salt/kratos/files/kratos.yaml @@ -37,7 +37,7 @@ selfservice: ui_url: https://{{ WEBACCESS }}/login/ default_browser_return_url: https://{{ WEBACCESS }}/ - whitelisted_return_urls: + allowed_return_urls: - http://127.0.0.1 log: @@ -59,7 +59,10 @@ hashers: cost: 12 identity: - default_schema_url: file:///kratos-conf/schema.json + default_schema_id: default + schemas: + - id: default + url: file:///kratos-conf/schema.json courier: smtp: diff --git a/salt/logstash/dmz_nodes.yaml b/salt/logstash/dmz_nodes.yaml new file mode 100644 index 000000000..982f72080 --- /dev/null +++ b/salt/logstash/dmz_nodes.yaml @@ -0,0 +1,9 @@ +# Do not edit this file. Copy it to /opt/so/saltstack/local/salt/logstash/ and make changes there. It should be formatted as a list. 
+# logstash: +# dmz_nodes: +# - mydmznodehostname1 +# - mydmznodehostname2 +# - mydmznodehostname3 + +logstash: + dmz_nodes: diff --git a/salt/nginx/etc/nginx.conf b/salt/nginx/etc/nginx.conf index 2a5fe95f7..f82d63c1a 100644 --- a/salt/nginx/etc/nginx.conf +++ b/salt/nginx/etc/nginx.conf @@ -130,6 +130,8 @@ http { proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header Proxy ""; proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; } error_page 500 502 503 504 /50x.html; location = /usr/share/nginx/html/50x.html { @@ -330,7 +332,9 @@ http { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header Proxy ""; - proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; } {%- endif %} diff --git a/salt/nginx/files/nav_layer_playbook.json b/salt/nginx/files/nav_layer_playbook.json index 69db796e8..a26f26542 100644 --- a/salt/nginx/files/nav_layer_playbook.json +++ b/salt/nginx/files/nav_layer_playbook.json @@ -1,27 +1,52 @@ { - "name": "Playbook", - "version": "3.0", - "domain": "mitre-enterprise", - "description": "Current Coverage of Playbook", + "name": "Playbook Coverage", + "versions": { + "attack": "11", + "navigator": "4.6.4", + "layer": "4.3" + }, + "domain": "enterprise-attack", + "description": "", "filters": { - "stages": ["act"], "platforms": [ - "windows", - "linux", - "mac" + "Linux", + "macOS", + "Windows", + "Azure AD", + "Office 365", + "SaaS", + "IaaS", + "Google Workspace", + "PRE", + "Network", + "Containers" ] }, "sorting": 0, - "viewMode": 0, + "layout": { + "layout": "side", + "aggregateFunction": "average", + "showID": false, + "showName": true, + "showAggregateScores": false, + "countUnscored": false + }, "hideDisabled": false, "techniques": [], "gradient": { - "colors": 
["#ff6666", "#ffe766", "#8ec843"], + "colors": [ + "#ff6666ff", + "#ffe766ff", + "#8ec843ff" + ], "minValue": 0, "maxValue": 100 }, + "legendItems": [], "metadata": [], + "links": [], "showTacticRowBackground": false, "tacticRowBackground": "#dddddd", - "selectTechniquesAcrossTactics": true -} + "selectTechniquesAcrossTactics": true, + "selectSubtechniquesWithParent": false +} \ No newline at end of file diff --git a/salt/nginx/files/navigator_config.json b/salt/nginx/files/navigator_config.json index 3fd87139b..2f4672b48 100644 --- a/salt/nginx/files/navigator_config.json +++ b/salt/nginx/files/navigator_config.json @@ -1,58 +1,62 @@ {%- set URL_BASE = salt['pillar.get']('global:url_base', '') %} { - "enterprise_attack_url": "assets/enterprise-attack.json", - "pre_attack_url": "assets/pre-attack.json", - "mobile_data_url": "assets/mobile-attack.json", - "taxii_server": { - "enabled": false, - "url": "https://cti-taxii.mitre.org/", - "collections": { - "enterprise_attack": "95ecc380-afe9-11e4-9b6c-751b66dd541e", - "pre_attack": "062767bd-02d2-4b72-84ba-56caef0f8658", - "mobile_attack": "2f669986-b40b-4423-b720-4396ca6a462b" + "versions": [ + { + "name": "ATT&CK v11", + "version": "11", + "domains": [ + { + "name": "Enterprise", + "identifier": "enterprise-attack", + "data": ["assets/so/enterprise-attack.json"] + } + ] } + ], + + "custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}], + + "default_layers": { + "enabled": true, + "urls": ["assets/so/nav_layer_playbook.json"] }, - "domain": "mitre-enterprise", - - "custom_context_menu_items": [ {"label": "view related plays","url": " 
https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}], - -"default_layers": { - "enabled": true, - "urls": [ - "assets/playbook.json" - ] - }, - "comment_color": "yellow", - + "link_color": "blue", + "banner": "", "features": [ + {"name": "leave_site_dialog", "enabled": true, "description": "Disable to remove the dialog prompt when leaving site."}, {"name": "tabs", "enabled": true, "description": "Disable to remove the ability to open new tabs."}, {"name": "selecting_techniques", "enabled": true, "description": "Disable to remove the ability to select techniques."}, {"name": "header", "enabled": true, "description": "Disable to remove the header containing 'MITRE ATT&CK Navigator' and the link to the help page. The help page can still be accessed from the new tab menu."}, + {"name": "subtechniques", "enabled": true, "description": "Disable to remove all sub-technique features from the interface."}, {"name": "selection_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [ {"name": "search", "enabled": true, "description": "Disable to remove the technique search panel from the interface."}, {"name": "multiselect", "enabled": true, "description": "Disable to remove the multiselect panel from interface."}, {"name": "deselect_all", "enabled": true, "description": "Disable to remove the deselect all button from the interface."} ]}, - {"name": "layer_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [ - {"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and metadata) panel from the interface. 
Note that the layer can still be renamed in the tab."}, + {"name": "layer_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [ + {"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and layer metadata) panel from the interface. Note that the layer can still be renamed in the tab."}, {"name": "download_layer", "enabled": true, "description": "Disable to remove the button to download the layer."}, - {"name": "export_render", "enabled": true, "description": "Disable to the remove the button to render the current layer."}, - {"name": "export_excel", "enabled": true, "description": "Disable to the remove the button to export the current layer to MS Excel (.xlsx) format."}, - {"name": "filters", "enabled": true, "description": "Disable to the remove the filters panel from interface."}, - {"name": "sorting", "enabled": true, "description": "Disable to the remove the sorting button from the interface."}, - {"name": "color_setup", "enabled": true, "description": "Disable to the remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."}, - {"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to the remove the hide disabled techniques button from the interface."}, - {"name": "toggle_view_mode", "enabled": true, "description": "Disable to the remove the toggle view mode button from interface."}, - {"name": "legend", "enabled": true, "description": "Disable to the remove the legend panel from the interface."} + {"name": "export_render", "enabled": true, "description": "Disable to remove the button to render the current layer."}, + {"name": "export_excel", "enabled": true, "description": "Disable to remove the button to export the current layer to MS Excel (.xlsx) format."}, + {"name": "filters", "enabled": true, "description": "Disable to remove the filters panel from interface."}, + {"name": 
"sorting", "enabled": true, "description": "Disable to remove the sorting button from the interface."}, + {"name": "color_setup", "enabled": true, "description": "Disable to remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."}, + {"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to remove the hide disabled techniques button from the interface."}, + {"name": "layout_controls", "enabled": true, "description": "Disable to remove the ability to change the current matrix layout."}, + {"name": "legend", "enabled": true, "description": "Disable to remove the legend panel from the interface."} ]}, - {"name": "technique_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [ - {"name": "disable_techniques", "enabled": true, "description": "Disable to the remove the ability to disable techniques."}, - {"name": "manual_color", "enabled": true, "description": "Disable to the remove the ability to assign manual colors to techniques."}, - {"name": "scoring", "enabled": true, "description": "Disable to the remove the ability to score techniques."}, - {"name": "comments", "enabled": true, "description": "Disable to the remove the ability to add comments to techniques."}, + {"name": "technique_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [ + {"name": "disable_techniques", "enabled": true, "description": "Disable to remove the ability to disable techniques."}, + {"name": "manual_color", "enabled": true, "description": "Disable to remove the ability to assign manual colors to techniques."}, + {"name": "scoring", "enabled": true, "description": "Disable to remove the ability to score techniques."}, + {"name": "comments", "enabled": true, "description": "Disable to remove the ability to add comments to techniques."}, + {"name": "comment_underline", "enabled": true, "description": "Disable to remove 
the comment underline effect on techniques."}, + {"name": "links", "enabled": true, "description": "Disable to remove the ability to assign hyperlinks to techniques."}, + {"name": "link_underline", "enabled": true, "description": "Disable to remove the hyperlink underline effect on techniques."}, + {"name": "metadata", "enabled": true, "description": "Disable to remove the ability to add metadata to techniques."}, {"name": "clear_annotations", "enabled": true, "description": "Disable to remove the button to clear all annotations on the selected techniques."} ]} ] diff --git a/salt/nginx/init.sls b/salt/nginx/init.sls index a195eb400..cad20996e 100644 --- a/salt/nginx/init.sls +++ b/salt/nginx/init.sls @@ -50,7 +50,7 @@ nginxtmp: navigatorconfig: file.managed: - - name: /opt/so/conf/navigator/navigator_config.json + - name: /opt/so/conf/navigator/config.json - source: salt://nginx/files/navigator_config.json - user: 939 - group: 939 @@ -59,7 +59,7 @@ navigatorconfig: navigatordefaultlayer: file.managed: - - name: /opt/so/conf/navigator/nav_layer_playbook.json + - name: /opt/so/conf/navigator/layers/nav_layer_playbook.json - source: salt://nginx/files/nav_layer_playbook.json - user: 939 - group: 939 @@ -69,7 +69,7 @@ navigatordefaultlayer: navigatorpreattack: file.managed: - - name: /opt/so/conf/navigator/pre-attack.json + - name: /opt/so/conf/navigator/layers/pre-attack.json - source: salt://nginx/files/pre-attack.json - user: 939 - group: 939 @@ -78,7 +78,7 @@ navigatorpreattack: navigatorenterpriseattack: file.managed: - - name: /opt/so/conf/navigator/enterprise-attack.json + - name: /opt/so/conf/navigator/layers/enterprise-attack.json - source: salt://nginx/files/enterprise-attack.json - user: 939 - group: 939 @@ -99,10 +99,8 @@ so-nginx: - /etc/pki/managerssl.crt:/etc/pki/nginx/server.crt:ro - /etc/pki/managerssl.key:/etc/pki/nginx/server.key:ro # ATT&CK Navigator binds - - 
/opt/so/conf/navigator/navigator_config.json:/opt/socore/html/navigator/assets/config.json:ro - - /opt/so/conf/navigator/nav_layer_playbook.json:/opt/socore/html/navigator/assets/playbook.json:ro - - /opt/so/conf/navigator/enterprise-attack.json:/opt/socore/html/navigator/assets/enterprise-attack.json:ro - - /opt/so/conf/navigator/pre-attack.json:/opt/socore/html/navigator/assets/pre-attack.json:ro + - /opt/so/conf/navigator/layers/:/opt/socore/html/navigator/assets/so:ro + - /opt/so/conf/navigator/config.json:/opt/socore/html/navigator/assets/config.json:ro {% endif %} {% if ISAIRGAP is sameas true %} - /nsm/repo:/opt/socore/html/repo:ro diff --git a/salt/playbook/init.sls b/salt/playbook/init.sls index e437ae350..57195c21c 100644 --- a/salt/playbook/init.sls +++ b/salt/playbook/init.sls @@ -42,6 +42,15 @@ query_updatwebhooks: - connection_user: root - connection_pass: {{ MYSQLPASS }} +query_updatename: + mysql_query.run: + - database: playbook + - query: "update custom_fields set name = 'Custom Filter' where id = 21;" + - connection_host: {{ MAINIP }} + - connection_port: 3306 + - connection_user: root + - connection_pass: {{ MYSQLPASS }} + query_updatepluginurls: mysql_query.run: - database: playbook diff --git a/salt/sensoroni/files/analyzers/README.md b/salt/sensoroni/files/analyzers/README.md index fe311725a..a86730734 100644 --- a/salt/sensoroni/files/analyzers/README.md +++ b/salt/sensoroni/files/analyzers/README.md @@ -12,11 +12,32 @@ The built-in analyzers support the following observable types: | Greynoise |✗ |✗|✓|✗|✗|✗|✗|✗|✗| | JA3er |✗ |✗|✗|✓|✗|✗|✗|✗|✗| | LocalFile |✓ |✓|✓|✓|✗|✓|✗|✓|✗| +| Malware Hash Registry |✗ |✓|✗|✗|✗|✗|✗|✓|✗| | Pulsedive |✓ |✓|✓|✗|✗|✗|✓|✓|✓| | Spamhaus |✗ |✗|✓|✗|✗|✗|✗|✗|✗| | Urlhaus |✗ |✗|✗|✗|✗|✗|✗|✓|✗| | Urlscan |✗ |✗|✗|✗|✗|✗|✗|✓|✗| | Virustotal |✓ |✓|✓|✗|✗|✗|✗|✓|✗| +| WhoisLookup |✓ |✗|✗|✗|✗|✗|✓|✗|✗| + +## Authentication +Many analyzers require authentication, via an API key or similar. 
The table below illustrates which analyzers require authentication. + +| Name | Authn Req'd| +--------------------------|------------| +[AlienVault OTX](https://otx.alienvault.com/api) |✓| +[EmailRep](https://emailrep.io/key) |✓| +[GreyNoise](https://www.greynoise.io/plans/community) |✓| +[JA3er](https://ja3er.com/) |✗| +LocalFile |✗| +[Malware Hash Registry](https://hash.cymru.com/docs_whois) |✗| +[Pulsedive](https://pulsedive.com/api/) |✓| +[Spamhaus](https://www.spamhaus.org/dbl/) |✗| +[Urlhaus](https://urlhaus.abuse.ch/) |✗| +[Urlscan](https://urlscan.io/docs/api/) |✓| +[VirusTotal](https://developers.virustotal.com/reference/overview) |✓| +[WhoisLookup](https://github.com/meeb/whoisit) |✗| + ## Developer Guide diff --git a/salt/sensoroni/files/analyzers/build.sh b/salt/sensoroni/files/analyzers/build.sh index 17e53c6a7..cb7dcbc52 100755 --- a/salt/sensoroni/files/analyzers/build.sh +++ b/salt/sensoroni/files/analyzers/build.sh @@ -1,15 +1,39 @@ #!/bin/bash -HOME_DIR=$(dirname "$0") -TARGET_DIR=${1:-.} +COMMAND=$1 +SENSORONI_CONTAINER=${SENSORONI_CONTAINER:-so-sensoroni} -PATH=$PATH:/usr/local/bin +function ci() { + HOME_DIR=$(dirname "$0") + TARGET_DIR=${1:-.} -if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then - echo "Missing dependencies. Consider running the following command:" - echo " python -m pip install flake8 pytest pytest-cov" - exit 1 + PATH=$PATH:/usr/local/bin + + if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then + echo "Missing dependencies. 
Consider running the following command:" + echo " python -m pip install flake8 pytest pytest-cov" + exit 1 + fi + + flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini" + pytest "$TARGET_DIR" "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100 +} + +function download() { + ANALYZERS=$1 + if [[ $ANALYZERS = "all" ]]; then + ANALYZERS="*/" + fi + for ANALYZER in $ANALYZERS; do + rm -fr $ANALYZER/site-packages + mkdir -p $ANALYZER/source-packages + rm -fr $ANALYZER/source-packages/* + docker exec -it $SENSORONI_CONTAINER pip download -r /opt/sensoroni/analyzers/$ANALYZER/requirements.txt -d /opt/sensoroni/analyzers/$ANALYZER/source-packages + done +} + +if [[ "$COMMAND" == "download" ]]; then + download "$2" +else + ci fi - -flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini" -pytest "$TARGET_DIR" "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100 diff --git a/salt/sensoroni/files/analyzers/emailrep/README.md b/salt/sensoroni/files/analyzers/emailrep/README.md new file mode 100644 index 000000000..912ff19c0 --- /dev/null +++ b/salt/sensoroni/files/analyzers/emailrep/README.md @@ -0,0 +1,17 @@ +# EmailRep + +## Description +Submit an email address to EmailRepIO for analysis. + +## Configuration Requirements + +``api_key`` - API key used for communication with the EmailRepIO API + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + emailrep: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep.py b/salt/sensoroni/files/analyzers/emailrep/emailrep.py index 4e9a8fee9..d48977a07 100755 --- a/salt/sensoroni/files/analyzers/emailrep/emailrep.py +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep.py @@ -31,10 +31,10 @@ def prepareResults(raw): elif "status" in raw: if raw["reason"] == "invalid email": status = "caution" - summary = "Invalid email address." 
+ summary = "invalid_input" if "exceeded daily limit" in raw["reason"]: status = "caution" - summary = "Exceeded daily request limit." + summary = "excessive_usage" else: status = "caution" summary = "internal_failure" diff --git a/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py b/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py index ecd6010b0..283aef694 100644 --- a/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py +++ b/salt/sensoroni/files/analyzers/emailrep/emailrep_test.py @@ -44,7 +44,7 @@ class TestEmailRepMethods(unittest.TestCase): raw = {"status": "fail", "reason": "invalid email"} results = emailrep.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Invalid email address.") + self.assertEqual(results["summary"], "invalid_input") self.assertEqual(results["status"], "caution") def test_prepareResults_not_suspicious(self): @@ -65,7 +65,7 @@ class TestEmailRepMethods(unittest.TestCase): raw = {"status": "fail", "reason": "exceeded daily limit. 
please wait 24 hrs or visit emailrep.io/key for an api key."} results = emailrep.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Exceeded daily request limit.") + self.assertEqual(results["summary"], "excessive_usage") self.assertEqual(results["status"], "caution") def test_prepareResults_error(self): diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/emailrep/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/greynoise/README.md b/salt/sensoroni/files/analyzers/greynoise/README.md new file mode 100644 index 000000000..4c6d4be4b --- /dev/null +++ b/salt/sensoroni/files/analyzers/greynoise/README.md @@ -0,0 +1,19 @@ +# Greynoise + +## Description +Submit an IP address to Greynoise for analysis. 
+ +## Configuration Requirements + +``api_key`` - API key used for communication with the Greynoise API +``api_version`` - Version of Greynoise API. Default is ``community`` + + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + greynoise: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise.py b/salt/sensoroni/files/analyzers/greynoise/greynoise.py index deeef5414..bf2b98e7a 100755 --- a/salt/sensoroni/files/analyzers/greynoise/greynoise.py +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise.py @@ -36,13 +36,13 @@ def prepareResults(raw): summary = "malicious" elif "unknown" in raw['classification']: status = "caution" - summary = "Results found." + summary = "suspicious" elif "IP not observed scanning the internet or contained in RIOT data set." in raw["message"]: status = "ok" summary = "no_results" elif "Request is not a valid routable IPv4 address" in raw["message"]: status = "caution" - summary = "Invalid IP address." 
+ summary = "invalid_input" else: status = "info" summary = raw["message"] diff --git a/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py b/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py index 768107adb..900a35e8c 100644 --- a/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py +++ b/salt/sensoroni/files/analyzers/greynoise/greynoise_test.py @@ -62,7 +62,7 @@ class TestGreynoiseMethods(unittest.TestCase): raw = {"message": "Request is not a valid routable IPv4 address"} results = greynoise.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Invalid IP address.") + self.assertEqual(results["summary"], "invalid_input") self.assertEqual(results["status"], "caution") def test_prepareResults_not_found(self): @@ -90,7 +90,7 @@ class TestGreynoiseMethods(unittest.TestCase): raw = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"} results = greynoise.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Results found.") + self.assertEqual(results["summary"], "suspicious") self.assertEqual(results["status"], "caution") def test_prepareResults_unknown_message(self): @@ -113,5 +113,5 @@ class TestGreynoiseMethods(unittest.TestCase): conf = {"base_url": "myurl/", "api_key": "abcd1234", "api_version": "community"} with patch('greynoise.greynoise.sendReq', new=MagicMock(return_value=output)) as mock: results = greynoise.analyze(conf, artifactInput) - self.assertEqual(results["summary"], "Results found.") + self.assertEqual(results["summary"], "suspicious") mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl 
b/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/greynoise/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/helpers.py b/salt/sensoroni/files/analyzers/helpers.py index f4ef4a1e6..903e2373b 100644 --- a/salt/sensoroni/files/analyzers/helpers.py +++ b/salt/sensoroni/files/analyzers/helpers.py @@ -1,7 +1,6 @@ import json import os import sys -import yaml def checkSupportedType(meta, artifact_type): @@ -24,5 +23,6 @@ def loadMetadata(file): def loadConfig(path): + import yaml with open(str(path), "r") as conffile: return yaml.safe_load(conffile) diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er.py b/salt/sensoroni/files/analyzers/ja3er/ja3er.py index 330a8dd66..c1018a880 100755 --- a/salt/sensoroni/files/analyzers/ja3er/ja3er.py +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er.py @@ -15,16 +15,16 @@ def prepareResults(raw): if "error" in raw: if "Sorry" in raw["error"]: status = "ok" - summary = "No results found." + summary = "no_results" elif "Invalid hash" in raw["error"]: status = "caution" - summary = "Invalid hash." 
+ summary = "invalid_input" else: status = "caution" summary = "internal_failure" else: status = "info" - summary = "Results found." + summary = "suspicious" results = {'response': raw, 'summary': summary, 'status': status} return results diff --git a/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py b/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py index 8ad22ac69..41de4e9c7 100644 --- a/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py +++ b/salt/sensoroni/files/analyzers/ja3er/ja3er_test.py @@ -38,21 +38,28 @@ class TestJa3erMethods(unittest.TestCase): raw = {"error": "Sorry no values found"} results = ja3er.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "No results found.") + self.assertEqual(results["summary"], "no_results") self.assertEqual(results["status"], "ok") def test_prepareResults_invalidHash(self): raw = {"error": "Invalid hash"} results = ja3er.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Invalid hash.") + self.assertEqual(results["summary"], "invalid_input") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_internal_failure(self): + raw = {"error": "unknown"} + results = ja3er.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") self.assertEqual(results["status"], "caution") def test_prepareResults_info(self): raw = [{"User-Agent": "Blah/5.0", "Count": 24874, "Last_seen": "2022-04-08 16:18:38"}, {"Comment": "Brave browser v1.36.122\n\n", "Reported": "2022-03-28 20:26:42"}] results = ja3er.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Results found.") + self.assertEqual(results["summary"], "suspicious") self.assertEqual(results["status"], "info") def test_analyze(self): @@ -61,5 +68,5 @@ class TestJa3erMethods(unittest.TestCase): conf = {"base_url": "myurl/"} with 
patch('ja3er.ja3er.sendReq', new=MagicMock(return_value=output)) as mock: results = ja3er.analyze(conf, artifactInput) - self.assertEqual(results["summary"], "Results found.") + self.assertEqual(results["summary"], "suspicious") mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/ja3er/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/localfile/README.md b/salt/sensoroni/files/analyzers/localfile/README.md new file mode 100644 index 000000000..ceb40497c --- /dev/null +++ b/salt/sensoroni/files/analyzers/localfile/README.md @@ -0,0 +1,32 @@ +# Localfile + +## Description +Utilize a local CSV file (or multiple) for associating a value to contextual data. + +## Configuration Requirements + +``file_path`` - Path(s) used for CSV files containing associative data. CSV files can be dropped in the analyzer directory, with ``file_path`` specified like ``mycsv.csv``. 
+ +- The value in the first column is used for matching +- Header information should be supplied, as it is used for dynamically creating result sets +- Matches will be aggregated from the provided CSV files + +The content of the CSV file(s) should be similar to the following: + +Ex. + +``` +MatchValue,MatchDescription,MatchReference +abcd1234,ThisIsADescription,https://siteabouthings.abc +``` + +The ``file_path`` value(s) should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + localfile: + file_path: + - $file_path1 + - $file_path2 +``` diff --git a/salt/sensoroni/files/analyzers/localfile/localfile.py b/salt/sensoroni/files/analyzers/localfile/localfile.py index 745c4b9b6..5538d6a93 100755 --- a/salt/sensoroni/files/analyzers/localfile/localfile.py +++ b/salt/sensoroni/files/analyzers/localfile/localfile.py @@ -43,7 +43,7 @@ def prepareResults(raw): summary = "no_results" else: status = "info" - summary = "One or more matches found." + summary = "suspicious" else: raw = {} status = "caution" diff --git a/salt/sensoroni/files/analyzers/localfile/localfile_test.py b/salt/sensoroni/files/analyzers/localfile/localfile_test.py index 66e4820e1..30b171f86 100644 --- a/salt/sensoroni/files/analyzers/localfile/localfile_test.py +++ b/salt/sensoroni/files/analyzers/localfile/localfile_test.py @@ -86,7 +86,7 @@ class TestLocalfileMethods(unittest.TestCase): ] results = localfile.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "One or more matches found.") + self.assertEqual(results["summary"], "suspicious") self.assertEqual(results["status"], "info") def test_prepareResults_error(self): @@ -115,5 +115,5 @@ class TestLocalfileMethods(unittest.TestCase): conf = {"file_path": "/home/intel.csv"} with patch('localfile.localfile.searchFile', new=MagicMock(return_value=output)) as mock: results = localfile.analyze(conf, artifactInput) - self.assertEqual(results["summary"], "One or more matches 
found.") + self.assertEqual(results["summary"], "suspicious") mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/localfile/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/__init__.py b/salt/sensoroni/files/analyzers/malwarehashregistry/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.json b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.json new file mode 100644 index 000000000..916646fbb --- /dev/null +++ b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.json @@ -0,0 +1,7 @@ +{ + "name": "Team Cymru Malware Hash Registry", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer queries Team Cymru's Malware Hash registry for hashes to 
determine if the associated files are considered malicious.", + "supportedTypes" : ["hash"] +} diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.py b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.py new file mode 100755 index 000000000..faf722376 --- /dev/null +++ b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry.py @@ -0,0 +1,65 @@ +import json +import helpers +import argparse +import datetime +from whois import NICClient + + +def sendReq(hash): + server = "hash.cymru.com" + flags = 0 + options = {"whoishost": server} + nic_client = NICClient() + response = nic_client.whois_lookup(options, hash, flags).rstrip() + hash = response.split(' ')[0] + lastSeen = response.split(' ')[1] + if lastSeen == "NO_DATA": + avPct = 0 + else: + avPct = response.split(' ')[2] + lastSeen = datetime.datetime.fromtimestamp(int(lastSeen)).strftime("%Y-%d-%m %H:%M:%S") + raw = {"hash": hash, "last_seen": lastSeen, "av_detection_percentage": int(avPct)} + return raw + + +def prepareResults(raw): + if raw and "last_seen" in raw: + if raw["last_seen"] == "NO_DATA": + status = "ok" + summary = "no_results" + elif raw["av_detection_percentage"] < 1: + status = "ok" + summary = "harmless" + elif raw["av_detection_percentage"] in range(1, 50): + status = "caution" + summary = "suspicious" + elif raw["av_detection_percentage"] in range(51, 100): + status = "threat" + summary = "malicious" + else: + status = "caution" + summary = "internal_failure" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(input): + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = sendReq(data["value"]) + return prepareResults(response) + + +def main(): + parser = argparse.ArgumentParser(description='Search Team Cymru Malware Hash Registry for a given artifact') + parser.add_argument('artifact', 
help='the artifact represented in JSON format') + + args = parser.parse_args() + if args.artifact: + results = analyze(args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry_test.py b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry_test.py new file mode 100644 index 000000000..824949d8b --- /dev/null +++ b/salt/sensoroni/files/analyzers/malwarehashregistry/malwarehashregistry_test.py @@ -0,0 +1,93 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from malwarehashregistry import malwarehashregistry +import unittest + + +class TestMalwareHashRegistryMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + malwarehashregistry.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('malwarehashregistry.malwarehashregistry.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + malwarehashregistry.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_sendReq(self): + output = "84af04b8e69682782607a0c5796ca56999eda6b3 1563161433 35" + hash = "abcd1234" + server = "hash.cymru.com" + flags = 0 + options = {"whoishost": server} + with patch('whois.NICClient.whois_lookup', new=MagicMock(return_value=output)) as mock: + response = malwarehashregistry.sendReq(hash) + mock.assert_called_once_with(options, hash, flags) + self.assertIsNotNone(response) + self.assertEqual(response, {"hash": 
"84af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "2019-15-07 03:30:33", "av_detection_percentage": 35}) + + def test_sendReqNoData(self): + output = "84af04b8e69682782607a0c5796ca5696b3 NO_DATA" + hash = "abcd1234" + server = "hash.cymru.com" + flags = 0 + options = {"whoishost": server} + with patch('whois.NICClient.whois_lookup', new=MagicMock(return_value=output)) as mock: + response = malwarehashregistry.sendReq(hash) + mock.assert_called_once_with(options, hash, flags) + self.assertIsNotNone(response) + self.assertEqual(response, {"hash": "84af04b8e69682782607a0c5796ca5696b3", "last_seen": "NO_DATA", "av_detection_percentage": 0}) + + def test_prepareResults_none(self): + raw = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "NO_DATA", "av_detection_percentage": 0} + results = malwarehashregistry.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_harmless(self): + raw = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "123456", "av_detection_percentage": 0} + results = malwarehashregistry.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "harmless") + self.assertEqual(results["status"], "ok") + + def test_prepareResults_sus(self): + raw = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "123456", "av_detection_percentage": 1} + results = malwarehashregistry.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "suspicious") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_mal(self): + raw = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "123456", "av_detection_percentage": 51} + results = malwarehashregistry.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], 
"malicious") + self.assertEqual(results["status"], "threat") + + def test_prepareResults_error(self): + raw = {} + results = malwarehashregistry.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "NO_DATA", "av_detection_percentage": 0} + artifactInput = '{"value": "14af04b8e69682782607a0c5796ca56999eda6b3", "artifactType": "hash"}' + with patch('malwarehashregistry.malwarehashregistry.sendReq', new=MagicMock(return_value=output)) as mock: + results = malwarehashregistry.analyze(artifactInput) + self.assertEqual(results["summary"], "no_results") + mock.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/requirements.txt b/salt/sensoroni/files/analyzers/malwarehashregistry/requirements.txt new file mode 100644 index 000000000..b6e7c15bf --- /dev/null +++ b/salt/sensoroni/files/analyzers/malwarehashregistry/requirements.txt @@ -0,0 +1,2 @@ +requests>=2.27.1 +python-whois>=0.7.3 diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff 
--git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/future-0.18.2.tar.gz b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/future-0.18.2.tar.gz new file mode 100644 index 000000000..3c5328a85 Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/future-0.18.2.tar.gz differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/python-whois-0.7.3.tar.gz b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/python-whois-0.7.3.tar.gz new file mode 100644 index 000000000..ef6392313 Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/python-whois-0.7.3.tar.gz differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/malwarehashregistry/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/otx/README.md 
b/salt/sensoroni/files/analyzers/otx/README.md new file mode 100644 index 000000000..511888fdc --- /dev/null +++ b/salt/sensoroni/files/analyzers/otx/README.md @@ -0,0 +1,17 @@ +# Alienvault OTX + +## Description +Submit a domain, hash, IP, or URL to Alienvault OTX for analysis. + +## Configuration Requirements + +``api_key`` - API key used for communication with the Alienvault API + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + otx: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/otx/otx.py b/salt/sensoroni/files/analyzers/otx/otx.py index 2d4e8e592..b9565058f 100755 --- a/salt/sensoroni/files/analyzers/otx/otx.py +++ b/salt/sensoroni/files/analyzers/otx/otx.py @@ -44,7 +44,7 @@ def prepareResults(response): summaryinfo = "harmless" elif reputation > 0 and reputation < 50: status = "ok" - summaryinfo = "Likely Harmless" + summaryinfo = "likely_harmless" elif reputation >= 50 and reputation < 75: status = "caution" summaryinfo = "suspicious" @@ -53,7 +53,7 @@ def prepareResults(response): summaryinfo = "malicious" else: status = "info" - summaryinfo = "Analysis complete." 
+ summaryinfo = "analyzer_analysis_complete" else: raw = {} status = "caution" diff --git a/salt/sensoroni/files/analyzers/otx/otx_test.py b/salt/sensoroni/files/analyzers/otx/otx_test.py index 6f5764ca4..3e48168a9 100644 --- a/salt/sensoroni/files/analyzers/otx/otx_test.py +++ b/salt/sensoroni/files/analyzers/otx/otx_test.py @@ -25,7 +25,7 @@ class TestOtxMethods(unittest.TestCase): self.assertEqual(mock_stdout.getvalue(), expected) mock.assert_called_once() - def checkConfigRequirements(self): + def test_checkConfigRequirements(self): conf = {"not_a_key": "abcd12345"} with self.assertRaises(SystemExit) as cm: otx.checkConfigRequirements(conf) @@ -119,7 +119,7 @@ class TestOtxMethods(unittest.TestCase): } results = otx.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Likely Harmless") + self.assertEqual(results["summary"], "likely_harmless") self.assertEqual(results["status"], "ok") def test_prepareResults_suspicious(self): @@ -210,7 +210,7 @@ class TestOtxMethods(unittest.TestCase): } results = otx.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Analysis complete.") + self.assertEqual(results["summary"], "analyzer_analysis_complete") self.assertEqual(results["status"], "info") def test_prepareResults_error(self): diff --git a/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git 
a/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/otx/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/pulsedive/README.md b/salt/sensoroni/files/analyzers/pulsedive/README.md new file mode 100644 index 000000000..d3879fb8d --- /dev/null +++ b/salt/sensoroni/files/analyzers/pulsedive/README.md @@ -0,0 +1,17 @@ +# Pulsedive + +## Description +Search Pulsedive for a domain, hash, IP, URI, URL, or User Agent. + +## Configuration Requirements + +``api_key`` - API key used for communication with the Virustotal API + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + pulsedive: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py index 719d760e6..fd9e0072f 100644 --- a/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive.py @@ -14,7 +14,7 @@ def checkConfigRequirements(conf): def buildReq(conf, artifactType, artifactValue): - indicatorTypes = ["domain", "hash", "ip" "url"] + indicatorTypes = ["domain", "hash", "ip", "url"] if artifactType in indicatorTypes: url = conf['base_url'] + '/info.php' params = {"key": conf["api_key"], "indicator": artifactValue} @@ -53,19 +53,17 @@ def prepareResults(raw): for r in raw['results']: risk = r['risk'] classified.append(classification.get(risk)) - else: + elif "risk" in raw: classified.append(classification.get(raw['risk'])) - + elif "error" 
in raw and raw["error"] == "Indicator not found.": + classified.append("no_results") if classified.count('malicious') > 0: summary = "malicious" status = "threat" elif classified.count('suspicious') > 0: summary = "suspicious" status = "caution" - elif classified.count('harmless') > 0: - summary = "harmless" - status = "ok" - elif classified.count('none') > 0: + elif classified.count('harmless') or classified.count('none') > 0: summary = "harmless" status = "ok" elif classified.count('unknown') > 0: diff --git a/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py b/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py index 47b60efdd..e76a3c979 100644 --- a/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py +++ b/salt/sensoroni/files/analyzers/pulsedive/pulsedive_test.py @@ -104,6 +104,13 @@ class TestVirusTotalMethods(unittest.TestCase): self.assertEqual(results["summary"], "harmless") self.assertEqual(results["status"], "ok") + def test_prepareResults_indicator_not_Found(self): + raw = {"error": "Indicator not found."} + results = pulsedive.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "ok") + def test_prepareResults_error(self): raw = {} results = pulsedive.prepareResults(raw) diff --git a/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl deleted file mode 100644 index 1dfb5c2d3..000000000 Binary files a/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl and /dev/null differ diff --git 
a/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/pulsedive/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl b/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl deleted file mode 100644 index b390ce52f..000000000 Binary files a/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/spamhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/urlhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/urlhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and 
b/salt/sensoroni/files/analyzers/urlhaus/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/README.md b/salt/sensoroni/files/analyzers/urlscan/README.md new file mode 100644 index 000000000..9f33c3106 --- /dev/null +++ b/salt/sensoroni/files/analyzers/urlscan/README.md @@ -0,0 +1,20 @@ +# Urlscan + +## Description +Submit a URL to Urlscan for analysis. + +## Configuration Requirements + +``api_key`` - API key used for communication with the Virustotal API +``enabled`` - Determines whether or not the analyzer is enabled. Defaults to ``False`` +``visibility`` - Determines whether or not scan results are visibile publicly. Defaults to ``public`` +``timeout`` - Time to wait for scan results. Defaults to ``180``s + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + urlscan: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/urlscan/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git 
a/salt/sensoroni/files/analyzers/urlscan/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/urlscan/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/urlscan/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/urlscan/urlscan.py b/salt/sensoroni/files/analyzers/urlscan/urlscan.py index 94c3ec8db..a07e61c89 100755 --- a/salt/sensoroni/files/analyzers/urlscan/urlscan.py +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan.py @@ -54,7 +54,7 @@ def prepareResults(raw): summary = "suspicious" else: status = "info" - summary = "Scan complete." 
+ summary = "analysis_complete" else: status = "caution" summary = "internal_failure" diff --git a/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py b/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py index 487e6dbe3..5135c3cd2 100644 --- a/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py +++ b/salt/sensoroni/files/analyzers/urlscan/urlscan_test.py @@ -96,7 +96,7 @@ class TestUrlScanMethods(unittest.TestCase): raw = {"requests": [{"request": {"requestId": "3"}}], "verdicts": {"overall": {"score": 0, "malicious": False, "hasVerdicts": False}}} results = urlscan.prepareResults(raw) self.assertEqual(results["response"], raw) - self.assertEqual(results["summary"], "Scan complete.") + self.assertEqual(results["summary"], "analysis_complete") self.assertEqual(results["status"], "info") def test_prepareResults_error(self): @@ -116,6 +116,6 @@ class TestUrlScanMethods(unittest.TestCase): with patch('urlscan.urlscan.sendReq', new=MagicMock(return_value=output_req)) as mock_req: with patch('urlscan.urlscan.getReport', new=MagicMock(return_value=output_report)) as mock_report: results = urlscan.analyze(conf, artifactInput) - self.assertEqual(results["summary"], "Scan complete.") + self.assertEqual(results["summary"], "analysis_complete") mock_req.assert_called_once() mock_report.assert_called_once() diff --git a/salt/sensoroni/files/analyzers/virustotal/README.md b/salt/sensoroni/files/analyzers/virustotal/README.md new file mode 100644 index 000000000..af39db3ea --- /dev/null +++ b/salt/sensoroni/files/analyzers/virustotal/README.md @@ -0,0 +1,17 @@ +# Virustotal + +## Description +Submit a domain, hash, IP, or URL to Virustotal for analysis. 
+ +## Configuration Requirements + +``api_key`` - API key used for communication with the Virustotal API + +This value should be set in the ``sensoroni`` pillar, like so: + +``` +sensoroni: + analyzers: + virustotal: + api_key: $yourapikey +``` diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl deleted file mode 100644 index b390ce52f..000000000 Binary files a/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl and /dev/null differ diff --git a/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl b/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl new file mode 100644 index 000000000..d2b6c37f9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/virustotal/source-packages/PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl differ diff --git a/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py b/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py index 103e137ce..177c16d99 100644 --- a/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py +++ b/salt/sensoroni/files/analyzers/virustotal/virustotal_test.py @@ -25,7 +25,7 @@ class TestVirusTotalMethods(unittest.TestCase): self.assertEqual(mock_stdout.getvalue(), expected) mock.assert_called_once() - def checkConfigRequirements(self): + def test_checkConfigRequirements(self): conf = {"not_a_key": "abcd12345"} with self.assertRaises(SystemExit) as cm: virustotal.checkConfigRequirements(conf) diff --git a/salt/sensoroni/files/analyzers/whoislookup/__init__.py 
b/salt/sensoroni/files/analyzers/whoislookup/__init__.py new file mode 100755 index 000000000..e69de29bb diff --git a/salt/sensoroni/files/analyzers/whoislookup/requirements.txt b/salt/sensoroni/files/analyzers/whoislookup/requirements.txt new file mode 100755 index 000000000..e19053e6c --- /dev/null +++ b/salt/sensoroni/files/analyzers/whoislookup/requirements.txt @@ -0,0 +1 @@ +whoisit>=2.5.3 diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/certifi-2021.10.8-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/certifi-2021.10.8-py2.py3-none-any.whl new file mode 100644 index 000000000..fbcb86b5f Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/certifi-2021.10.8-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/charset_normalizer-2.0.12-py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/charset_normalizer-2.0.12-py3-none-any.whl new file mode 100644 index 000000000..17a2dfbeb Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/charset_normalizer-2.0.12-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/idna-3.3-py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/idna-3.3-py3-none-any.whl new file mode 100644 index 000000000..060541bc9 Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/idna-3.3-py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/python_dateutil-2.8.2-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/python_dateutil-2.8.2-py2.py3-none-any.whl new file mode 100644 index 000000000..8ffb92386 Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/python_dateutil-2.8.2-py2.py3-none-any.whl differ diff --git 
a/salt/sensoroni/files/analyzers/whoislookup/source-packages/requests-2.27.1-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/requests-2.27.1-py2.py3-none-any.whl new file mode 100644 index 000000000..807fc6110 Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/requests-2.27.1-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/six-1.16.0-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/six-1.16.0-py2.py3-none-any.whl new file mode 100644 index 000000000..fd942658a Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/six-1.16.0-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/urllib3-1.26.9-py2.py3-none-any.whl b/salt/sensoroni/files/analyzers/whoislookup/source-packages/urllib3-1.26.9-py2.py3-none-any.whl new file mode 100644 index 000000000..5019453dd Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/urllib3-1.26.9-py2.py3-none-any.whl differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/source-packages/whoisit-2.5.3.tar.gz b/salt/sensoroni/files/analyzers/whoislookup/source-packages/whoisit-2.5.3.tar.gz new file mode 100644 index 000000000..b48535618 Binary files /dev/null and b/salt/sensoroni/files/analyzers/whoislookup/source-packages/whoisit-2.5.3.tar.gz differ diff --git a/salt/sensoroni/files/analyzers/whoislookup/whoislookup.json b/salt/sensoroni/files/analyzers/whoislookup/whoislookup.json new file mode 100755 index 000000000..37c325a60 --- /dev/null +++ b/salt/sensoroni/files/analyzers/whoislookup/whoislookup.json @@ -0,0 +1,7 @@ +{ + "name": "WHOIS", + "version": "0.1", + "author": "Security Onion Solutions", + "description": "This analyzer performs a query to an RDAP server for WHOIS-like domain information.", + "supportedTypes" : ["domain"] +} diff --git 
a/salt/sensoroni/files/analyzers/whoislookup/whoislookup.py b/salt/sensoroni/files/analyzers/whoislookup/whoislookup.py new file mode 100755 index 000000000..84c5a2b3c --- /dev/null +++ b/salt/sensoroni/files/analyzers/whoislookup/whoislookup.py @@ -0,0 +1,55 @@ +import json +import helpers +import argparse +import whoisit + + +def sendReq(domain): + whoisit.bootstrap() + try: + results = whoisit.domain(domain, raw=True) + except whoisit.errors.ResourceDoesNotExist: + results = "Not found." + except whoisit.errors.QueryError as error: + results = "QueryError: " + str(error) + return results + + +def prepareResults(raw): + if raw: + if "Not found." in raw: + status = "info" + summary = "no_results" + elif "QueryError" in raw: + status = "caution" + summary = "invalid_input" + else: + status = "info" + summary = "analysis_complete" + else: + status = "caution" + summary = "internal_failure" + results = {'response': raw, 'summary': summary, 'status': status} + return results + + +def analyze(input): + meta = helpers.loadMetadata(__file__) + data = helpers.parseArtifact(input) + helpers.checkSupportedType(meta, data["artifactType"]) + response = sendReq(data["value"]) + return prepareResults(response) + + +def main(): + parser = argparse.ArgumentParser(description='Query RDAP server for WHOIS-like information for a given artifact') + parser.add_argument('artifact', help='the artifact represented in JSON format') + + args = parser.parse_args() + if args.artifact: + results = analyze(args.artifact) + print(json.dumps(results)) + + +if __name__ == "__main__": + main() diff --git a/salt/sensoroni/files/analyzers/whoislookup/whoislookup_test.py b/salt/sensoroni/files/analyzers/whoislookup/whoislookup_test.py new file mode 100755 index 000000000..9aa596095 --- /dev/null +++ b/salt/sensoroni/files/analyzers/whoislookup/whoislookup_test.py @@ -0,0 +1,89 @@ +from io import StringIO +import sys +from unittest.mock import patch, MagicMock +from whoislookup import whoislookup 
+import unittest +import whoisit + + +class TestWhoisLookupMethods(unittest.TestCase): + + def test_main_missing_input(self): + with patch('sys.exit', new=MagicMock()) as sysmock: + with patch('sys.stderr', new=StringIO()) as mock_stderr: + sys.argv = ["cmd"] + whoislookup.main() + self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] artifact\ncmd: error: the following arguments are required: artifact\n") + sysmock.assert_called_once_with(2) + + def test_main_success(self): + output = {"foo": "bar"} + with patch('sys.stdout', new=StringIO()) as mock_stdout: + with patch('whoislookup.whoislookup.analyze', new=MagicMock(return_value=output)) as mock: + sys.argv = ["cmd", "input"] + whoislookup.main() + expected = '{"foo": "bar"}\n' + self.assertEqual(mock_stdout.getvalue(), expected) + mock.assert_called_once() + + def test_sendReq(self): + output = {"foo": "bar"} + with patch('whoisit.domain', new=MagicMock(return_value=output)) as mock: + response = whoislookup.sendReq("abcd1234.com") + mock.assert_called_once_with("abcd1234.com", raw=True) + self.assertIsNotNone(response) + self.assertEqual(response, output) + + def test_sendReqNotFound(self): + mock = MagicMock() + mock.side_effect = whoisit.errors.ResourceDoesNotExist() + with patch('whoisit.domain', new=mock): + response = whoislookup.sendReq("abcd1234.com") + mock.assert_called_once_with("abcd1234.com", raw=True) + self.assertIsNotNone(response) + self.assertEqual(response, "Not found.") + + def test_sendReqQueryError(self): + mock = MagicMock() + mock.side_effect = whoisit.errors.QueryError("error") + with patch('whoisit.domain', new=mock): + response = whoislookup.sendReq("abcd1234.com") + mock.assert_called_once_with("abcd1234.com", raw=True) + self.assertIsNotNone(response) + self.assertEqual(response, "QueryError: error") + + def test_prepareResults_none(self): + raw = "Not found." 
+ results = whoislookup.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "no_results") + self.assertEqual(results["status"], "info") + + def test_prepareResults_info(self): + raw = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "123456", "av_detection_percentage": 0} + results = whoislookup.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "analysis_complete") + self.assertEqual(results["status"], "info") + + def test_prepareResults_query_error(self): + raw = "QueryError: blahblahblah" + results = whoislookup.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "invalid_input") + self.assertEqual(results["status"], "caution") + + def test_prepareResults_error(self): + raw = {} + results = whoislookup.prepareResults(raw) + self.assertEqual(results["response"], raw) + self.assertEqual(results["summary"], "internal_failure") + self.assertEqual(results["status"], "caution") + + def test_analyze(self): + output = {"hash": "14af04b8e69682782607a0c5796ca56999eda6b3", "last_seen": "NO_DATA", "av_detection_percentage": 0} + artifactInput = '{"value": "14af04b8e69682782607a0c5796ca56999eda6b3", "artifactType": "domain"}' + with patch('whoislookup.whoislookup.sendReq', new=MagicMock(return_value=output)) as mock: + results = whoislookup.analyze(artifactInput) + self.assertEqual(results["summary"], "analysis_complete") + mock.assert_called_once() diff --git a/salt/soc/files/soc/dashboards.queries.json b/salt/soc/files/soc/dashboards.queries.json index 14247721c..dfa999ac6 100644 --- a/salt/soc/files/soc/dashboards.queries.json +++ b/salt/soc/files/soc/dashboards.queries.json @@ -1,24 +1,25 @@ [ - { "name": "Overview", "description": "Show all events grouped by the origin host", "query": "* | groupby event.dataset | groupby event.module | groupby event.category | groupby observer.name | groupby 
source.ip | groupby destination.ip"}, - { "name": "Elastalerts", "description": "", "query": "_type:elastalert | groupby rule.name"}, + { "name": "Overview", "description": "Overview of all events", "query": "* | groupby -sankey event.dataset event.category* | groupby event.dataset | groupby -bar event.module | groupby event.module | groupby -pie event.category | groupby event.category | groupby observer.name | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "SOC Auth", "description": "Show all SOC authentication logs", "query": "event.module:kratos AND event.dataset:audit AND msg:authenticated | groupby http_request.headers.x-real-ip | groupby identity_id | groupby http_request.headers.user-agent"}, + { "name": "Elastalerts", "description": "Elastalert logs", "query": "_index: \"*:elastalert*\" | groupby rule_name | groupby alert_info.type"}, { "name": "Alerts", "description": "Show all alerts", "query": "event.dataset: alert | groupby event.module | groupby rule.name | groupby event.severity | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "NIDS Alerts", "description": "NIDS alerts", "query": "event.category: network AND event.dataset: alert | groupby rule.category | groupby rule.gid | groupby rule.uuid | groupby rule.name | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "Wazuh/OSSEC", "description": "Wazuh/OSSEC HIDS alerts and logs", "query": "event.module:ossec | groupby rule.category | groupby rule.uuid | groupby rule.name | groupby agent.id | groupby agent.name | groupby log.full"}, { "name": "Sysmon", "description": "Sysmon logs", "query": "event.module:sysmon | groupby event.dataset | groupby user.name | groupby process.executable | groupby process.command_line | groupby process.parent.command_line"}, { "name": "Strelka", "description": "Strelka logs", "query": "event.module:strelka | groupby file.mime_type | groupby file.name | groupby 
file.source"}, { "name": "Zeek Notice", "description": "Zeek Notice logs", "query": "event.dataset:notice | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port"}, - { "name": "Connections", "description": "Connection logs", "query": "event.dataset:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name"}, + { "name": "Connections", "description": "Connection logs", "query": "event.dataset:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name | groupby client.ip_bytes | groupby server.ip_bytes"}, { "name": "DCE_RPC", "description": "DCE_RPC logs", "query": "event.dataset:dce_rpc | groupby dce_rpc.operation | groupby dce_rpc.endpoint | groupby dce_rpc.named_pipe | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "DHCP", "description": "Dynamic Host Configuration Protocol leases", "query": "event.dataset:dhcp | groupby host.hostname | groupby host.domain | groupby dhcp.message_types | groupby client.address | groupby server.address"}, { "name": "DNP3", "description": "DNP3 logs", "query": "event.dataset:dnp3 | groupby dnp3.fc_request | groupby dnp3.fc_reply | groupby dnp3.iin | groupby source.ip | groupby destination.ip | groupby destination.port"}, - { "name": "DNS", "description": "Domain Name System queries", "query": "event.dataset:dns | groupby dns.query.name | groupby dns.query.type_name | groupby dns.response.code_name | groupby source.ip | groupby 
destination.ip | groupby destination.port"}, + { "name": "DNS", "description": "Domain Name System queries", "query": "event.dataset:dns | groupby dns.query.name | groupby dns.highest_registered_domain | groupby dns.parent_domain | groupby dns.answers.name | groupby dns.query.type_name | groupby dns.response.code_name | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "DPD", "description": "Dynamic Protocol Detection errors", "query": "event.dataset:dpd | groupby error.reason | groupby source.ip | groupby destination.ip | groupby destination.port | groupby network.protocol"}, { "name": "Files", "description": "Files seen in network traffic", "query": "event.dataset:file | groupby file.mime_type | groupby file.source | groupby file.bytes.total | groupby source.ip | groupby destination.ip"}, { "name": "FTP", "description": "File Transfer Protocol logs", "query": "event.dataset:ftp | groupby ftp.command | groupby ftp.argument | groupby ftp.user | groupby source.ip | groupby destination.ip | groupby destination.port"}, - { "name": "HTTP", "description": "Hyper Text Transport Protocol logs", "query": "event.dataset:http | groupby http.method | groupby http.status_code | groupby http.status_message | groupby http.useragent | groupby http.virtual_host | groupby http.uri | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "HTTP", "description": "Hyper Text Transport Protocol logs", "query": "event.dataset:http | groupby http.method | groupby http.virtual_host | groupby http.uri | groupby http.useragent | groupby http.status_code | groupby http.status_message | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "Intel", "description": "Zeek Intel framework hits", "query": "event.dataset:intel | groupby intel.indicator | groupby intel.indicator_type | groupby intel.seen_where | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "IRC", 
"description": "Internet Relay Chat logs", "query": "event.dataset:irc | groupby irc.command.type | groupby irc.username | groupby irc.nickname | groupby irc.command.value | groupby irc.command.info | groupby source.ip | groupby destination.ip | groupby destination.port"}, - { "name": "KERBEROS", "description": "KERBEROS logs", "query": "event.dataset:kerberos | groupby kerberos.service | groupby kerberos.client | groupby kerberos.request_type | groupby source.ip | groupby destination.ip | groupby destination.port"}, + { "name": "Kerberos", "description": "Kerberos logs", "query": "event.dataset:kerberos | groupby kerberos.service | groupby kerberos.client | groupby kerberos.request_type | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "MODBUS", "description": "MODBUS logs", "query": "event.dataset:modbus | groupby modbus.function | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "MYSQL", "description": "MYSQL logs", "query": "event.dataset:mysql | groupby mysql.command | groupby mysql.argument | groupby mysql.success | groupby mysql.response | groupby mysql.rows | groupby source.ip | groupby destination.ip | groupby destination.port"}, { "name": "NOTICE", "description": "Zeek notice logs", "query": "event.dataset:notice | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port"}, @@ -42,4 +43,4 @@ { "name": "Weird", "description": "Weird network traffic seen by Zeek", "query": "event.dataset:weird | groupby weird.name | groupby weird.additional_info | groupby source.ip | groupby destination.ip | groupby destination.port "}, { "name": "x509", "description": "x.509 certificates seen by Zeek", "query": "event.dataset:x509 | groupby x509.certificate.key.length | groupby x509.san_dns | groupby x509.certificate.key.type | groupby x509.certificate.subject | groupby x509.certificate.issuer"}, { "name": "Firewall", 
"description": "Firewall logs", "query": "event.dataset:firewall | groupby rule.action | groupby interface.name | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port"} -] \ No newline at end of file +] diff --git a/salt/soc/files/soc/hunt.queries.json b/salt/soc/files/soc/hunt.queries.json index 01692ad0b..ed82c10a4 100644 --- a/salt/soc/files/soc/hunt.queries.json +++ b/salt/soc/files/soc/hunt.queries.json @@ -1,8 +1,8 @@ [ - { "name": "Default Query", "showSubtitle": true, "showSubtitle": true, "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name"}, + { "name": "Default Query", "showSubtitle": true, "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name"}, { "name": "Log Type", "showSubtitle": true, "description": "Show all events grouped by module and dataset", "query": "* | groupby event.module event.dataset"}, { "name": "SOC Auth", "showSubtitle": true, "description": "Users authenticated to SOC grouped by IP address and identity", "query": "event.module:kratos AND event.dataset:audit AND msg:authenticated | groupby http_request.headers.x-real-ip identity_id"}, - { "name": "Elastalerts", "showSubtitle": true, "description": "", "query": "_type:elastalert | groupby rule.name"}, + { "name": "Elastalerts", "showSubtitle": true, "description": "Elastalert logs", "query": "_index: \"*:elastalert*\" | groupby rule_name alert_info.type"}, { "name": "Alerts", "showSubtitle": true, "description": "Show all alerts grouped by alert source", "query": "event.dataset: alert | groupby event.module"}, { "name": "NIDS Alerts", "showSubtitle": true, "description": "Show all NIDS alerts grouped by alert", "query": "event.category: network AND event.dataset: alert | groupby rule.category rule.gid rule.uuid rule.name"}, { "name": "Wazuh/OSSEC Alerts", "showSubtitle": true, "description": "Show all Wazuh alerts at Level 5 or higher grouped by 
category", "query": "event.module:ossec AND event.dataset:alert AND rule.level:>4 | groupby rule.category rule.name"}, @@ -38,7 +38,7 @@ { "name": "HTTP", "showSubtitle": true, "description": "HTTP with exe downloads", "query": "event.dataset:http AND (file.resp_mime_types:dosexec OR file.resp_mime_types:executable) | groupby http.virtual_host"}, { "name": "Intel", "showSubtitle": true, "description": "Intel framework hits grouped by indicator", "query": "event.dataset:intel | groupby intel.indicator.keyword"}, { "name": "IRC", "showSubtitle": true, "description": "IRC grouped by command", "query": "event.dataset:irc | groupby irc.command.type"}, - { "name": "KERBEROS", "showSubtitle": true, "description": "KERBEROS grouped by service", "query": "event.dataset:kerberos | groupby kerberos.service"}, + { "name": "Kerberos", "showSubtitle": true, "description": "Kerberos grouped by service", "query": "event.dataset:kerberos | groupby kerberos.service"}, { "name": "MODBUS", "showSubtitle": true, "description": "MODBUS grouped by function", "query": "event.dataset:modbus | groupby modbus.function"}, { "name": "MYSQL", "showSubtitle": true, "description": "MYSQL grouped by command", "query": "event.dataset:mysql | groupby mysql.command"}, { "name": "NOTICE", "showSubtitle": true, "description": "Zeek notice logs grouped by note and message", "query": "event.dataset:notice | groupby notice.note notice.message"}, diff --git a/salt/soc/files/soc/motd.md b/salt/soc/files/soc/motd.md index fd95b089d..fbc643993 100644 --- a/salt/soc/files/soc/motd.md +++ b/salt/soc/files/soc/motd.md @@ -1,16 +1,16 @@ ## Getting Started -New to Security Onion 2? Check out the [Online Help](/docs/) and [Cheatsheet](/docs/cheatsheet.pdf) to learn how to best utilize Security Onion to hunt for evil! Find them in the upper-right menu. Also, watch our free Security Onion 2 Essentials online course, available on our [Training](https://securityonionsolutions.com/training) website. 
+New to Security Onion 2? Click the menu in the upper-right corner and you'll find links for [Help](/docs/) and a [Cheatsheet](/docs/cheatsheet.pdf) that will help you best utilize Security Onion to hunt for evil! In addition, check out our free Security Onion 2 Essentials online course, available on our [Training](https://securityonionsolutions.com/training) website. -If you're ready to dive-in, take a look at the [Alerts](/#/alerts) interface to see what Security Onion has detected so far. Or navigate to the [Hunt](/#/hunt) interface to hunt for evil that the alerts might have missed! +If you're ready to dive in, take a look at the [Alerts](/#/alerts) interface to see what Security Onion has detected so far. Then go to the [Dashboards](/#/dashboards) interface for a general overview of all logs collected or go to the [Hunt](/#/hunt) interface for more focused threat hunting. Once you've found something of interest, escalate it to [Cases](/#/cases) to then collect evidence and analyze observables as you work towards closing the case. ## What's New -The release notes have moved to the upper-right menu. Click on the [What's New](/docs/#release-notes) menu option to find all the latest fixes and features in this version of Security Onion! +To see all the latest features and fixes in this version of Security Onion, click the upper-right menu and then click the [What's New](/docs/#release-notes) link. ## Customize This Space -Make this area your own by customizing the content. The content is stored in the `motd.md` file, which uses the common Markdown (.md) format. Visit [markdownguide.org](https://www.markdownguide.org/) to learn more about the simple Markdown format. +Make this area your own by customizing the content. The content is stored in the `motd.md` file, which uses the common Markdown (.md) format. To learn more about the format, please see [markdownguide.org](https://www.markdownguide.org/). 
To customize this content, login to the manager via SSH and execute the following command: @@ -18,9 +18,9 @@ To customize this content, login to the manager via SSH and execute the followin sudo cp /opt/so/saltstack/default/salt/soc/files/soc/motd.md /opt/so/saltstack/local/salt/soc/files/soc/ ``` -and edit the new file as desired. +Then edit the new file as desired using your favorite text editor. -Finally, run this command: +Finally, restart SOC to make the changes take effect: ```bash sudo so-soc-restart diff --git a/salt/soc/files/soc/soc.json b/salt/soc/files/soc/soc.json index 4eabb7c48..e6ee71b51 100644 --- a/salt/soc/files/soc/soc.json +++ b/salt/soc/files/soc/soc.json @@ -162,6 +162,7 @@ "ackEnabled": false, "escalateEnabled": true, "escalateRelatedEventsEnabled": {{ 'true' if CASE_MODULE == 'soc' else 'false' }}, + "aggregationActionsEnabled": true, "eventFields": {{ hunt_eventfields | json }}, "queryBaseFilter": "", "queryToggleFilters": [ @@ -178,10 +179,11 @@ "eventFetchLimit": 100, "relativeTimeValue": 24, "relativeTimeUnit": 30, - "mostRecentlyUsedLimit": 5, + "mostRecentlyUsedLimit": 0, "ackEnabled": false, "escalateEnabled": true, "escalateRelatedEventsEnabled": {{ 'true' if CASE_MODULE == 'soc' else 'false' }}, + "aggregationActionsEnabled": false, "eventFields": {{ hunt_eventfields | json }}, "queryBaseFilter": "", "queryToggleFilters": [ @@ -205,6 +207,7 @@ "ackEnabled": true, "escalateEnabled": true, "escalateRelatedEventsEnabled": {{ 'true' if CASE_MODULE == 'soc' else 'false' }}, + "aggregationActionsEnabled": true, "eventFields": {{ alerts_eventfields | json }}, "queryBaseFilter": "event.dataset:alert", "queryToggleFilters": [ @@ -226,6 +229,7 @@ "ackEnabled": false, "escalateEnabled": false, "escalateRelatedEventsEnabled": false, + "aggregationActionsEnabled": false, "viewEnabled": true, "createLink": "/case/create", "eventFields": {{ cases_eventfields | json }}, diff --git a/salt/soctopus/init.sls b/salt/soctopus/init.sls index 
74ab5dcc1..7ad2640ea 100644 --- a/salt/soctopus/init.sls +++ b/salt/soctopus/init.sls @@ -66,7 +66,7 @@ so-soctopus: - /opt/so/conf/soctopus/SOCtopus.conf:/SOCtopus/SOCtopus.conf:ro - /opt/so/log/soctopus/:/var/log/SOCtopus/:rw - /opt/so/rules/elastalert/playbook:/etc/playbook-rules:rw - - /opt/so/conf/navigator/nav_layer_playbook.json:/etc/playbook/nav_layer_playbook.json:rw + - /opt/so/conf/navigator/layers/:/etc/playbook/:rw - /opt/so/conf/soctopus/sigma-import/:/SOCtopus/sigma-import/:rw {% if ISAIRGAP is sameas true %} - /nsm/repo/rules/sigma:/soctopus/sigma diff --git a/salt/suricata/defaults.yaml b/salt/suricata/defaults.yaml index 695e43f08..9c358b448 100644 --- a/salt/suricata/defaults.yaml +++ b/salt/suricata/defaults.yaml @@ -586,4 +586,133 @@ suricata: threshold-file: /etc/suricata/threshold.conf #include: include1.yaml #include: include2.yaml - \ No newline at end of file + classification: + attempted-admin: + description: Attempted Administrator Privilege Gain + priority: 1 + attempted-dos: + description: Attempted Denial of Service + priority: 2 + attempted-recon: + description: Attempted Information Leak + priority: 2 + attempted-user: + description: Attempted User Privilege Gain + priority: 1 + bad-unknown: + description: Potentially Bad Traffic + priority: 2 + coin-mining: + description: Crypto Currency Mining Activity Detected + priority: 2 + command-and-control: + description: Malware Command and Control Activity Detected + priority: 1 + credential-theft: + description: Successful Credential Theft Detected + priority: 1 + default-login-attempt: + description: Attempt to login by a default username and password + priority: 2 + denial-of-service: + description: Detection of a Denial of Service Attack + priority: 2 + domain-c2: + description: Domain Observed Used for C2 Detected + priority: 1 + exploit-kit: + description: Exploit Kit Activity Detected + priority: 1 + external-ip-check: + description: Device Retrieving External IP Address Detected + 
priority: 2 + icmp-event: + description: Generic ICMP event + priority: 3 + inappropriate-content: + description: Inappropriate Content was Detected + priority: 1 + misc-activity: + description: Misc activity + priority: 3 + misc-attack: + description: Misc Attack + priority: 2 + network-scan: + description: Detection of a Network Scan + priority: 3 + non-standard-protocol: + description: Detection of a non-standard protocol or event + priority: 2 + not-suspicious: + description: Not Suspicious Traffic + priority: 3 + policy-violation: + description: Potential Corporate Privacy Violation + priority: 1 + protocol-command-decode: + description: Generic Protocol Command Decode + priority: 3 + pup-activity: + description: Possibly Unwanted Program Detected + priority: 2 + rpc-portmap-decode: + description: Decode of an RPC Query + priority: 2 + shellcode-detect: + description: Executable code was detected + priority: 1 + social-engineering: + description: Possible Social Engineering Attempted + priority: 2 + string-detect: + description: A suspicious string was detected + priority: 3 + successful-admin: + description: Successful Administrator Privilege Gain + priority: 1 + successful-dos: + description: Denial of Service + priority: 2 + successful-recon-largescale: + description: Large Scale Information Leak + priority: 2 + successful-recon-limited: + description: Information Leak + priority: 2 + successful-user: + description: Successful User Privilege Gain + priority: 1 + suspicious-filename-detect: + description: A suspicious filename was detected + priority: 2 + suspicious-login: + description: An attempted login using a suspicious username was detected + priority: 2 + system-call-detect: + description: A system call was detected + priority: 2 + targeted-activity: + description: Targeted Malicious Activity was Detected + priority: 1 + tcp-connection: + description: A TCP connection was detected + priority: 4 + trojan-activity: + description: A Network Trojan was 
detected + priority: 1 + unknown: + description: Unknown Traffic + priority: 3 + unsuccessful-user: + description: Unsuccessful User Privilege Gain + priority: 1 + unusual-client-port-connection: + description: A client was using an unusual port + priority: 2 + web-application-activity: + description: access to a potentially vulnerable web application + priority: 2 + web-application-attack: + description: Web Application Attack + priority: 1 diff --git a/salt/suricata/files/classification.config.jinja b/salt/suricata/files/classification.config.jinja new file mode 100644 index 000000000..122cf4baf --- /dev/null +++ b/salt/suricata/files/classification.config.jinja @@ -0,0 +1,11 @@ +{% import_yaml 'suricata/defaults.yaml' as suricata_defaults with context -%} +{% do salt['defaults.merge'](suricata_defaults.suricata.classification, salt['pillar.get']('suricata:classification', {}), in_place=True) -%} +# +# config classification:shortname,short description,priority +# +{% for sn, details in suricata_defaults.suricata.classification.items() -%} +{% if not details -%} +{% set details = {'description': 'The description is not set', 'priority': '1'} -%} +{% endif -%} +config classification: {{sn}}, {{details.get('description', 'The description is not set')}}, {{details.get('priority', '1')}} +{% endfor -%} diff --git a/salt/suricata/init.sls b/salt/suricata/init.sls index 4c2347302..db09e310b 100644 --- a/salt/suricata/init.sls +++ b/salt/suricata/init.sls @@ -111,6 +111,14 @@ surithresholding: - group: 940 - template: jinja +classification_config: + file.managed: + - name: /opt/so/conf/suricata/classification.config + - source: salt://suricata/files/classification.config.jinja + - user: 940 + - group: 940 + - template: jinja + # BPF compilation and configuration {% if BPF_NIDS %} {% set BPF_CALC = salt['cmd.script']('/usr/sbin/so-bpf-compile', interface + ' ' + BPF_NIDS|join(" "),cwd='/root') %} @@ -148,6 +156,7 @@ so-suricata: - binds: - 
/opt/so/conf/suricata/suricata.yaml:/etc/suricata/suricata.yaml:ro - /opt/so/conf/suricata/threshold.conf:/etc/suricata/threshold.conf:ro + - /opt/so/conf/suricata/classification.config:/etc/suricata/classification.config:ro - /opt/so/conf/suricata/rules:/etc/suricata/rules:ro - /opt/so/log/suricata/:/var/log/suricata/:rw - /nsm/suricata/:/nsm/:rw @@ -159,10 +168,12 @@ so-suricata: - file: surithresholding - file: /opt/so/conf/suricata/rules/ - file: /opt/so/conf/suricata/bpf + - file: classification_config - require: - file: suriconfig - file: surithresholding - file: suribpf + - file: classification_config {% else %} {# if Suricata isn't enabled, then stop and remove the container #} - force: True diff --git a/setup/automation/eval-cloud-logscan b/setup/automation/eval-cloud-logscan new file mode 100644 index 000000000..564df40f0 --- /dev/null +++ b/setup/automation/eval-cloud-logscan @@ -0,0 +1,77 @@ +#!/bin/bash + +# Copyright 2014-2022 Security Onion Solutions, LLC + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +TESTING=true + +address_type=DHCP +ADMINUSER=onionuser +ADMINPASS1=onionuser +ADMINPASS2=onionuser +ALLOW_CIDR=0.0.0.0/0 +ALLOW_ROLE=a +BASICZEEK=2 +BASICSURI=2 +# BLOGS= +BNICS=eth1 +ZEEKVERSION=ZEEK +# CURCLOSEDAYS= +# EVALADVANCED=BASIC +GRAFANA=1 +# HELIXAPIKEY= +HNMANAGER=10.0.0.0/8,192.168.0.0/16,172.16.0.0/12 +HNSENSOR=inherit +HOSTNAME=eval +install_type=EVAL +LEARN_LOGSCAN_ENABLE=true +# LSINPUTBATCHCOUNT= +# LSINPUTTHREADS= +# LSPIPELINEBATCH= +# LSPIPELINEWORKERS= +MANAGERADV=BASIC +# MDNS= +# MGATEWAY= +# MIP= +# MMASK= +MNIC=eth0 +# MSEARCH= +# MSRV= +# MTU= +NIDS=Suricata +# NODE_ES_HEAP_SIZE= +# NODE_LS_HEAP_SIZE= +NODESETUP=NODEBASIC +NSMSETUP=BASIC +NODEUPDATES=MANAGER +# OINKCODE= +OSQUERY=1 +# PATCHSCHEDULEDAYS= +# PATCHSCHEDULEHOURS= +PATCHSCHEDULENAME=auto +PLAYBOOK=1 +REDIRECTHOST=$(cat /root/public_ip) +REDIRECTINFO=OTHER +RULESETUP=ETOPEN +# SHARDCOUNT= +# SKIP_REBOOT= +SOREMOTEPASS1=onionuser +SOREMOTEPASS2=onionuser +STRELKA=1 +THEHIVE=0 +WAZUH=1 +WEBUSER=onionuser@somewhere.invalid +WEBPASSWD1=0n10nus3r +WEBPASSWD2=0n10nus3r diff --git a/setup/automation/standalone-cloud-suricata b/setup/automation/standalone-cloud-suricata new file mode 100644 index 000000000..e3e21f756 --- /dev/null +++ b/setup/automation/standalone-cloud-suricata @@ -0,0 +1,76 @@ +#!/bin/bash + +# Copyright 2014-2022 Security Onion Solutions, LLC + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +TESTING=true + +address_type=DHCP +ADMINUSER=onionuser +ADMINPASS1=onionuser +ADMINPASS2=onionuser +ALLOW_CIDR=0.0.0.0/0 +ALLOW_ROLE=a +BASICZEEK=2 +BASICSURI=2 +# BLOGS= +BNICS=eth1 +ZEEKVERSION=SURICATA +# CURCLOSEDAYS= +# EVALADVANCED=BASIC +GRAFANA=1 +# HELIXAPIKEY= +HNMANAGER=10.0.0.0/8,192.168.0.0/16,172.16.0.0/12 +HNSENSOR=inherit +HOSTNAME=standalone +install_type=STANDALONE +# LSINPUTBATCHCOUNT= +# LSINPUTTHREADS= +# LSPIPELINEBATCH= +# LSPIPELINEWORKERS= +MANAGERADV=BASIC +# MDNS= +# MGATEWAY= +# MIP= +# MMASK= +MNIC=eth0 +# MSEARCH= +# MSRV= +# MTU= +NIDS=Suricata +# NODE_ES_HEAP_SIZE= +# NODE_LS_HEAP_SIZE= +NODESETUP=NODEBASIC +NSMSETUP=BASIC +NODEUPDATES=MANAGER +# OINKCODE= +OSQUERY=1 +# PATCHSCHEDULEDAYS= +# PATCHSCHEDULEHOURS= +PATCHSCHEDULENAME=auto +PLAYBOOK=1 +REDIRECTHOST=$(cat /root/public_ip) +REDIRECTINFO=OTHER +RULESETUP=ETOPEN +# SHARDCOUNT= +# SKIP_REBOOT= +SOREMOTEPASS1=onionuser +SOREMOTEPASS2=onionuser +STRELKA=1 +THEHIVE=0 +WAZUH=1 +WEBUSER=onionuser@somewhere.invalid +WEBPASSWD1=0n10nus3r +WEBPASSWD2=0n10nus3r diff --git a/setup/so-setup b/setup/so-setup index e06b5ded9..225a01130 100755 --- a/setup/so-setup +++ b/setup/so-setup @@ -1106,9 +1106,9 @@ if [[ $success != 0 ]]; then SO_ERROR=1; fi # Check entire setup log for errors or unexpected salt states and ensure cron jobs are not reporting errors to root's mailbox # Ignore "Status .* was not found" due to output from salt http.query or http.wait_for_successful_query states used with retry # Uncaught exception, closing connection|Exception in callback None - this is seen during influxdb / http.wait_for_successful_query state for ubuntu reinstall -if grep -E "ERROR|Result: False" $setup_log | grep -qvE "Status .* was not found|An exception occurred in this state|Uncaught exception, closing connection|Exception in callback None|deprecation: ERROR" || [[ -s /var/spool/mail/root && "$setup_type" == "iso" ]]; then +if grep -E "ERROR|Result: False" $setup_log | grep -qvE "Status .* was 
not found|An exception occurred in this state|Uncaught exception, closing connection|Exception in callback None|deprecation: ERROR|code: 100" || [[ -s /var/spool/mail/root && "$setup_type" == "iso" ]]; then SO_ERROR=1 - grep --color=never "ERROR" "$setup_log" | grep -qvE "Status .* was not found|An exception occurred in this state|Uncaught exception, closing connection|Exception in callback None" > "$error_log" + grep --color=never "ERROR" "$setup_log" | grep -qvE "Status .* was not found|An exception occurred in this state|Uncaught exception, closing connection|Exception in callback None|deprecation: ERROR|code: 100" > "$error_log" fi if [[ -n $SO_ERROR ]]; then diff --git a/sigs/securityonion-2.3.130-20220607.iso.sig b/sigs/securityonion-2.3.130-20220607.iso.sig new file mode 100644 index 000000000..e3f97a43a Binary files /dev/null and b/sigs/securityonion-2.3.130-20220607.iso.sig differ