diff --git a/salt/common/tools/sbin_jinja/so-import-evtx b/salt/common/tools/sbin_jinja/so-import-evtx
index 59a13612c..d12f34593 100755
--- a/salt/common/tools/sbin_jinja/so-import-evtx
+++ b/salt/common/tools/sbin_jinja/so-import-evtx
@@ -80,8 +80,8 @@ function evtx2es() {
     -e "SHIFTTS=$SHIFTDATE" \
     -v "$EVTX:/tmp/data.evtx" \
     -v "/nsm/import/$HASH/evtx/:/tmp/evtx/" \
-    -v "/nsm/import/evtx-end_newest:/tmp/newest" \
-    -v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
+    -v "/nsm/import/$HASH/evtx-end_newest:/tmp/newest" \
+    -v "/nsm/import/$HASH/evtx-start_oldest:/tmp/oldest" \
     --entrypoint "/evtx_calc_timestamps.sh" \
     {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} >> $LOG_FILE 2>&1
 }
@@ -111,12 +111,6 @@ INVALID_EVTXS_COUNT=0
 VALID_EVTXS_COUNT=0
 SKIPPED_EVTXS_COUNT=0
 
-touch /nsm/import/evtx-start_oldest
-touch /nsm/import/evtx-end_newest
-
-echo $START_OLDEST > /nsm/import/evtx-start_oldest
-echo $END_NEWEST > /nsm/import/evtx-end_newest
-
 # paths must be quoted in case they include spaces
 for EVTX in $INPUT_FILES; do
   EVTX=$(/usr/bin/realpath "$EVTX")
@@ -141,8 +135,15 @@ for EVTX in $INPUT_FILES; do
     status "- this EVTX has already been imported; skipping"
     SKIPPED_EVTXS_COUNT=$((SKIPPED_EVTXS_COUNT + 1))
   else
+    # create EVTX directory
     EVTX_DIR=$HASH_DIR/evtx
     mkdir -p $EVTX_DIR
+    # create import timestamp files
+    for i in evtx-start_oldest evtx-end_newest; do
+      if ! [ -f "/nsm/import/$HASH/$i" ]; then
+        touch "/nsm/import/$HASH/$i"
+      fi
+    done
 
     # import evtx and write them to import ingest pipeline
     status "- importing logs to Elasticsearch..."
@@ -154,28 +155,37 @@ for EVTX in $INPUT_FILES; do
       VALID_EVTXS_COUNT=$((VALID_EVTXS_COUNT + 1))
     fi
 
-    # compare $START to $START_OLDEST
-    START=$(cat /nsm/import/evtx-start_oldest)
-    START_COMPARE=$(date -d $START +%s)
-    START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
-    if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
-      START_OLDEST=$START
-    fi
-
-    # compare $ENDNEXT to $END_NEWEST
-    END=$(cat /nsm/import/evtx-end_newest)
-    ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
-    ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
-    END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
-    if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
-      END_NEWEST=$ENDNEXT
-    fi
-
     cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
     chmod 644 "${EVTX_DIR}"/data.evtx
   fi # end of valid evtx
 
+  # determine start and end and make sure they aren't reversed
+  START=$(cat /nsm/import/$HASH/evtx-start_oldest)
+  END=$(cat /nsm/import/$HASH/evtx-end_newest)
+  START_EPOCH=`date -d "$START" +"%s"`
+  END_EPOCH=`date -d "$END" +"%s"`
+  if [ "$START_EPOCH" -gt "$END_EPOCH" ]; then
+    TEMP=$START
+    START=$END
+    END=$TEMP
+  fi
+
+  # compare $START to $START_OLDEST
+  START_COMPARE=$(date -d $START +%s)
+  START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
+  if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then
+    START_OLDEST=$START
+  fi
+
+  # compare $ENDNEXT to $END_NEWEST
+  ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
+  ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
+  END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)
+  if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then
+    END_NEWEST=$ENDNEXT
+  fi
+
   status
 done # end of for-loop processing evtx files
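Note: the ordering guard added to so-import-evtx can be exercised on its own. Below is a minimal stand-alone sketch of that logic; the two date values are hypothetical stand-ins for the contents of the per-hash marker files, not part of the patch:

```bash
#!/bin/bash
# Stand-alone sketch of the swap guard from so-import-evtx above.
# The two dates are hypothetical stand-ins for the values read from
# /nsm/import/$HASH/evtx-start_oldest and evtx-end_newest.
START="2024-05-02"
END="2024-05-01"
START_EPOCH=$(date -d "$START" +%s)   # GNU date: date string -> epoch seconds
END_EPOCH=$(date -d "$END" +%s)
if [ "$START_EPOCH" -gt "$END_EPOCH" ]; then
  # swap so START always holds the earlier timestamp
  TEMP=$START
  START=$END
  END=$TEMP
fi
echo "$START -> $END"   # prints: 2024-05-01 -> 2024-05-02
```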
diff --git a/salt/elasticsearch/defaults.yaml b/salt/elasticsearch/defaults.yaml
index cc2f5e1cd..91e5191f6 100644
--- a/salt/elasticsearch/defaults.yaml
+++ b/salt/elasticsearch/defaults.yaml
@@ -3689,6 +3689,7 @@ elasticsearch:
         refresh_interval: 30s
         number_of_shards: 1
         number_of_replicas: 0
+        final_pipeline: ".fleet_final_pipeline-1"
       composed_of:
       - agent-mappings
       - dtc-agent-mappings
diff --git a/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1 b/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1
index 688000fb7..52b6bae7a 100644
--- a/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1
+++ b/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1
@@ -80,6 +80,7 @@
     { "set": { "if": "ctx.network?.type == 'ipv6'", "override": true, "field": "destination.ipv6", "value": "true" } },
     { "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.dataset", "value": "import" } },
     { "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.namespace", "value": "so" } },
+    { "date": { "if": "ctx.event?.module == 'system'", "field": "event.created", "target_field": "@timestamp", "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } },
     { "community_id":{ "if": "ctx.event?.dataset == 'endpoint.events.network'", "ignore_failure":true } },
     { "remove": { "field": [ "message2", "type", "fields", "category", "module", "dataset", "event.dataset_temp", "dataset_tag_temp", "module_temp" ], "ignore_missing": true, "ignore_failure": true } }
   ],
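The `date` processor added to `.fleet_final_pipeline-1` can be dry-run before deployment with Elasticsearch's standard `_ingest/pipeline/_simulate` API. A hedged sketch follows; the host/port and the sample `event.created` value are assumptions for illustration only:

```bash
# Dry-run the added date processor against a sample document.
# Host/port and the sample timestamp are assumptions, not part of the patch.
curl -s -XPOST 'http://localhost:9200/_ingest/pipeline/_simulate' \
  -H 'Content-Type: application/json' --data-binary @- <<'EOF'
{
  "pipeline": {
    "processors": [
      { "date": { "if": "ctx.event?.module == 'system'",
                  "field": "event.created",
                  "target_field": "@timestamp",
                  "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } }
    ]
  },
  "docs": [
    { "_source": { "event": { "module": "system", "created": "2024-01-02T03:04:05.123456Z" } } }
  ]
}
EOF
```

A successful response echoes the document with `@timestamp` populated from `event.created`.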
diff --git a/salt/elasticsearch/templates/component/so/so-scan-mappings.json b/salt/elasticsearch/templates/component/so/so-scan-mappings.json
index 8ddbe6077..008a6ab10 100644
--- a/salt/elasticsearch/templates/component/so/so-scan-mappings.json
+++ b/salt/elasticsearch/templates/component/so/so-scan-mappings.json
@@ -20,7 +20,10 @@
             "type": "float"
           }
         }
-      }
+      },
+      "image_version": {
+        "type": "float"
+      }
     }
   },
   "elf": {
diff --git a/salt/manager/tools/sbin/soup b/salt/manager/tools/sbin/soup
index 8ec9f9bad..8c6c2b237 100755
--- a/salt/manager/tools/sbin/soup
+++ b/salt/manager/tools/sbin/soup
@@ -441,8 +441,8 @@ post_to_2.4.10() {
 }
 
 post_to_2.4.20() {
-  echo "Pruning unused volumes"
-  docker volume prune -f
+  echo "Pruning unused docker volumes on all nodes - This process will run in the background."
+  salt --async \* cmd.run "docker volume prune -f"
   POSTVERSION=2.4.20
 }
diff --git a/salt/nginx/etc/nginx.conf b/salt/nginx/etc/nginx.conf
index 05da0b5d8..795663384 100644
--- a/salt/nginx/etc/nginx.conf
+++ b/salt/nginx/etc/nginx.conf
@@ -230,7 +230,20 @@ http {
       proxy_cookie_path /api/ /influxdb/api/;
     }
 
-    location /kibana/ {
+    location /app/dashboards/ {
+      auth_request /auth/sessions/whoami;
+      rewrite /app/dashboards/(.*) /app/dashboards/$1 break;
+      proxy_pass http://{{ GLOBALS.manager }}:5601/app/;
+      proxy_read_timeout 300;
+      proxy_connect_timeout 300;
+      proxy_set_header Host $host;
+      proxy_set_header X-Real-IP $remote_addr;
+      proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+      proxy_set_header Proxy "";
+      proxy_set_header X-Forwarded-Proto $scheme;
+    }
+
+    location /kibana/ {
       auth_request /auth/sessions/whoami;
       rewrite /kibana/(.*) /$1 break;
       proxy_pass http://{{ GLOBALS.manager }}:5601/;
diff --git a/salt/sensoroni/files/analyzers/README.md b/salt/sensoroni/files/analyzers/README.md
index 8b1f44f29..19335a545 100644
--- a/salt/sensoroni/files/analyzers/README.md
+++ b/salt/sensoroni/files/analyzers/README.md
@@ -154,6 +154,12 @@ The analyzer itself will only run when a user in SOC enqueues an analyzer job, s
 python -m urlhaus '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
 ```
 
+To manually test an analyzer outside of the Sensoroni Docker container, use a command similar to the following:
+
+```bash
+PYTHONPATH=. python urlhaus/urlhaus.py '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
+```
+
 It is up to each analyzer to determine whether the provided input is compatible with that analyzer. This is assisted by the analyzer metadata, as described earlier in this document, with the use of the `supportedTypes` list.
 
 Once the analyzer completes its functionality, it must terminate promptly. See the following sections for more details on expected internal behavior of the analyzer.
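Usage note for the README addition above: `PYTHONPATH=.` presumably lets the directly invoked analyzer script resolve sibling helper modules from the analyzers directory, which the in-container `python -m` form gets for free. A hedged sketch, run from the analyzers directory shown in the diff header, with the artifact JSON abbreviated exactly as in the README examples:

```bash
# Run from the analyzers directory so PYTHONPATH=. puts the shared helper
# modules on the import path; the "..." abbreviates additional artifact
# fields, as in the README examples above.
cd salt/sensoroni/files/analyzers
PYTHONPATH=. python urlhaus/urlhaus.py '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
```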
diff --git a/salt/sensoroni/soc_sensoroni.yaml b/salt/sensoroni/soc_sensoroni.yaml
index eb63dbe25..db51da358 100644
--- a/salt/sensoroni/soc_sensoroni.yaml
+++ b/salt/sensoroni/soc_sensoroni.yaml
@@ -2,53 +2,53 @@ sensoroni:
   enabled:
     description: Enable or disable Sensoroni.
     advanced: True
-    helpLink: sensoroni.html
+    helpLink: grid.html
   config:
     analyze:
       enabled:
         description: Enable or disable the analyzer.
         advanced: True
-        helpLink: sensoroni.html
+        helpLink: cases.html
       timeout_ms:
         description: Timeout period for the analyzer.
         advanced: True
-        helpLink: sensoroni.html
+        helpLink: cases.html
       parallel_limit:
         description: Parallel limit for the analyzer.
         advanced: True
-        helpLink: sensoroni.html
+        helpLink: cases.html
     node_checkin_interval_ms:
       description: Interval in ms to checkin to the soc_host.
       advanced: True
-      helpLink: sensoroni.html
+      helpLink: grid.html
     node_description:
       description: Description of the specific node.
-      helpLink: sensoroni.html
+      helpLink: grid.html
       node: True
       forcedType: string
     sensoronikey:
       description: Shared key for sensoroni authentication.
-      helpLink: sensoroni.html
+      helpLink: grid.html
       global: True
       sensitive: True
      advanced: True
    soc_host:
      description: Host for sensoroni agents to connect to.
-      helpLink: sensoroni.html
+      helpLink: grid.html
      global: True
      advanced: True
    analyzers:
      emailrep:
        api_key:
          description: API key for the EmailRep analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the EmailRep analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -56,21 +56,21 @@ sensoroni:
      greynoise:
        api_key:
          description: API key for the GreyNoise analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        api_version:
          description: API version for the GreyNoise analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the GreyNoise analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -78,7 +78,7 @@ sensoroni:
      localfile:
        file_path:
          description: File path for the LocalFile analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -86,14 +86,14 @@ sensoroni:
      otx:
        api_key:
          description: API key for the OTX analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the OTX analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -101,14 +101,14 @@ sensoroni:
      pulsedive:
        api_key:
          description: API key for the Pulsedive analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the Pulsedive analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -116,14 +116,14 @@ sensoroni:
      spamhaus:
        lookup_host:
          description: Host to use for lookups.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
          forcedType: string
        nameservers:
          description: Nameservers used for queries.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -131,35 +131,35 @@ sensoroni:
      urlscan:
        api_key:
          description: API key for the Urlscan analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the Urlscan analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
          forcedType: string
        enabled:
          description: Analyzer enabled
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
          forcedType: bool
        timeout:
          description: Timeout for the Urlscan analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
          forcedType: int
        visibility:
          description: Type of visibility.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
@@ -167,14 +167,14 @@ sensoroni:
      virustotal:
        api_key:
          description: API key for the VirusTotal analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: True
          advanced: True
          forcedType: string
        base_url:
          description: Base URL for the VirusTotal analyzer.
-          helpLink: sensoroni.html
+          helpLink: cases.html
          global: False
          sensitive: False
          advanced: True
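Since this file changes every `helpLink` from `sensoroni.html` to either `grid.html` or `cases.html`, a quick review check is to confirm that no stale references remain and to list the distinct targets now in use. A minimal sketch, run from the repository root:

```bash
# Fail loudly if any stale helpLink survives, then summarize the
# distinct helpLink targets now referenced in the file.
if grep -n 'helpLink: sensoroni.html' salt/sensoroni/soc_sensoroni.yaml; then
  echo "stale helpLink entries remain" >&2
  exit 1
fi
grep -o 'helpLink: .*' salt/sensoroni/soc_sensoroni.yaml | sort | uniq -c
```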