Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-20 16:03:06 +01:00)
Merge remote-tracking branch 'origin/2.4/dev' into issue/11390

@@ -80,8 +80,8 @@ function evtx2es() {
-e "SHIFTTS=$SHIFTDATE" \
-v "$EVTX:/tmp/data.evtx" \
-v "/nsm/import/$HASH/evtx/:/tmp/evtx/" \
-v "/nsm/import/evtx-end_newest:/tmp/newest" \
-v "/nsm/import/evtx-start_oldest:/tmp/oldest" \
-v "/nsm/import/$HASH/evtx-end_newest:/tmp/newest" \
-v "/nsm/import/$HASH/evtx-start_oldest:/tmp/oldest" \
--entrypoint "/evtx_calc_timestamps.sh" \
{{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} >> $LOG_FILE 2>&1
}
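The two -v mounts that now include $HASH move the evtx-start_oldest and evtx-end_newest bookkeeping files from the shared /nsm/import/ directory into the per-import hash directory. A minimal sketch of the layout those mounts assume (the HASH value and the mkdir/touch calls here are illustrative, not taken from the commit):

```bash
# Illustrative only: the per-import layout implied by the new -v mounts above.
HASH="0123abcd"                        # hypothetical import hash
HASH_DIR="/nsm/import/$HASH"
mkdir -p "$HASH_DIR/evtx"              # holds the copied data.evtx
touch "$HASH_DIR/evtx-start_oldest" \
      "$HASH_DIR/evtx-end_newest"      # per-import timestamp bounds
ls -l "$HASH_DIR"
```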

@@ -111,12 +111,6 @@ INVALID_EVTXS_COUNT=0
VALID_EVTXS_COUNT=0
SKIPPED_EVTXS_COUNT=0

touch /nsm/import/evtx-start_oldest
touch /nsm/import/evtx-end_newest

echo $START_OLDEST > /nsm/import/evtx-start_oldest
echo $END_NEWEST > /nsm/import/evtx-end_newest

# paths must be quoted in case they include spaces
for EVTX in $INPUT_FILES; do
EVTX=$(/usr/bin/realpath "$EVTX")

@@ -141,8 +135,15 @@ for EVTX in $INPUT_FILES; do
status "- this EVTX has already been imported; skipping"
SKIPPED_EVTXS_COUNT=$((SKIPPED_EVTXS_COUNT + 1))
else
# create EVTX directory
EVTX_DIR=$HASH_DIR/evtx
mkdir -p $EVTX_DIR
# create import timestamp files
for i in evtx-start_oldest evtx-end_newest; do
if ! [ -f "$i" ]; then
touch /nsm/import/$HASH/$i
fi
done

# import evtx and write them to import ingest pipeline
status "- importing logs to Elasticsearch..."

@@ -154,8 +155,23 @@ for EVTX in $INPUT_FILES; do
VALID_EVTXS_COUNT=$((VALID_EVTXS_COUNT + 1))
fi

cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
chmod 644 "${EVTX_DIR}"/data.evtx

fi # end of valid evtx

# determine start and end and make sure they aren't reversed
START=$(cat /nsm/import/$HASH/evtx-start_oldest)
END=$(cat /nsm/import/$HASH/evtx-end_newest)
START_EPOCH=`date -d "$START" +"%s"`
END_EPOCH=`date -d "$END" +"%s"`
if [ "$START_EPOCH" -gt "$END_EPOCH" ]; then
TEMP=$START
START=$END
END=$TEMP
fi

# compare $START to $START_OLDEST
START=$(cat /nsm/import/evtx-start_oldest)
START_COMPARE=$(date -d $START +%s)
START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s)
if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then

@@ -163,7 +179,6 @@ for EVTX in $INPUT_FILES; do
fi

# compare $ENDNEXT to $END_NEWEST
END=$(cat /nsm/import/evtx-end_newest)
ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"`
ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s)
END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s)

@@ -171,11 +186,6 @@ for EVTX in $INPUT_FILES; do
END_NEWEST=$ENDNEXT
fi

cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx
chmod 644 "${EVTX_DIR}"/data.evtx

fi # end of valid evtx

status

done # end of for-loop processing evtx files
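The timestamp handling above relies on two GNU date idioms: converting a date string to epoch seconds so the range can be compared and swapped if reversed, and advancing the end of the range by one day for ENDNEXT. A standalone illustration with made-up values (not taken from the commit):

```bash
#!/bin/bash
# Illustrative values only.
START="2024-05-02"
END="2024-05-01"

# Epoch comparison: swap the bounds if the range is reversed, as the script does.
if [ "$(date -d "$START" +%s)" -gt "$(date -d "$END" +%s)" ]; then
  TEMP=$START; START=$END; END=$TEMP
fi
echo "range: $START -> $END"           # range: 2024-05-01 -> 2024-05-02

# Advance the end of the range by one day, mirroring the ENDNEXT calculation.
ENDNEXT=$(date +%Y-%m-%d --date="$END 1 day")
echo "end of import window: $ENDNEXT"  # end of import window: 2024-05-03
```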

@@ -3689,6 +3689,7 @@ elasticsearch:
refresh_interval: 30s
number_of_shards: 1
number_of_replicas: 0
final_pipeline: ".fleet_final_pipeline-1"
composed_of:
- agent-mappings
- dtc-agent-mappings
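The added composed_of entry assumes a component template named dtc-agent-mappings is present in Elasticsearch. A hedged way to confirm that from the manager (host, port, and credential variables are assumptions, not part of the commit):

```bash
# Assumes Elasticsearch is reachable locally and credentials are already exported.
curl -sk -u "$ES_USER:$ES_PASS" \
  "https://localhost:9200/_component_template/dtc-agent-mappings" | jq .
```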

@@ -80,6 +80,7 @@
{ "set": { "if": "ctx.network?.type == 'ipv6'", "override": true, "field": "destination.ipv6", "value": "true" } },
{ "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.dataset", "value": "import" } },
{ "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.namespace", "value": "so" } },
{ "date": { "if": "ctx.event?.module == 'system'", "field": "event.created", "target_field": "@timestamp", "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } },
{ "community_id":{ "if": "ctx.event?.dataset == 'endpoint.events.network'", "ignore_failure":true } },
{ "remove": { "field": [ "message2", "type", "fields", "category", "module", "dataset", "event.dataset_temp", "dataset_tag_temp", "module_temp" ], "ignore_missing": true, "ignore_failure": true } }
],
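The two set processors keyed on ctx.tags.0 == 'import' can be exercised in isolation with the ingest simulate API. The inline pipeline below is a sketch that copies just those processors; it is not the full Security Onion pipeline definition, and the host and credential variables are assumptions:

```bash
# Simulate only the tag-driven processors against a dummy document.
curl -sk -u "$ES_USER:$ES_PASS" -H 'Content-Type: application/json' \
  -X POST "https://localhost:9200/_ingest/pipeline/_simulate" -d '{
  "pipeline": {
    "processors": [
      { "set": { "if": "ctx.tags.0 == '\''import'\''", "override": true, "field": "data_stream.dataset", "value": "import" } },
      { "set": { "if": "ctx.tags.0 == '\''import'\''", "override": true, "field": "data_stream.namespace", "value": "so" } }
    ]
  },
  "docs": [ { "_source": { "tags": ["import"] } } ]
}'
```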

@@ -20,6 +20,9 @@
"type": "float"
}
}
},
"image_version": {
"type": "float"
}
}
},
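Once the mapping change is rolled out, the effective mapping for the new field can be checked against a concrete index; the index name below is a placeholder, and the wildcard is used because the full dotted field path is not shown in this hunk:

```bash
# Placeholder index name; pick a real one from `GET _cat/indices` first.
curl -sk -u "$ES_USER:$ES_PASS" \
  "https://localhost:9200/placeholder-index/_mapping/field/*image_version" | jq .
```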

@@ -441,8 +441,8 @@ post_to_2.4.10() {
}

post_to_2.4.20() {
echo "Pruning unused volumes"
docker volume prune -f
echo "Pruning unused docker volumes on all nodes - This process will run in the background."
salt --async \* cmd.run "docker volume prune -f"
POSTVERSION=2.4.20
}
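Because salt --async returns as soon as the job is published, it prints a job ID instead of per-node results. If you want to confirm the prune actually ran on every node, the results can be pulled up later by that JID; the JID below is a placeholder:

```bash
# Fire-and-forget prune on all minions; Salt prints a JID for the job.
salt --async '*' cmd.run 'docker volume prune -f'

# Later, look up the per-minion results for that JID (placeholder value shown).
salt-run jobs.lookup_jid 20240101000000000000
```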

@@ -230,6 +230,19 @@ http {
proxy_cookie_path /api/ /influxdb/api/;
}

location /app/dashboards/ {
auth_request /auth/sessions/whoami;
rewrite /app/dashboards/(.*) /app/dashboards/$1 break;
proxy_pass http://{{ GLOBALS.manager }}:5601/app/;
proxy_read_timeout 300;
proxy_connect_timeout 300;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}

location /kibana/ {
auth_request /auth/sessions/whoami;
rewrite /kibana/(.*) /$1 break;
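A quick smoke test for the new /app/dashboards/ location can be run from the manager itself; without a valid SOC session the auth_request should deny the request, so anything other than a 404 suggests the location is loaded. This check is a sketch, not part of the commit, and assumes the proxy listens on the local HTTPS port:

```bash
# Expect an auth-related status (e.g. 302/401) when unauthenticated;
# a 404 would suggest the new location block is not active.
curl -sk -o /dev/null -w '%{http_code}\n' "https://localhost/app/dashboards/"
```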

@@ -154,6 +154,12 @@ The analyzer itself will only run when a user in SOC enqueues an analyzer job, s
python -m urlhaus '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
```

To manually test an analyzer outside of the Sensoroni Docker container, use a command similar to the following:

```bash
PYTHONPATH=. python urlhaus/urlhaus.py '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
```

It is up to each analyzer to determine whether the provided input is compatible with that analyzer. This is assisted by the analyzer metadata, as described earlier in this document, with the use of the `supportedTypes` list.

Once the analyzer completes its functionality, it must terminate promptly. See the following sections for more details on expected internal behavior of the analyzer.

@@ -2,53 +2,53 @@ sensoroni:
enabled:
description: Enable or disable Sensoroni.
advanced: True
helpLink: sensoroni.html
helpLink: grid.html
config:
analyze:
enabled:
description: Enable or disable the analyzer.
advanced: True
helpLink: sensoroni.html
helpLink: cases.html
timeout_ms:
description: Timeout period for the analyzer.
advanced: True
helpLink: sensoroni.html
helpLink: cases.html
parallel_limit:
description: Parallel limit for the analyzer.
advanced: True
helpLink: sensoroni.html
helpLink: cases.html
node_checkin_interval_ms:
description: Interval in ms to checkin to the soc_host.
advanced: True
helpLink: sensoroni.html
helpLink: grid.html
node_description:
description: Description of the specific node.
helpLink: sensoroni.html
helpLink: grid.html
node: True
forcedType: string
sensoronikey:
description: Shared key for sensoroni authentication.
helpLink: sensoroni.html
helpLink: grid.html
global: True
sensitive: True
advanced: True
soc_host:
description: Host for sensoroni agents to connect to.
helpLink: sensoroni.html
helpLink: grid.html
global: True
advanced: True
analyzers:
emailrep:
api_key:
description: API key for the EmailRep analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
base_url:
description: Base URL for the EmailRep analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -56,21 +56,21 @@ sensoroni:
greynoise:
api_key:
description: API key for the GreyNoise analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
api_version:
description: API version for the GreyNoise analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True
forcedType: string
base_url:
description: Base URL for the GreyNoise analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -78,7 +78,7 @@ sensoroni:
localfile:
file_path:
description: File path for the LocalFile analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -86,14 +86,14 @@ sensoroni:
otx:
api_key:
description: API key for the OTX analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
base_url:
description: Base URL for the OTX analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -101,14 +101,14 @@ sensoroni:
pulsedive:
api_key:
description: API key for the Pulsedive analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
base_url:
description: Base URL for the Pulsedive analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -116,14 +116,14 @@ sensoroni:
spamhaus:
lookup_host:
description: Host to use for lookups.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True
forcedType: string
nameservers:
description: Nameservers used for queries.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -131,35 +131,35 @@ sensoroni:
urlscan:
api_key:
description: API key for the Urlscan analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
base_url:
description: Base URL for the Urlscan analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True
forcedType: string
enabled:
description: Analyzer enabled
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True
forcedType: bool
timeout:
description: Timeout for the Urlscan analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True
forcedType: int
visibility:
description: Type of visibility.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True

@@ -167,14 +167,14 @@ sensoroni:
virustotal:
api_key:
description: API key for the VirusTotal analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: True
advanced: True
forcedType: string
base_url:
description: Base URL for the VirusTotal analyzer.
helpLink: sensoroni.html
helpLink: cases.html
global: False
sensitive: False
advanced: True