Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 09:12:45 +01:00)

Merge pull request #14773 from Security-Onion-Solutions/2.4/dev

2.4.160
.github/DISCUSSION_TEMPLATE/2-4.yml (vendored, 1 addition)

@@ -28,6 +28,7 @@ body:
         - 2.4.140
         - 2.4.141
         - 2.4.150
+        - 2.4.160
         - Other (please provide detail below)
     validations:
       required: true
@@ -1,17 +1,17 @@
-### 2.4.150-20250522 ISO image released on 2025/05/22
+### 2.4.160-20250625 ISO image released on 2025/06/25


 ### Download and Verify

-2.4.150-20250522 ISO image:
-https://download.securityonion.net/file/securityonion/securityonion-2.4.150-20250522.iso
+2.4.160-20250625 ISO image:
+https://download.securityonion.net/file/securityonion/securityonion-2.4.160-20250625.iso

-MD5: 239E69B83072BBF2602D4043FE53A160
-SHA1: C62893D3C7F5592665BFDCBC9A45BB20A926F9A8
-SHA256: 2ADE037C7FD34591030B1FAC10392C4E6613F152DD24BFBD897E57EE300895B9
+MD5: 78CF5602EFFAB84174C56AD2826E6E4E
+SHA1: FC7EEC3EC95D97D3337501BAA7CA8CAE7C0E15EA
+SHA256: 0ED965E8BEC80EE16AE90A0F0F96A3046CEF2D92720A587278DDDE3B656C01C2

 Signature for ISO image:
-https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.150-20250522.iso.sig
+https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.160-20250625.iso.sig

 Signing key:
 https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS

 Download the signature file for the ISO:
 ```
-wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.150-20250522.iso.sig
+wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.160-20250625.iso.sig
 ```

 Download the ISO image:
 ```
-wget https://download.securityonion.net/file/securityonion/securityonion-2.4.150-20250522.iso
+wget https://download.securityonion.net/file/securityonion/securityonion-2.4.160-20250625.iso
 ```

 Verify the downloaded ISO image using the signature file:
 ```
-gpg --verify securityonion-2.4.150-20250522.iso.sig securityonion-2.4.150-20250522.iso
+gpg --verify securityonion-2.4.160-20250625.iso.sig securityonion-2.4.160-20250625.iso
 ```

 The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
 ```
-gpg: Signature made Thu 22 May 2025 11:15:06 AM EDT using RSA key ID FE507013
+gpg: Signature made Wed 25 Jun 2025 10:13:33 AM EDT using RSA key ID FE507013
 gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
 gpg: WARNING: This key is not certified with a trusted signature!
 gpg: There is no indication that the signature belongs to the owner.
 ```
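If the signing key has not been imported yet, `gpg --verify` will report an unknown key rather than "Good signature". A minimal sketch, assuming the KEYS file was fetched with the wget command shown above:

```
gpg --import KEYS        # one-time import of the Security Onion signing key
gpg --verify securityonion-2.4.160-20250625.iso.sig securityonion-2.4.160-20250625.iso
```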
@@ -99,6 +99,17 @@ add_interface_bond0() {
   fi
 }

+airgap_playbooks() {
+  SRC_DIR=$1
+  # Copy playbooks if using airgap
+  mkdir -p /nsm/airgap-resources
+  # Purge old airgap playbooks to ensure SO only uses the latest released playbooks
+  rm -fr /nsm/airgap-resources/playbooks
+  tar xf $SRC_DIR/airgap-resources/playbooks.tgz -C /nsm/airgap-resources/
+  chown -R socore:socore /nsm/airgap-resources/playbooks
+  git config --global --add safe.directory /nsm/airgap-resources/playbooks
+}
+
 check_container() {
   docker ps | grep "$1:" > /dev/null 2>&1
   return $?
@@ -299,7 +310,8 @@ fail() {

 get_agent_count() {
   if [ -f /opt/so/log/agents/agentstatus.log ]; then
-    AGENTCOUNT=$(cat /opt/so/log/agents/agentstatus.log | grep -wF active | awk '{print $2}')
+    AGENTCOUNT=$(cat /opt/so/log/agents/agentstatus.log | grep -wF active | awk '{print $2}' | sed 's/,//')
+    [[ -z "$AGENTCOUNT" ]] && AGENTCOUNT="0"
   else
     AGENTCOUNT=0
   fi
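The added `sed 's/,//'` strips the thousands separator that the status log can include once the active-agent count passes 999; without it the value is unusable in numeric comparisons. A minimal sketch of the failure mode, with a hypothetical log line:

```
# Hypothetical agentstatus.log line for a large fleet: "active 1,024"
count=$(echo "active 1,024" | grep -wF active | awk '{print $2}')
[[ "$count" -gt 0 ]] 2>/dev/null || echo "\"$count\" is not numeric"   # fails on 1,024
count=$(echo "active 1,024" | grep -wF active | awk '{print $2}' | sed 's/,//')
echo "$count"   # 1024, safe for arithmetic
```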
@@ -45,7 +45,7 @@ def check_for_fps():
         result = subprocess.run([feat_full + '-mode-setup', '--is-enabled'], stdout=subprocess.PIPE)
         if result.returncode == 0:
             fps = 1
-    except FileNotFoundError:
+    except:
         fn = '/proc/sys/crypto/' + feat_full + '_enabled'
         try:
             with open(fn, 'r') as f:
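Widening `except FileNotFoundError:` to a bare `except:` means any failure of the `*-mode-setup` helper, not just a missing binary, now falls through to the `/proc` check. The fallback can be reproduced by hand; a hedged sketch for the FIPS case:

```
# Manual equivalent of the fallback path above (fips example):
if [ "$(cat /proc/sys/crypto/fips_enabled 2>/dev/null)" = "1" ]; then
  echo "FIPS mode is enabled"
fi
```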
@@ -19,11 +19,12 @@
   { "set": { "if": "ctx.network?.type == 'ipv6'", "override": true, "field": "destination.ipv6", "value": "true" } },
   { "set": { "if": "ctx.tags != null && ctx.tags.contains('import')", "override": true, "field": "data_stream.dataset", "value": "import" } },
   { "set": { "if": "ctx.tags != null && ctx.tags.contains('import')", "override": true, "field": "data_stream.namespace", "value": "so" } },
-  { "date": { "if": "ctx.event?.module == 'system'", "field": "event.created", "target_field": "@timestamp","ignore_failure": true, "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSX","yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } },
   { "community_id":{ "if": "ctx.event?.dataset == 'endpoint.events.network'", "ignore_failure":true } },
   { "set": { "if": "ctx.event?.module == 'fim'", "override": true, "field": "event.module", "value": "file_integrity" } },
   { "rename": { "if": "ctx.winlog?.provider_name == 'Microsoft-Windows-Windows Defender'", "ignore_missing": true, "field": "winlog.event_data.Threat Name", "target_field": "winlog.event_data.threat_name" } },
   { "set": { "if": "ctx?.metadata?.kafka != null" , "field": "kafka.id", "value": "{{metadata.kafka.partition}}{{metadata.kafka.offset}}{{metadata.kafka.timestamp}}", "ignore_failure": true } },
+  {"append": {"field":"related.ip","value":["{{source.ip}}","{{destination.ip}}"],"allow_duplicates":false,"if":"ctx?.event?.dataset == 'endpoint.events.network' && ctx?.source?.ip != null","ignore_failure":true}},
+  {"foreach": {"field":"host.ip","processor":{"append":{"field":"related.ip","value":"{{_ingest._value}}","allow_duplicates":false}},"if":"ctx?.event?.module == 'endpoint'","description":"Extract IPs from Elastic Agent events (host.ip) and adds them to related.ip"}},
   { "remove": { "field": [ "message2", "type", "fields", "category", "module", "dataset", "event.dataset_temp", "dataset_tag_temp", "module_temp", "datastream_dataset_temp" ], "ignore_missing": true, "ignore_failure": true } }
 ]
 }
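Ingest pipeline edits like the two new `related.ip` processors can be exercised without reindexing via the Elasticsearch simulate API. A hedged sketch, assuming the pipeline is installed under the name `common` (as other hunks in this commit reference it) and that `curl.config` carries the cluster credentials as it does elsewhere in this repo:

```
curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L -X POST \
  "https://localhost:9200/_ingest/pipeline/common/_simulate" \
  -H 'Content-Type: application/json' \
  -d '{"docs":[{"_source":{"event":{"module":"endpoint","dataset":"endpoint.events.network"},"source":{"ip":"10.0.0.1"},"destination":{"ip":"192.168.1.5"}}}]}'
```

The response should show both addresses appended to `related.ip`.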
@@ -1,11 +0,0 @@
-{
-  "description" : "import.wel",
-  "processors" : [
-    { "set": { "field": "event.ingested", "value": "{{ @timestamp }}" } },
-    { "set" : { "field" : "@timestamp", "value" : "{{ event.created }}" } },
-    { "remove": { "field": [ "event_record_id", "event.created" , "timestamp" , "winlog.event_data.UtcTime" ], "ignore_failure": true } },
-    { "pipeline": { "if": "ctx.winlog?.channel == 'Microsoft-Windows-Sysmon/Operational'", "name": "sysmon" } },
-    { "pipeline": { "if": "ctx.winlog?.channel != 'Microsoft-Windows-Sysmon/Operational'", "name":"win.eventlogs" } },
-    { "pipeline": { "name": "common" } }
-  ]
-}
@@ -9,6 +9,7 @@
   { "rename":{ "field": "rule.signature_id", "target_field": "rule.uuid", "ignore_failure": true } },
   { "rename":{ "field": "rule.signature_id", "target_field": "rule.signature", "ignore_failure": true } },
   { "rename":{ "field": "message2.payload_printable", "target_field": "network.data.decoded", "ignore_failure": true } },
+  { "dissect": { "field": "rule.rule", "pattern": "%{?prefix}content:\"%{dns.query_name}\"%{?remainder}", "ignore_missing": true, "ignore_failure": true } },
   { "pipeline": { "name": "common.nids" } }
 ]
 }
@@ -18,6 +18,13 @@
   { "set": { "field": "event.ingested", "value": "{{@timestamp}}" } },
   { "date": { "field": "message2.timestamp", "target_field": "@timestamp", "formats": ["ISO8601", "UNIX"], "timezone": "UTC", "ignore_failure": true } },
   { "remove":{ "field": "agent", "ignore_failure": true } },
+  {"append":{"field":"related.ip","value":["{{source.ip}}","{{destination.ip}}"],"allow_duplicates":false,"ignore_failure":true}},
+  {
+    "script": {
+      "source": "boolean isPrivate(def ip) { if (ip == null) return false; int dot1 = ip.indexOf('.'); if (dot1 == -1) return false; int dot2 = ip.indexOf('.', dot1 + 1); if (dot2 == -1) return false; int first = Integer.parseInt(ip.substring(0, dot1)); if (first == 10) return true; if (first == 192 && ip.startsWith('168.', dot1 + 1)) return true; if (first == 172) { int second = Integer.parseInt(ip.substring(dot1 + 1, dot2)); return second >= 16 && second <= 31; } return false; } String[] fields = new String[] {\"source\", \"destination\"}; for (int i = 0; i < fields.length; i++) { def field = fields[i]; def ip = ctx[field]?.ip; if (ip != null) { if (ctx.network == null) ctx.network = new HashMap(); if (isPrivate(ip)) { if (ctx.network.private_ip == null) ctx.network.private_ip = new ArrayList(); if (!ctx.network.private_ip.contains(ip)) ctx.network.private_ip.add(ip); } else { if (ctx.network.public_ip == null) ctx.network.public_ip = new ArrayList(); if (!ctx.network.public_ip.contains(ip)) ctx.network.public_ip.add(ip); } } }",
+      "ignore_failure": false
+    }
+  },
   { "pipeline": { "if": "ctx?.event?.dataset != null", "name": "suricata.{{event.dataset}}" } }
 ]
 }
@@ -12,7 +12,8 @@
   { "rename": { "field": "message2.id.orig_p", "target_field": "source.port", "ignore_missing": true } },
   { "rename": { "field": "message2.id.resp_h", "target_field": "destination.ip", "ignore_missing": true } },
   { "rename": { "field": "message2.id.resp_p", "target_field": "destination.port", "ignore_missing": true } },
-  { "community_id": {} },
+  { "rename": { "field": "message2.community_id", "target_field": "network.community_id", "ignore_missing": true } },
+  { "community_id": { "if": "ctx.network?.community_id == null" } },
   { "set": { "if": "ctx.source?.ip != null", "field": "client.ip", "value": "{{source.ip}}" } },
   { "set": { "if": "ctx.source?.port != null", "field": "client.port", "value": "{{source.port}}" } },
   { "set": { "if": "ctx.destination?.ip != null", "field": "server.ip", "value": "{{destination.ip}}" } },
@@ -20,7 +20,8 @@
   { "rename": { "field": "message2.RD", "target_field": "dns.recursion.desired", "ignore_missing": true } },
   { "rename": { "field": "message2.RA", "target_field": "dns.recursion.available", "ignore_missing": true } },
   { "rename": { "field": "message2.Z", "target_field": "dns.reserved", "ignore_missing": true } },
   { "rename": { "field": "message2.answers", "target_field": "dns.answers.name", "ignore_missing": true } },
+  { "script": { "lang": "painless", "if": "ctx.dns != null && ctx.dns.answers != null && ctx.dns.answers.name != null", "source": "def ips = []; for (item in ctx.dns.answers.name) { if (item =~ /^(?:[0-9]{1,3}\\.){3}[0-9]{1,3}$/ || item =~ /^([a-fA-F0-9:]+:+)+[a-fA-F0-9]+$/) { ips.add(item); } } ctx.dns.resolved_ip = ips;" } },
   { "rename": { "field": "message2.TTLs", "target_field": "dns.ttls", "ignore_missing": true } },
   { "rename": { "field": "message2.rejected", "target_field": "dns.query.rejected", "ignore_missing": true } },
   { "script": { "lang": "painless", "source": "ctx.dns.query.length = ctx.dns.query.name.length()", "ignore_failure": true } },
@@ -28,4 +29,4 @@
   { "pipeline": { "if": "ctx.dns?.query?.name != null && ctx.dns.query.name.contains('.')", "name": "dns.tld" } },
   { "pipeline": { "name": "zeek.common" } }
 ]
 }
@@ -12,7 +12,7 @@ elasticsearch:
     description: Specify the memory heap size in (m)egabytes for Elasticsearch.
     helpLink: elasticsearch.html
   index_clean:
-    description: Determines if indices should be considered for deletion by available disk space in the cluster. Otherwise, indices will only be deleted by the age defined in the ILM settings.
+    description: Determines if indices should be considered for deletion by available disk space in the cluster. Otherwise, indices will only be deleted by the age defined in the ILM settings. This setting only applies to EVAL, STANDALONE, and HEAVY NODE installations. Other installations can only use ILM settings.
     forcedType: bool
     helpLink: elasticsearch.html
   retention:
@@ -1,4 +1,4 @@
-/bin/bash
+#!/bin/bash
 # Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
 # or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
 # https://securityonion.net/license; you may not use this file except in compliance with the
@@ -6,6 +6,6 @@

 . /usr/sbin/so-common

-
 echo "Starting ILM..."
 curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L -X POST https://localhost:9200/_ilm/start
+echo
@@ -8,3 +8,4 @@

 echo "Stopping ILM..."
 curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L -X POST https://localhost:9200/_ilm/stop
+echo
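Both scripts only POST the request; they do not confirm the resulting cluster state. A hedged companion check, reusing the same curl.config:

```
curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L https://localhost:9200/_ilm/status
echo
```

This returns `{"operation_mode":"RUNNING"}` (or `STOPPED`) once the change has taken effect.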
salt/elasticsearch/tools/sbin/so-elasticsearch-indices-growth (new file, 113 lines)

@@ -0,0 +1,113 @@
+#!/bin/bash
+
+# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
+# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
+# https://securityonion.net/license; you may not use this file except in compliance with the
+# Elastic License 2.0.
+
+INFLUX_URL="https://localhost:8086/api/v2"
+
+. /usr/sbin/so-common
+
+request() {
+  curl -skK /opt/so/conf/influxdb/curl.config "$INFLUX_URL/$@"
+}
+
+lookup_org_id() {
+  response=$(request orgs?org=Security+Onion)
+  echo "$response" | jq -r ".orgs[] | select(.name == \"Security Onion\").id"
+}
+
+ORG_ID=$(lookup_org_id)
+
+run_flux_query() {
+  local query=$1
+  request "query?org=$ORG_ID" -H 'Accept:application/csv' -H 'Content-type:application/vnd.flux' -d "$query" -XPOST 2>/dev/null
+}
+
+read_csv_result() {
+  local result="$1"
+  echo "$result" | grep '^,_result,' | head -1 | awk -F',' '{print $NF}' | tr -d '\r\n\t '
+}
+
+bytes_to_gb() {
+  local bytes="${1:-0}"
+  if [[ "$bytes" =~ ^-?[0-9]+$ ]]; then
+    echo "$bytes" | awk '{printf "%.2f", $1 / 1024 / 1024 / 1024}'
+  else
+    echo "0.00"
+  fi
+}
+
+indexes_query='from(bucket: "telegraf/so_long_term")
+  |> range(start: -7d)
+  |> filter(fn: (r) => r._measurement == "elasticsearch_index_size")
+  |> distinct(column: "_field")
+  |> keep(columns: ["_field"])'
+
+indexes_result=$(run_flux_query "$indexes_query")
+indexes=$(echo "$indexes_result" | tail -n +2 | cut -d',' -f4 | grep -v '^$' | grep -v '^_field$' | sed 's/\r$//' | sort -u)
+
+printf "%-50s %15s %15s %15s\n" "Index Name" "Last 24hr (GB)" "Last 7d (GB)" "Last 30d (GB)"
+printf "%-50s %15s %15s %15s\n" "$(printf '%.0s-' {1..50})" "$(printf '%.0s-' {1..15})" "$(printf '%.0s-' {1..15})" "$(printf '%.0s-' {1..15})"
+
+for index in $indexes; do
+  [[ -z "$index" ]] && continue
+
+  current_query="from(bucket: \"telegraf/so_long_term\")
+    |> range(start: -4h)
+    |> filter(fn: (r) => r._measurement == \"elasticsearch_index_size\" and r._field == \"$index\")
+    |> last()
+    |> keep(columns: [\"_value\"])"
+  current_result=$(run_flux_query "$current_query")
+  current_size=$(read_csv_result "$current_result")
+  current_size=${current_size:-0}
+
+  size_24h_query="from(bucket: \"telegraf/so_long_term\")
+    |> range(start: -25h, stop: -23h)
+    |> filter(fn: (r) => r._measurement == \"elasticsearch_index_size\" and r._field == \"$index\")
+    |> last()
+    |> keep(columns: [\"_value\"])"
+  size_24h_result=$(run_flux_query "$size_24h_query")
+  size_24h_ago=$(read_csv_result "$size_24h_result")
+  size_24h_ago=${size_24h_ago:-$current_size}
+
+  size_7d_query="from(bucket: \"telegraf/so_long_term\")
+    |> range(start: -7d8h, stop: -7d)
+    |> filter(fn: (r) => r._measurement == \"elasticsearch_index_size\" and r._field == \"$index\")
+    |> last()
+    |> keep(columns: [\"_value\"])"
+  size_7d_result=$(run_flux_query "$size_7d_query")
+  size_7d_ago=$(read_csv_result "$size_7d_result")
+  size_7d_ago=${size_7d_ago:-$current_size}
+
+  size_30d_query="from(bucket: \"telegraf/so_long_term\")
+    |> range(start: -30d8h, stop: -30d)
+    |> filter(fn: (r) => r._measurement == \"elasticsearch_index_size\" and r._field == \"$index\")
+    |> last()
+    |> keep(columns: [\"_value\"])"
+  size_30d_result=$(run_flux_query "$size_30d_query")
+  size_30d_ago=$(read_csv_result "$size_30d_result")
+  size_30d_ago=${size_30d_ago:-$current_size}
+
+  # If an index was recently cleaned up by ILM it will result in a negative number for 'index growth'.
+  growth_24h=$(( current_size > size_24h_ago ? current_size - size_24h_ago : 0 ))
+  growth_7d=$(( current_size > size_7d_ago ? current_size - size_7d_ago : 0 ))
+  growth_30d=$(( current_size > size_30d_ago ? current_size - size_30d_ago : 0 ))
+
+  growth_24h_gb=$(bytes_to_gb "$growth_24h")
+  growth_7d_gb=$(bytes_to_gb "$growth_7d")
+  growth_30d_gb=$(bytes_to_gb "$growth_30d")
+
+  # Only print results for indices with at least 1 metric above 0.00
+  if [[ "$growth_24h_gb" != "0.00" ]] || [[ "$growth_7d_gb" != "0.00" ]] || [[ "$growth_30d_gb" != "0.00" ]]; then
+    printf "%020.2f|%-50s %15s %15s %15s\n" \
+      "$growth_24h" \
+      "$index" \
+      "$growth_24h_gb" \
+      "$growth_7d_gb" \
+      "$growth_30d_gb"
+  fi
+done | sort -t'|' -k1,1nr | cut -d'|' -f2-
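A hedged usage sketch for the new report, assuming it is installed on the PATH like the other `so-elasticsearch-*` tools and run on a node hosting InfluxDB:

```
sudo so-elasticsearch-indices-growth
# Index Name                                 Last 24hr (GB)    Last 7d (GB)   Last 30d (GB)
# ...rows sorted by 24-hour growth; indices with all-zero growth are omitted
```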
@@ -1,26 +1,26 @@
 # Extract all PDF mime type
-alert http any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; sid:1100000; rev:1;)
+alert http any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; noalert; sid:1100000; rev:1;)
-alert smtp any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; sid:1100001; rev:1;)
+alert smtp any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; noalert; sid:1100001; rev:1;)
-alert nfs any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; sid:1100002; rev:1;)
+alert nfs any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; noalert; sid:1100002; rev:1;)
-alert smb any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; sid:1100003; rev:1;)
+alert smb any any -> any any (msg:"FILE pdf detected"; filemagic:"PDF document"; filestore; noalert; sid:1100003; rev:1;)
 # Extract EXE/DLL file types
-alert http any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; sid:1100004; rev:1;)
+alert http any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; noalert; sid:1100004; rev:1;)
-alert smtp any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; sid:1100005; rev:1;)
+alert smtp any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; noalert; sid:1100005; rev:1;)
-alert nfs any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; sid:1100006; rev:1;)
+alert nfs any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; noalert; sid:1100006; rev:1;)
-alert smb any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; sid:1100007; rev:1;)
+alert smb any any -> any any (msg:"FILE EXE detected"; filemagic:"PE32 executable"; filestore; noalert; sid:1100007; rev:1;)
-alert http any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; sid:1100008; rev:1;)
+alert http any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; noalert; sid:1100008; rev:1;)
-alert smtp any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; sid:1100009; rev:1;)
+alert smtp any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; noalert; sid:1100009; rev:1;)
-alert nfs any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; sid:1100010; rev:1;)
+alert nfs any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; noalert; sid:1100010; rev:1;)
-alert smb any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; sid:1100011; rev:1;)
+alert smb any any -> any any (msg:"FILE EXE detected"; filemagic:"MS-DOS executable"; filestore; noalert; sid:1100011; rev:1;)

 # Extract all Zip files
-alert http any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; sid:1100012; rev:1;)
+alert http any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; noalert; sid:1100012; rev:1;)
-alert smtp any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; sid:1100013; rev:1;)
+alert smtp any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; noalert; sid:1100013; rev:1;)
-alert nfs any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; sid:1100014; rev:1;)
+alert nfs any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; noalert; sid:1100014; rev:1;)
-alert smb any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; sid:1100015; rev:1;)
+alert smb any any -> any any (msg:"FILE ZIP detected"; filemagic:"Zip"; filestore; noalert; sid:1100015; rev:1;)

 # Extract Word Docs
-alert http any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; sid:1100016; rev:1;)
+alert http any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; noalert; sid:1100016; rev:1;)
-alert smtp any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; sid:1100017; rev:1;)
+alert smtp any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; noalert; sid:1100017; rev:1;)
-alert nfs any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; sid:1100018; rev:1;)
+alert nfs any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; noalert; sid:1100018; rev:1;)
-alert smb any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; sid:1100019; rev:1;)
+alert smb any any -> any any (msg:"FILE WORDDOC detected"; filemagic:"Composite Document File V2 Document"; filestore; noalert; sid:1100019; rev:1;)
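Adding `noalert;` keeps the `filestore` extraction behavior while suppressing the alert record each match would otherwise generate, so extracted files no longer flood the alert queue. Rule changes like this can be sanity-checked with Suricata's test mode; a hedged sketch (the rule file path here is an assumption):

```
# -T runs configuration/rule tests only; -S loads the given rule file exclusively.
suricata -T -c /etc/suricata/suricata.yaml -S /path/to/extraction.rules
```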
@@ -156,6 +156,13 @@ rules_dir:
     - group: socore
     - makedirs: True

+nsm_playbooks_dir:
+  file.directory:
+    - name: /nsm/airgap-resources/playbooks
+    - user: socore
+    - group: socore
+    - makedirs: True
+
 git_config_set_safe_dirs:
   git.config_set:
     - name: safe.directory
@@ -166,6 +173,8 @@ git_config_set_safe_dirs:
     - /nsm/rules/custom-local-repos/local-yara
     - /nsm/securityonion-resources
    - /opt/so/conf/soc/ai_summary_repos/securityonion-resources
+    - /nsm/airgap-resources/playbooks
+    - /opt/so/conf/soc/playbooks
 {% else %}

 {{sls}}_state_not_allowed:
@@ -417,6 +417,7 @@ preupgrade_changes() {
   [[ "$INSTALLEDVERSION" == 2.4.130 ]] && up_to_2.4.140
   [[ "$INSTALLEDVERSION" == 2.4.140 ]] && up_to_2.4.141
   [[ "$INSTALLEDVERSION" == 2.4.141 ]] && up_to_2.4.150
+  [[ "$INSTALLEDVERSION" == 2.4.150 ]] && up_to_2.4.160
   true
 }

@@ -444,6 +445,7 @@ postupgrade_changes() {
   [[ "$POSTVERSION" == 2.4.130 ]] && post_to_2.4.140
   [[ "$POSTVERSION" == 2.4.140 ]] && post_to_2.4.141
   [[ "$POSTVERSION" == 2.4.141 ]] && post_to_2.4.150
+  [[ "$POSTVERSION" == 2.4.150 ]] && post_to_2.4.160
   true
 }

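The guards in `preupgrade_changes()`/`postupgrade_changes()` chain deliberately: each `up_to_*`/`post_to_*` function sets the version variable to the release it just reached, which arms the next guard, so a node several releases behind walks every step in order. A minimal sketch of the pattern:

```
INSTALLEDVERSION=2.4.141
[[ "$INSTALLEDVERSION" == 2.4.141 ]] && INSTALLEDVERSION=2.4.150   # stands in for up_to_2.4.150
[[ "$INSTALLEDVERSION" == 2.4.150 ]] && INSTALLEDVERSION=2.4.160   # stands in for up_to_2.4.160
echo "$INSTALLEDVERSION"   # -> 2.4.160
```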
@@ -579,6 +581,11 @@ post_to_2.4.150() {
   POSTVERSION=2.4.150
 }

+post_to_2.4.160() {
+  echo "Nothing to apply"
+  POSTVERSION=2.4.160
+}
+
 repo_sync() {
   echo "Sync the local repo."
   su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -816,6 +823,12 @@ up_to_2.4.150() {
   INSTALLEDVERSION=2.4.150
 }

+up_to_2.4.160() {
+  echo "Nothing to do for 2.4.160"
+
+  INSTALLEDVERSION=2.4.160
+}
+
 add_hydra_pillars() {
   mkdir -p /opt/so/saltstack/local/pillar/hydra
   touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1405,6 +1418,8 @@ main() {
   if [[ $is_airgap -eq 0 ]]; then
     echo "Updating Rule Files to the Latest."
     update_airgap_rules
+    echo "Updating Playbooks to the Latest."
+    airgap_playbooks "$UPDATE_DIR"
   fi

 # since we don't run the backup.config_backup state on import we wont snapshot previous version states and pillars
@@ -383,6 +383,7 @@ http {
       }
       if ($request_uri ~* ^/(?!(^/api/.*))) {
         add_header Set-Cookie "AUTH_REDIRECT=$request_uri;Path=/;Max-Age=14400";
+        add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
       }
       return 302 /auth/self-service/login/browser;
     }
@@ -392,6 +393,7 @@ http {
         return 403;
       }
       add_header Set-Cookie "ory_kratos_session=;Path=/;Max-Age=0;expires=Thu, 01 Jan 1970 00:00:00 GMT;";
+      add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
       return 302 /auth/self-service/login/browser;
     }

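Both added headers pin HSTS on the cookie-setting redirect branches that previously lacked it. A hedged check from any client, with the manager hostname as a placeholder:

```
curl -skI https://MANAGER_HOSTNAME/ | grep -i strict-transport-security
# strict-transport-security: max-age=31536000; includeSubDomains
```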
@@ -14,7 +14,7 @@ include:
 # Install the registry container
 so-dockerregistry:
   docker_container.running:
-    - image: ghcr.io/security-onion-solutions/registry:2.8.3
+    - image: ghcr.io/security-onion-solutions/registry:3.0.0
     - hostname: so-registry
     - networks:
       - sobridge:
@@ -25,7 +25,7 @@ so-dockerregistry:
       - {{ BINDING }}
 {% endfor %}
     - binds:
-      - /opt/so/conf/docker-registry/etc/config.yml:/etc/docker/registry/config.yml:ro
+      - /opt/so/conf/docker-registry/etc/config.yml:/etc/distribution/config.yml:ro
       - /opt/so/conf/docker-registry:/var/lib/registry:rw
       - /nsm/docker-registry/docker:/var/lib/registry/docker:rw
      - /etc/pki/registry.crt:/etc/pki/registry.crt:ro
@@ -52,6 +52,13 @@ socsaltdir:
     - mode: 770
     - makedirs: True

+socplaybooksdir:
+  file.directory:
+    - name: /opt/so/conf/soc/playbooks
+    - user: 939
+    - group: 939
+    - makedirs: True
+
 socanalytics:
   file.managed:
     - name: /opt/so/conf/soc/analytics.js
@@ -1415,17 +1415,21 @@ soc:
           license: Elastic-2.0
           folder: sigma/stable
           community: true
+          rulesetName: securityonion-resources
         - repo: file:///nsm/rules/custom-local-repos/local-sigma
           license: Elastic-2.0
           community: false
+          rulesetName: local-sigma
       airgap:
         - repo: file:///nsm/rules/detect-sigma/repos/securityonion-resources
           license: Elastic-2.0
           folder: sigma/stable
           community: true
+          rulesetName: securityonion-resources
         - repo: file:///nsm/rules/custom-local-repos/local-sigma
           license: Elastic-2.0
           community: false
+          rulesetName: local-sigma
       sigmaRulePackages:
         - core
         - emerging_threats_addon
@@ -1456,6 +1460,16 @@ soc:
       org: Security Onion
       bucket: telegraf/so_short_term
       verifyCert: false
+    playbook:
+      autoUpdateEnabled: true
+      playbookImportFrequencySeconds: 86400
+      playbookImportErrorSeconds: 600
+      playbookRepoUrl:
+        default: https://github.com/Security-Onion-Solutions/securityonion-resources-playbooks
+        airgap: file:///nsm/airgap-resources/playbooks/securityonion-resources-playbooks
+      playbookRepoBranch: main
+      playbookRepoPath: /opt/sensoroni/playbooks/
+      playbookPathInRepo: securityonion-normalized
     salt:
       queueDir: /opt/sensoroni/queue
       timeoutMs: 45000
@@ -1492,16 +1506,20 @@ soc:
         - repo: https://github.com/Security-Onion-Solutions/securityonion-yara
           license: DRL
           community: true
+          rulesetName: securityonion-yara
         - repo: file:///nsm/rules/custom-local-repos/local-yara
           license: Elastic-2.0
           community: false
+          rulesetName: local-yara
       airgap:
         - repo: file:///nsm/rules/detect-yara/repos/securityonion-yara
           license: DRL
           community: true
+          rulesetName: securityonion-yara
         - repo: file:///nsm/rules/custom-local-repos/local-yara
           license: Elastic-2.0
           community: false
+          rulesetName: local-yara
       yaraRulesFolder: /opt/sensoroni/yara/rules
       stateFilePath: /opt/sensoroni/fingerprints/strelkaengine.state
       integrityCheckFrequencySeconds: 1200
@@ -31,10 +31,12 @@ so-soc:
       - /opt/so/conf/soc/fingerprints:/opt/sensoroni/fingerprints:rw
       - /nsm/soc/jobs:/opt/sensoroni/jobs:rw
       - /nsm/soc/uploads:/nsm/soc/uploads:rw
+      - /nsm/airgap-resources:/nsm/airgap-resources:rw
       - /opt/so/log/soc/:/opt/sensoroni/logs/:rw
       - /opt/so/conf/soc/soc.json:/opt/sensoroni/sensoroni.json:ro
       - /opt/so/conf/soc/ai_summary_repos:/opt/sensoroni/ai_summary_repos:rw
       - /opt/so/conf/navigator/layers/:/opt/sensoroni/navigator/:rw
+      - /opt/so/conf/soc/playbooks/:/opt/sensoroni/playbooks/:rw
 {% if SOCMERGED.telemetryEnabled and not GLOBALS.airgap %}
       - /opt/so/conf/soc/analytics.js:/opt/sensoroni/html/js/analytics.js:ro
 {% endif %}
@@ -12,12 +12,30 @@ transformations:
       sid: rule.uuid
       answer: answers
       query: dns.query.name
-      src_ip: source.ip.keyword
+      src_ip: source.ip
       src_port: source.port
-      dst_ip: destination.ip.keyword
+      dst_ip: destination.ip
       dst_port: destination.port
       winlog.event_data.User: user.name
       logtype: event.code # OpenCanary
+      ## Start Temp Linux Mappings ##
+      ProcessGuid: process.entity_id
+      ProcessId: process.pid
+      Image: process.executable
+      CommandLine: process.command_line
+      CurrentDirectory: process.working_directory
+      ParentProcessGuid: process.parent.entity_id
+      ParentProcessId: process.parent.pid
+      ParentImage: process.parent.executable
+      ParentCommandLine: process.parent.command_line
+      User: user.name
+      ## End Temp Linux Mappings ##
+      document_id: _id
+      rule.type: event.module
+      related_ip: related.ip
+      community_id: network.community_id
+      event_dataset: event.dataset
+      hostname: host.name
   # Maps "opencanary" product to SO IDH logs
   - id: opencanary_idh_add-fields
     type: add_condition
@@ -126,4 +144,96 @@ transformations:
       event.type: 'creation'
     rule_conditions:
       - type: logsource
         category: file_event
+  # Maps network rules to all network logs
+  # This targets all network logs, all services, generated from endpoints and network
+  - id: network_add-fields
+    type: add_condition
+    conditions:
+      event.category: 'network'
+    rule_conditions:
+      - type: logsource
+        category: network
+  # Maps network_connection rules to endpoint network creation logs
+  # This is an OS-agnostic mapping, to account for logs that don't specify source OS
+  - id: endpoint_network_connection_add-fields
+    type: add_condition
+    conditions:
+      event.category: 'network'
+      event.type: 'start'
+    rule_conditions:
+      - type: logsource
+        category: network_connection
+  # Maps "alert" category to SO Alert events
+  - id: alert_so_add-fields
+    type: add_condition
+    conditions:
+      tags: 'alert'
+    rule_conditions:
+      - type: logsource
+        category: alert
+  # Maps "network + connection" to SO connection logs
+  - id: network_connection_so_add-fields
+    type: add_condition
+    conditions:
+      tags: 'conn'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: connection
+  # Maps "network + dns" to SO DNS logs
+  - id: network_dns_so_add-fields
+    type: add_condition
+    conditions:
+      tags: 'dns'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: dns
+  # Maps "network + http" to SO HTTP logs
+  - id: network_http_so_add-fields
+    type: add_condition
+    conditions:
+      tags: 'http'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: http
+  # Maps "network + file" to SO file logs
+  - id: network_file_so_add-fields
+    type: add_condition
+    conditions:
+      event.category: 'network'
+      tags: 'file'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: file
+  # Maps "network + x509" to SO x509 logs
+  - id: network_x509_so_add-fields
+    type: add_condition
+    conditions:
+      event.category: 'network'
+      tags: 'x509'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: x509
+  # Maps "network + ssl" to SO ssl logs
+  - id: network_ssl_so_add-fields
+    type: add_condition
+    conditions:
+      event.category: 'network'
+      tags: 'ssl'
+    rule_conditions:
+      - type: logsource
+        category: network
+        service: ssl
+  # Maps file to host or network file events
+  - id: file_so_add-fields
+    type: add_condition
+    conditions:
+      tags: '*file'
+    rule_conditions:
+      - type: logsource
+        category: file
@@ -61,6 +61,13 @@
 {% do SOCMERGED.config.server.update({'airgapEnabled': false}) %}
 {% endif %}

+{# set playbookRepoUrl based on airgap or not #}
+{% if GLOBALS.airgap %}
+{% do SOCMERGED.config.server.modules.playbook.update({'playbookRepoUrl': SOCMERGED.config.server.modules.playbook.playbookRepoUrl.airgap}) %}
+{% else %}
+{% do SOCMERGED.config.server.modules.playbook.update({'playbookRepoUrl': SOCMERGED.config.server.modules.playbook.playbookRepoUrl.default}) %}
+{% endif %}
+
 {# remove these modules if detections is disabled #}
 {% if not SOCMERGED.config.server.client.detectionsEnabled %}
 {% do SOCMERGED.config.server.modules.pop('elastalertengine') %}
@@ -344,6 +344,23 @@ soc:
           advanced: True
           forcedType: "[]{}"
           helpLink: sigma.html
+          syntax: json
+          uiElements:
+            - field: rulesetName
+              label: Ruleset Name
+            - field: repo
+              label: Repo URL
+              required: True
+            - field: branch
+              label: Branch
+            - field: license
+              label: License
+              required: True
+            - field: folder
+              label: Folder
+            - field: community
+              label: Community
+              forcedType: bool
         airgap: *eerulesRepos
         sigmaRulePackages:
           description: 'Defines the Sigma Community Ruleset you want to run. One of these (core | core+ | core++ | all ) as well as an optional Add-on (emerging_threats_addon). Once you have changed the ruleset here, the new settings will be applied within 15 minutes. At that point, you will need to wait for the scheduled rule update to take place (by default, every 24 hours), or you can force the update by navigating to Detections --> Options dropdown menu --> Elastalert --> Full Update. WARNING! Changing the ruleset will remove all existing non-overlapping Sigma rules of the previous ruleset and their associated overrides. This removal cannot be undone.'
@@ -459,6 +476,23 @@ soc:
           advanced: True
           forcedType: "[]{}"
           helpLink: yara.html
+          syntax: json
+          uiElements:
+            - field: rulesetName
+              label: Ruleset Name
+            - field: repo
+              label: Repo URL
+              required: True
+            - field: branch
+              label: Branch
+            - field: license
+              label: License
+              required: True
+            - field: folder
+              label: Folder
+            - field: community
+              label: Community
+              forcedType: bool
         airgap: *serulesRepos
       suricataengine:
         aiRepoUrl:
@@ -592,7 +626,7 @@ soc:
               label: Query
               required: True
             - field: showSubtitle
               label: Show Query in Dropdown.
               forcedType: bool
       queryToggleFilters:
         description: Customize togglable query filters that apply to all queries. Exclusive toggles will invert the filter if toggled off rather than omitting the filter from the query.
@@ -26,7 +26,6 @@ echo "Running all.rules and $TESTRULE against the following pcap: $TESTPCAP"
 echo ""
 sleep 3
-
 rm -rf /tmp/nids-testing/output
 mkdir -p /tmp/nids-testing/output
 chown suricata:socore /tmp/nids-testing/output
@@ -45,7 +44,7 @@ echo "==== Begin Suricata Output ==="
   -v /opt/so/conf/suricata/bpf:/etc/suricata/bpf:ro \
   -v /tmp/nids-testing/output/:/nsm/:rw \
   {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-suricata:{{ VERSION }} \
-  --runmode single -v -k none -r /input.pcap -l /tmp --init-errors-fatal
+  --runmode single -v -k none -r /input.pcap -l /tmp --init-errors-fatal --set outputs.6.pcap-log.enabled=no
 echo "==== End Suricata Output ==="

 echo ""
@@ -241,7 +241,12 @@
 # ## Use TLS but skip chain & host verification
 # # insecure_skip_verify = false

-{% if grains.role in ['so-searchnode','so-standalone','so-manager', 'so-managersearch', 'so-heavynode', 'so-receiver'] -%}
+{%- set logstash_metrics_roles = ['so-searchnode','so-standalone','so-managersearch','so-heavynode'] %}
+{%- if GLOBALS.pipeline != "KAFKA" %}
+{%- set logstash_metrics_roles = logstash_metrics_roles + ['so-manager', 'so-receiver'] %}
+{%- endif %}
+
+{%- if grains.role in logstash_metrics_roles %}
 [[inputs.logstash]]
   url = "http://localhost:9600"
   collect = ["pipelines"]
@@ -252,7 +257,7 @@
 {% if grains.role in ['so-manager','so-managersearch','so-standalone','so-receiver'] and GLOBALS.pipeline == "KAFKA" -%}
 [[inputs.jolokia2_agent]]
   name_prefix= "kafka_"
-  urls = ["http://localhost:8778/jolokia"]
+  urls = ["http://{{ NODEIP }}:8778/jolokia"]

 [[inputs.jolokia2_agent.metric]]
   name = "topics"
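The new Jinja block drops manager and receiver nodes from the Logstash metrics input when Kafka is the pipeline, since Logstash does not run there in that mode and Telegraf would otherwise poll a dead endpoint. The endpoint being scraped can be checked by hand; a hedged sketch:

```
# The [[inputs.logstash]] block scrapes this Logstash monitoring API endpoint:
curl -s http://localhost:9600/_node/stats/pipelines | head -c 300; echo
```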
@@ -57,6 +57,7 @@ zeek:
       - cve-2020-0601
       - securityonion/bpfconf
       - securityonion/file-extraction
+      - securityonion/community-id-extended
       - oui-logging
       - icsnpp-modbus
       - icsnpp-dnp3
salt/zeek/policy/securityonion/community-id-extended.zeek (new file, 40 lines)

@@ -0,0 +1,40 @@
+##! Extends community ID logging to Files, and SSL by copying
+##! the community_id from the parent connection.
+##!
+##! Note: Requires that protocols/conn/community-id-logging is loaded
+
+module CommunityIDExt;
+
+@load base/protocols/ssl
+@load protocols/conn/community-id-logging
+
+export {
+    redef record SSL::Info += {
+        community_id: string &optional &log;
+    };
+
+    redef record Files::Info += {
+        community_id: string &optional &log;
+    };
+}
+
+# Files
+event file_new(f: fa_file) {
+    if ( f?$conns ) {
+        # Take community_id from first connection that has it
+        for ( cid in f$conns ) {
+            local c = f$conns[cid];
+            if ( c?$conn && c$conn?$community_id ) {
+                f$info$community_id = c$conn$community_id;
+                break;
+            }
+        }
+    }
+}
+
+# SSL Connections
+event ssl_established(c: connection) {
+    if ( c?$conn && c$conn?$community_id && c?$ssl ) {
+        c$ssl$community_id = c$conn$community_id;
+    }
+}
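The policy only copies an existing `community_id`, so it must load alongside `protocols/conn/community-id-logging`, which the updated `zeek` load list above guarantees. A hedged offline test against a capture, assuming the script is on the Zeek search path under the name used in that load list:

```
zeek -Cr sample.pcap protocols/conn/community-id-logging securityonion/community-id-extended
grep -l community_id ssl.log files.log 2>/dev/null   # both logs should now carry the field
```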
@@ -769,6 +769,10 @@ if ! [[ -f $install_opt_file ]]; then
     title "Syncing AI-Generated Detection Summaries"
     airgap_detection_summaries
   fi
+  if [[ $is_airgap ]]; then
+    title "Syncing Playbooks"
+    logCmd "airgap_playbooks /root/SecurityOnion"
+  fi
   title "Setting up Kibana Default Space"
   logCmd "so-kibana-space-defaults"
   add_web_user
|||||||
BIN
sigs/securityonion-2.4.160-20250625.iso.sig
Normal file
BIN
sigs/securityonion-2.4.160-20250625.iso.sig
Normal file
Binary file not shown.