Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)
Merge remote-tracking branch 'origin/2.4/dev' into amv
@@ -20,7 +20,7 @@
 ],
 "data_stream.dataset": "import",
 "custom": "",
-"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/evtx/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n- drop_fields:\n fields: [\"host\"]\n ignore_missing: true\n- add_fields:\n target: data_stream\n fields:\n type: logs\n dataset: system.security\n- add_fields:\n target: event\n fields:\n dataset: system.security\n module: system\n imported: true\n- add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.security-2.5.4\n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-Sysmon/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.sysmon_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.sysmon_operational\n module: windows\n imported: true\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.sysmon_operational-3.1.2\n- if:\n equals:\n winlog.channel: 'Application'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.application\n - add_fields:\n target: event\n fields:\n dataset: system.application\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.application-2.5.4\n- if:\n equals:\n winlog.channel: 'System'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.system\n - add_fields:\n target: event\n fields:\n dataset: system.system\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.system-2.5.4\n \n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-PowerShell/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.powershell_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.powershell_operational\n module: windows\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.powershell_operational-3.1.2\n- add_fields:\n target: data_stream\n fields:\n dataset: import",
+"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/evtx/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n- drop_fields:\n fields: [\"host\"]\n ignore_missing: true\n- add_fields:\n target: data_stream\n fields:\n type: logs\n dataset: system.security\n- add_fields:\n target: event\n fields:\n dataset: system.security\n module: system\n imported: true\n- add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.security-2.6.1\n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-Sysmon/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.sysmon_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.sysmon_operational\n module: windows\n imported: true\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.sysmon_operational-3.1.2\n- if:\n equals:\n winlog.channel: 'Application'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.application\n - add_fields:\n target: event\n fields:\n dataset: system.application\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.application-2.6.1\n- if:\n equals:\n winlog.channel: 'System'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.system\n - add_fields:\n target: event\n fields:\n dataset: system.system\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.system-2.6.1\n \n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-PowerShell/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.powershell_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.powershell_operational\n module: windows\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.powershell_operational-3.1.2\n- add_fields:\n target: data_stream\n fields:\n dataset: import",
 "tags": [
 "import"
 ]
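For readability, the new "processors" value above decodes from its \n-escaped JSON string into Filebeat-style processor YAML. The sketch below shows only the leading processors, with indentation normalized; the rest of the value is the per-channel if/add_fields blocks visible in the diff, with the logs-system.* pipeline references bumped from 2.5.4 to 2.6.1 while the Windows pipelines stay at 3.1.2:

- dissect:
    tokenizer: "/nsm/import/%{import.id}/evtx/%{import.file}"
    field: "log.file.path"
    target_prefix: ""
- decode_json_fields:
    fields: ["message"]
    target: ""
- drop_fields:
    fields: ["host"]
    ignore_missing: true
- add_fields:
    target: data_stream
    fields:
      type: logs
      dataset: system.security
- add_fields:
    target: event
    fields:
      dataset: system.security
      module: system
      imported: true
- add_fields:
    target: "@metadata"
    fields:
      pipeline: logs-system.security-2.6.1
# ... followed by the winlog.channel if/add_fields blocks shown in the diff
# (Sysmon, Application, System, PowerShell) and a final add_fields that sets
# data_stream.dataset: import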
@@ -1,6 +1,6 @@
 elasticsearch:
   enabled: false
-  version: 8.18.6
+  version: 8.18.8
   index_clean: true
   config:
     action:
@@ -22,7 +22,7 @@ kibana:
     - default
     - file
   migrations:
-    discardCorruptObjects: "8.18.6"
+    discardCorruptObjects: "8.18.8"
   telemetry:
     enabled: False
   security:
@@ -603,9 +603,6 @@ post_to_2.4.170() {
 }
 
 post_to_2.4.180() {
-  echo "Regenerating Elastic Agent Installers"
-  /sbin/so-elastic-agent-gen-installers
-
   # Force update to Kafka output policy
   /usr/sbin/so-kafka-fleet-output-policy --force
 
@@ -613,6 +610,9 @@ post_to_2.4.180() {
 }
 
 post_to_2.4.190() {
+  echo "Regenerating Elastic Agent Installers"
+  /sbin/so-elastic-agent-gen-installers
+
   # Only need to update import / eval nodes
   if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
     update_import_fleet_output
@@ -887,14 +887,14 @@ up_to_2.4.170() {
 }
 
 up_to_2.4.180() {
-  # Elastic Update for this release, so download Elastic Agent files
-  determine_elastic_agent_upgrade
+  echo "Nothing to do for 2.4.180"
 
   INSTALLEDVERSION=2.4.180
 }
 
 up_to_2.4.190() {
-  echo "Nothing to do for 2.4.190"
+  # Elastic Update for this release, so download Elastic Agent files
+  determine_elastic_agent_upgrade
 
   INSTALLEDVERSION=2.4.190
 }
 
@@ -237,10 +237,22 @@ function manage_salt() {
 
   case "$op" in
     state)
-      log "Performing '$op' for '$state' on minion '$minion'"
       state=$(echo "$request" | jq -r .state)
+      async=$(echo "$request" | jq -r .async)
+      if [[ $async == "true" ]]; then
+        log "Performing async '$op' on minion $minion with state '$state'"
         response=$(salt --async "$minion" state.apply "$state" queue=2)
+      else
+        log "Performing '$op' on minion $minion with state '$state'"
+        response=$(salt "$minion" state.apply "$state")
+      fi
+
       exit_code=$?
+      if [[ $exit_code -ne 0 && "$response" =~ "is running as PID" ]]; then
+        log "Salt already running: $response ($exit_code)"
+        respond "$id" "ERROR_SALT_ALREADY_RUNNING"
+        return
+      fi
       ;;
     highstate)
       log "Performing '$op' on minion $minion"
@@ -259,7 +271,7 @@ function manage_salt() {
       ;;
   esac
 
-  if [[ exit_code -eq 0 ]]; then
+  if [[ $exit_code -eq 0 ]]; then
     log "Successful command execution: $response"
     respond "$id" "true"
   else
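For context on the manage_salt changes above: the state handler reads its parameters out of a JSON request with jq. A request exercising the new async path might look roughly like the sketch below; only the state and async fields are shown being parsed in these hunks, and the id, op, and minion fields (and every value) are assumed here purely for illustration, based on how the surrounding function uses $id, $op, and $minion:

{
  "id": "example-request-id",
  "op": "state",
  "minion": "example_minion",
  "state": "example.state",
  "async": "true"
}

When async is the string "true", the handler runs salt --async "$minion" state.apply "$state" queue=2; otherwise it falls back to the synchronous salt call. In either case, a non-zero exit whose response matches "is running as PID" is reported back as ERROR_SALT_ALREADY_RUNNING.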
salt/zeek/policy/custom/filters/dns (new file, 30 lines)
@@ -0,0 +1,30 @@
+hook DNS::log_policy(rec: DNS::Info, id: Log::ID, filter: Log::Filter)
+{
+    # Only put a single name per line, otherwise there will be memory issues!
+    # If the query comes back blank don't log
+    if (!rec?$query)
+        break;
+
+    # If the query comes back with one of these don't log
+    if (rec?$query && /google.com$/ in rec$query)
+        break;
+
+    # If the query comes back with one of these don't log
+    if (rec?$query && /.apple.com$/ in rec$query)
+        break;
+
+    # Don't log reverse lookups
+    if (rec?$query && /.in-addr.arpa/ in to_lower(rec$query))
+        break;
+
+    # Don't log netbios lookups. This generates a crazy amount of logs
+    if (rec?$qtype_name && /NB/ in rec$qtype_name)
+        break;
+}
+
+event zeek_init()
+{
+    Log::remove_default_filter(DNS::LOG);
+    local filter: Log::Filter = [$name="dns-filter"];
+    Log::add_filter(DNS::LOG, filter);
+}
salt/zeek/policy/custom/filters/files (new file, 13 lines)
@@ -0,0 +1,13 @@
+hook Files::log_policy(rec: Files::Info, id: Log::ID, filter: Log::Filter)
+{
+    # Turn off specific mime types
+    if (rec?$mime_type && ( /soap\+xml/ | /json/ | /xml/ | /x509/ ) in rec$mime_type)
+        break;
+}
+
+event zeek_init()
+{
+    Log::remove_default_filter(Files::LOG);
+    local filter: Log::Filter = [$name="files-filter"];
+    Log::add_filter(Files::LOG, filter);
+}
salt/zeek/policy/custom/filters/httphost (new file, 20 lines)
@@ -0,0 +1,20 @@
+### HTTP filter by host entries by string #####
+
+module Filterhttp;
+
+export {
+    global remove_host_entries: set[string] = {"www.genevalab.com", "www.google.com"};
+}
+
+hook HTTP::log_policy(rec: HTTP::Info, id: Log::ID, filter: Log::Filter)
+{
+    # Remove HTTP host entries
+    if ( ! rec?$host || rec$host in remove_host_entries )
+        break;
+}
+event zeek_init()
+{
+    Log::remove_default_filter(HTTP::LOG);
+    local filter: Log::Filter = [$name="http-filter"];
+    Log::add_filter(HTTP::LOG, filter);
+}
salt/zeek/policy/custom/filters/httpuri (new file, 14 lines)
@@ -0,0 +1,14 @@
+### HTTP filter by uri using pattern ####
+
+hook HTTP::log_policy(rec: HTTP::Info, id: Log::ID, filter: Log::Filter)
+{
+    # Remove HTTP uri entries by regex
+    if ( rec?$uri && /^\/kratos\// in rec$uri )
+        break;
+}
+event zeek_init()
+{
+    Log::remove_default_filter(HTTP::LOG);
+    local filter: Log::Filter = [$name="http-filter"];
+    Log::add_filter(HTTP::LOG, filter);
+}
salt/zeek/policy/custom/filters/ssl (new file, 29 lines)
@@ -0,0 +1,29 @@
+### Log filter by JA3S md5 hash:
+hook SSL::log_policy(rec: SSL::Info, id: Log::ID, filter: Log::Filter)
+{
+    # SSL log filter Ja3s by md5
+    if (rec?$ja3s_cipher && ( /623de93db17d313345d7ea481e7443cf/ ) in rec$ja3s_cipher)
+        break;
+}
+
+event zeek_init()
+{
+    Log::remove_default_filter(SSL::LOG);
+    local filter: Log::Filter = [$name="ssl-filter"];
+    Log::add_filter(SSL::LOG, filter);
+}
+
+### Log filter by server name:
+hook SSL::log_policy(rec: SSL::Info, id: Log::ID, filter: Log::Filter)
+{
+    # SSL log filter by server name
+    if (rec?$server_name && ( /api.github.com$/ ) in rec$server_name)
+        break;
+}
+
+event zeek_init()
+{
+    Log::remove_default_filter(SSL::LOG);
+    local filter: Log::Filter = [$name="ssl-filter"];
+    Log::add_filter(SSL::LOG, filter);
+}
salt/zeek/policy/custom/filters/tunnel (new file, 17 lines)
@@ -0,0 +1,17 @@
+global tunnel_subnet: set[subnet] = {
+
+    10.19.0.0/24
+
+};
+
+hook Tunnel::log_policy(rec: Tunnel::Info, id: Log::ID, filter: Log::Filter)
+{
+    if (rec$id$orig_h in tunnel_subnet || rec$id$resp_h in tunnel_subnet)
+        break;
+}
+event zeek_init()
+{
+    Log::remove_default_filter(Tunnel::LOG);
+    local filter: Log::Filter = [$name="tunnel-filter"];
+    Log::add_filter(Tunnel::LOG, filter);
+}
@@ -61,6 +61,48 @@ zeek:
       global: True
      advanced: True
      duplicates: True
+    dns:
+      description: DNS Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
+    files:
+      description: Files Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
+    httphost:
+      description: HTTP Hosts Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
+    httpuri:
+      description: HTTP URI Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
+    ssl:
+      description: SSL Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
+    tunnel:
+      description: Tunnel Filter for Zeek. This is an advanced setting and will take further action to enable.
+      helpLink: zeek.html
+      file: True
+      global: True
+      advanced: True
+      duplicates: True
     file_extraction:
       description: Contains a list of file or MIME types Zeek will extract from the network streams. Values must adhere to the following format - {"MIME_TYPE":"FILE_EXTENSION"}
       forcedType: "[]{}"