Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)
Merge remote-tracking branch 'origin/2.4/dev' into vlb2
@@ -128,6 +128,7 @@ common_sbin:
- user: 939
- group: 939
- file_mode: 755
- show_changes: False

common_sbin_jinja:
file.recurse:
@@ -137,6 +138,7 @@ common_sbin_jinja:
- group: 939
- file_mode: 755
- template: jinja
- show_changes: False

{% if not GLOBALS.is_manager%}
# prior to 2.4.50 these scripts were in common/tools/sbin on the manager because of soup and distributed to non managers
@@ -27,6 +27,7 @@ commonpkgs:
- vim
- tar
- unzip
- bc
{% if grains.oscodename != 'focal' %}
- python3-rich
{% endif %}
@@ -56,6 +57,7 @@ commonpkgs:
- skip_suggestions: True
- pkgs:
- python3-dnf-plugin-versionlock
- bc
- curl
- device-mapper-persistent-data
- fuse
@@ -226,7 +226,7 @@ create_local_directories() {
for d in $(find $PILLARSALTDIR/$i -type d); do
suffixdir=${d//$PILLARSALTDIR/}
if [ ! -d "$local_salt_dir/$suffixdir" ]; then
mkdir -pv $local_salt_dir$suffixdir
mkdir -p $local_salt_dir$suffixdir
fi
done
chown -R socore:socore $local_salt_dir/$i
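The only change in this hunk is swapping mkdir -pv for mkdir -p so setup logs stay quiet. A minimal standalone sketch of the prefix-strip/mkdir idiom used above, with hypothetical paths standing in for $PILLARSALTDIR and $local_salt_dir:

  #!/bin/bash
  # Mirror the directory layout of a source tree into a destination tree (sketch only).
  src=/tmp/example_pillar_tree        # stands in for $PILLARSALTDIR/$i
  dst=/tmp/example_local_salt_dir     # stands in for $local_salt_dir
  mkdir -p "$src/a/b" "$src/c"        # seed a sample layout so the loop has something to walk
  for d in $(find "$src" -type d); do
    suffixdir=${d//$src/}             # strip the source prefix, keeping the relative path
    [ -d "$dst$suffixdir" ] || mkdir -p "$dst$suffixdir"
  done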
@@ -154,6 +154,7 @@ if [[ $EXCLUDE_FALSE_POSITIVE_ERRORS == 'Y' ]]; then
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|syncing rule" # false positive (rule sync log line includes rule name which can contain 'error')
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|request_unauthorized" # false positive (login failures to Hydra result in an 'error' log)
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|adding index lifecycle policy" # false positive (elasticsearch policy names contain 'error')
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|adding ingest pipeline" # false positive (elasticsearch ingest pipeline names contain 'error')
fi
if [[ $EXCLUDE_KNOWN_ERRORS == 'Y' ]]; then
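EXCLUDED_ERRORS is built up as a pipe-delimited alternation; the consumer of the variable sits outside this hunk, but the usual way such a list is applied is as a case-insensitive extended-regex exclusion. A hedged sketch (the log path and the initial grep are assumptions, not taken from this script):

  EXCLUDED_ERRORS="syncing rule|request_unauthorized|adding index lifecycle policy|adding ingest pipeline"
  grep -iE 'error' /tmp/example.log | grep -ivE "$EXCLUDED_ERRORS"   # keep only error lines that are not excluded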
@@ -63,7 +63,7 @@ function status {
function pcapinfo() {
PCAP=$1
ARGS=$2
docker run --rm -v "$PCAP:/input.pcap" --entrypoint capinfos {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} /input.pcap $ARGS
docker run --rm -v "$PCAP:/input.pcap" --entrypoint capinfos {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} /input.pcap -ae $ARGS
}
function pcapfix() {
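The pcapinfo change appends -ae to capinfos so the earliest and latest packet timestamps are always printed alongside whatever extra flags the caller passes. A hedged usage sketch (the pcap path is hypothetical):

  pcapinfo /nsm/import/sample.pcap        # summary now always includes the capture start/end times (-ae)
  pcapinfo /nsm/import/sample.pcap -c     # additional capinfos flags still pass through via $ARGS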
@@ -30,6 +30,7 @@ elasticfleet_sbin:
- user: 947
- group: 939
- file_mode: 755
- show_changes: False

elasticfleet_sbin_jinja:
file.recurse:
@@ -41,6 +42,7 @@ elasticfleet_sbin_jinja:
- template: jinja
- exclude_pat:
- so-elastic-fleet-package-upgrade # exclude this because we need to watch it for changes
- show_changes: False

eaconfdir:
file.directory:
@@ -145,6 +147,7 @@ eadynamicintegration:
- user: 947
- group: 939
- template: jinja
- show_changes: False

eaintegration:
file.recurse:
@@ -152,6 +155,7 @@ eaintegration:
- source: salt://elasticfleet/files/integrations
- user: 947
- group: 939
- show_changes: False

eaoptionalintegrationsdir:
file.directory:
@@ -20,7 +20,7 @@
],
"data_stream.dataset": "import",
"custom": "",
"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/evtx/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n- drop_fields:\n fields: [\"host\"]\n ignore_missing: true\n- add_fields:\n target: data_stream\n fields:\n type: logs\n dataset: system.security\n- add_fields:\n target: event\n fields:\n dataset: system.security\n module: system\n imported: true\n- add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.security-1.66.1\n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-Sysmon/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.sysmon_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.sysmon_operational\n module: windows\n imported: true\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.sysmon_operational-2.4.1\n- if:\n equals:\n winlog.channel: 'Application'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.application\n - add_fields:\n target: event\n fields:\n dataset: system.application\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.application-1.66.1\n- if:\n equals:\n winlog.channel: 'System'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.system\n - add_fields:\n target: event\n fields:\n dataset: system.system\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.system-1.66.1\n \n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-PowerShell/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.powershell_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.powershell_operational\n module: windows\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.powershell_operational-2.4.1\n- add_fields:\n target: data_stream\n fields:\n dataset: import",
"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/evtx/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n- drop_fields:\n fields: [\"host\"]\n ignore_missing: true\n- add_fields:\n target: data_stream\n fields:\n type: logs\n dataset: system.security\n- add_fields:\n target: event\n fields:\n dataset: system.security\n module: system\n imported: true\n- add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.security-1.67.0\n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-Sysmon/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.sysmon_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.sysmon_operational\n module: windows\n imported: true\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.sysmon_operational-2.5.0\n- if:\n equals:\n winlog.channel: 'Application'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.application\n - add_fields:\n target: event\n fields:\n dataset: system.application\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.application-1.67.0\n- if:\n equals:\n winlog.channel: 'System'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.system\n - add_fields:\n target: event\n fields:\n dataset: system.system\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.system-1.67.0\n \n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-PowerShell/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.powershell_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.powershell_operational\n module: windows\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.powershell_operational-2.5.0\n- add_fields:\n target: data_stream\n fields:\n dataset: import",
"tags": [
"import"
]
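The only difference between the two processors blobs above is the bundled ingest pipeline versions (system 1.66.1 to 1.67.0, windows 2.4.1 to 2.5.0). A hedged one-liner to list the pipeline versions referenced by such an exported integration policy, assuming it has been saved locally as import-policy.json (hypothetical filename):

  grep -o 'pipeline: logs-[a-z_.]*-[0-9.]*' import-policy.json | sort -u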
@@ -3,6 +3,7 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
# this file except in compliance with the Elastic License 2.0.
{% set SUB = salt['pillar.get']('elasticfleet:config:subscription_integrations', default=false) %}

. /usr/sbin/so-common
. /usr/sbin/so-elastic-fleet-common
@@ -16,7 +17,6 @@ BULK_INSTALL_PACKAGE_TMP=/tmp/esfleet_bulk_install_tmp.json
BULK_INSTALL_OUTPUT=/opt/so/state/esfleet_bulk_install_results.json
PACKAGE_COMPONENTS=/opt/so/state/esfleet_package_components.json

SKIP_SUBSCRIPTION=true
PENDING_UPDATE=false

# Integrations which are included in the package registry, but excluded from automatic installation via this script.
@@ -63,7 +63,8 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
bulk_package=$(echo "$package" | jq '{name: .name, version: .latest_version}' )

if [[ ! "${EXCLUDED_INTEGRATIONS[@]}" =~ "$package_name" ]]; then
if $SKIP_SUBSCRIPTION && [[ "$subscription" != "basic" && "$subscription" != "null" && -n "$subscription" ]]; then
{% if not SUB %}
if [[ "$subscription" != "basic" && "$subscription" != "null" && -n "$subscription" ]]; then
# pass over integrations that require non-basic elastic license
echo "$package_name integration requires an Elastic license of $subscription or greater... skipping"
continue
@@ -83,6 +84,20 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
fi
fi
fi
{% else %}
if [[ "$installed_version" == "null" || -z "$installed_version" ]]; then
echo "$package_name is not installed... Adding to next update."
jq --argjson package "$bulk_package" '.packages += [$package]' $BULK_INSTALL_PACKAGE_LIST > $BULK_INSTALL_PACKAGE_TMP && mv $BULK_INSTALL_PACKAGE_TMP $BULK_INSTALL_PACKAGE_LIST
PENDING_UPDATE=true
else
results=$(compare_versions "$latest_version" "$installed_version")
if [ $results == "greater" ]; then
echo "$package_name is at version $installed_version latest version is $latest_version... Adding to next update."
jq --argjson package "$bulk_package" '.packages += [$package]' $BULK_INSTALL_PACKAGE_LIST > $BULK_INSTALL_PACKAGE_TMP && mv $BULK_INSTALL_PACKAGE_TMP $BULK_INSTALL_PACKAGE_LIST
PENDING_UPDATE=true
fi
fi
{% endif %}
else
echo "Skipping $package_name..."
fi
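The {% if not SUB %} / {% else %} rework replaces the old SKIP_SUBSCRIPTION shell flag with a Jinja-rendered branch, and the bulk path accumulates packages with jq before a single install call. A minimal standalone sketch of that jq append-and-swap pattern (file names are hypothetical; compare_versions appears to come from the sourced helper scripts and is not reproduced here):

  BULK_LIST=/tmp/example_bulk_install.json
  BULK_TMP=/tmp/example_bulk_install_tmp.json
  echo '{"packages": []}' > "$BULK_LIST"
  bulk_package='{"name": "system", "version": "1.67.0"}'
  jq --argjson package "$bulk_package" '.packages += [$package]' "$BULK_LIST" > "$BULK_TMP" && mv "$BULK_TMP" "$BULK_LIST"
  cat "$BULK_LIST"    # {"packages":[{"name":"system","version":"1.67.0"}]}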
@@ -47,6 +47,7 @@ elasticsearch_sbin:
- file_mode: 755
- exclude_pat:
- so-elasticsearch-pipelines # exclude this because we need to watch it for changes, we sync it in another state
- show_changes: False

elasticsearch_sbin_jinja:
file.recurse:
@@ -60,6 +61,7 @@ elasticsearch_sbin_jinja:
- so-elasticsearch-ilm-policy-load # exclude this because we need to watch it for changes, we sync it in another state
- defaults:
GLOBALS: {{ GLOBALS }}
- show_changes: False

so-elasticsearch-ilm-policy-load-script:
file.managed:
@@ -69,6 +71,7 @@ so-elasticsearch-ilm-policy-load-script:
- group: 939
- mode: 754
- template: jinja
- show_changes: False

so-elasticsearch-pipelines-script:
file.managed:
@@ -77,6 +80,7 @@ so-elasticsearch-pipelines-script:
- user: 930
- group: 939
- mode: 754
- show_changes: False

esingestdir:
file.directory:
@@ -110,6 +114,7 @@ esingestdynamicconf:
- user: 930
- group: 939
- template: jinja
- show_changes: False

esingestconf:
file.recurse:
@@ -117,6 +122,7 @@ esingestconf:
- source: salt://elasticsearch/files/ingest
- user: 930
- group: 939
- show_changes: False

# Remove .fleet_final_pipeline-1 because we are using global@custom now
so-fleet-final-pipeline-remove:
@@ -153,6 +159,7 @@ esyml:
- defaults:
ESCONFIG: {{ ELASTICSEARCHMERGED.config }}
- template: jinja
- show_changes: False

esroles:
file.recurse:
@@ -162,6 +169,7 @@ esroles:
- template: jinja
- user: 930
- group: 939
- show_changes: False

nsmesdir:
file.directory:
@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 8.17.2
version: 8.17.3
index_clean: true
config:
action:
@@ -2659,7 +2659,7 @@ elasticsearch:
set_priority:
priority: 50
min_age: 30d
so-logs-osquery-manager-action_x_responses:
so-logs-osquery_manager_x_action_x_responses:
index_sorting: false
index_template:
_meta:
@@ -2667,17 +2667,51 @@ elasticsearch:
managed_by: security_onion
package:
name: elastic_agent
data_stream:
allow_custom_routing: false
hidden: false
composed_of:
- logs-osquery_manager.action.responses
ignore_missing_component_templates: []
- logs-osquery_manager.action.responses@package
- logs-osquery_manager.action.responses@custom
- so-fleet_integrations.ip_mappings-1
- so-fleet_globals-1
- so-fleet_agent_id_verification-1
ignore_missing_component_templates:
- logs-osquery_manager.action.responses@custom
index_patterns:
- .logs-osquery_manager.action.responses*
- logs-osquery_manager.action.responses*
priority: 501
template:
settings:
index:
lifecycle:
name: so-logs-osquery_manager.action.responses-logs
number_of_replicas: 0
so-logs-osquery-manager-actions:
policy:
phases:
cold:
actions:
set_priority:
priority: 0
min_age: 60d
delete:
actions:
delete: {}
min_age: 365d
hot:
actions:
rollover:
max_age: 30d
max_primary_shard_size: 50gb
set_priority:
priority: 100
min_age: 0ms
warm:
actions:
set_priority:
priority: 50
min_age: 30d
so-logs-osquery_manager_x_result:
index_sorting: false
index_template:
_meta:
@@ -2685,16 +2719,50 @@ elasticsearch:
managed_by: security_onion
package:
name: elastic_agent
data_stream:
allow_custom_routing: false
hidden: false
composed_of:
- logs-osquery_manager.actions
ignore_missing_component_templates: []
- logs-osquery_manager.result@package
- logs-osquery_manager.result@custom
- so-fleet_integrations.ip_mappings-1
- so-fleet_globals-1
- so-fleet_agent_id_verification-1
ignore_missing_component_templates:
- logs-osquery_manager.result@custom
index_patterns:
- .logs-osquery_manager.actions*
- logs-osquery_manager.result*
priority: 501
template:
settings:
index:
lifecycle:
name: so-logs-osquery_manager.result-logs
number_of_replicas: 0
policy:
phases:
cold:
actions:
set_priority:
priority: 0
min_age: 60d
delete:
actions:
delete: {}
min_age: 365d
hot:
actions:
rollover:
max_age: 30d
max_primary_shard_size: 50gb
set_priority:
priority: 100
min_age: 0ms
warm:
actions:
set_priority:
priority: 50
min_age: 30d
so-logs-soc:
close: 30
delete: 365
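These pillar keys are consumed by the so-elasticsearch-templates and so-elasticsearch-ilm-policy-load states; the rename to so-logs-osquery_manager_x_action_x_responses / so-logs-osquery_manager_x_result keeps the keys aligned with the real data stream names (the _x_ appears to stand in for a literal dot). A hedged sketch of the Elasticsearch API calls such settings ultimately translate into, assuming Elasticsearch answers on localhost:9200, the curl config referenced elsewhere in this commit, and illustrative names:

  curl -K /opt/so/conf/elasticsearch/curl.config -X PUT "localhost:9200/_ilm/policy/so-logs-osquery_manager.action.responses-logs" \
    -H 'Content-Type: application/json' \
    -d '{"policy":{"phases":{"hot":{"actions":{"rollover":{"max_age":"30d","max_primary_shard_size":"50gb"}}}}}}'
  curl -K /opt/so/conf/elasticsearch/curl.config -X PUT "localhost:9200/_index_template/logs-osquery_manager.action.responses" \
    -H 'Content-Type: application/json' \
    -d '{"index_patterns":["logs-osquery_manager.action.responses*"],"priority":501,"template":{"settings":{"index":{"lifecycle":{"name":"so-logs-osquery_manager.action.responses-logs"},"number_of_replicas":0}}}}'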
@@ -116,6 +116,7 @@ escomponenttemplates:
- clean: True
- onchanges_in:
- file: so-elasticsearch-templates-reload
- show_changes: False

# Auto-generate templates from defaults file
{% for index, settings in ES_INDEX_SETTINGS.items() %}
@@ -127,6 +128,7 @@ es_index_template_{{index}}:
- defaults:
TEMPLATE_CONFIG: {{ settings.index_template }}
- template: jinja
- show_changes: False
- onchanges_in:
- file: so-elasticsearch-templates-reload
{% endif %}
@@ -146,6 +148,7 @@ es_template_{{TEMPLATE.split('.')[0] | replace("/","_") }}:
{% endif %}
- user: 930
- group: 939
- show_changes: False
- onchanges_in:
- file: so-elasticsearch-templates-reload
{% endfor %}
salt/elasticsearch/files/ingest/zeek.ntp (new file, 16 lines)
@@ -0,0 +1,16 @@
{
"description" : "zeek.ntp",
"processors":[
{"set": {"field":"event.dataset", "value":"ntp", "ignore_failure":true}},
{"json": {"field":"message", "target_field":"message2", "ignore_failure":true}},
{"rename": {"field":"message2.version", "target_field":"ntp.version", "ignore_missing":true}},
{"rename": {"field":"message2.mode", "target_field":"ntp.mode", "ignore_missing":true}},
{"rename": {"field":"message2.poll", "target_field":"ntp.poll", "ignore_missing":true}},
{"rename": {"field":"message2.precision", "target_field":"ntp.precision", "ignore_missing":true}},
{"rename": {"field":"message2.org_time", "target_field":"ntp.org_time", "ignore_missing":true}},
{"rename": {"field":"message2.xmt_time", "target_field":"ntp.xmt_time", "ignore_missing":true}},
{"date": {"field":"ntp.org_time", "target_field":"ntp.org_time", "formats":["UNIX", "UNIX_MS"], "ignore_failure": true, "if":"ctx?.ntp?.org_time != null"}},
{"date": {"field":"ntp.xmt_time", "target_field":"ntp.xmt_time", "formats":["UNIX", "UNIX_MS"], "ignore_failure": true, "if":"ctx?.ntp?.xmt_time != null"}},
{"pipeline":{"name":"zeek.common"}}
]
}
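A hedged way to exercise the new pipeline once it has been loaded, assuming the file name becomes the pipeline id and Elasticsearch answers on localhost:9200 (the simulated document is fabricated, and the zeek.common sub-pipeline must already exist for the call to fully succeed):

  curl -K /opt/so/conf/elasticsearch/curl.config -X POST "localhost:9200/_ingest/pipeline/zeek.ntp/_simulate?pretty" \
    -H 'Content-Type: application/json' \
    -d '{"docs":[{"_source":{"message":"{\"version\":4,\"mode\":3,\"poll\":6.0,\"precision\":-23.0,\"org_time\":1709000000.0,\"xmt_time\":1709000001.0}"}}]}'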
salt/elasticsearch/files/ingest/zeek.traceroute (new file, 10 lines)
@@ -0,0 +1,10 @@
{
"description":"zeek.traceroute",
"processors":[
{"set": {"field":"event.dataset", "value":"traceroute" }},
{"json": {"field":"message", "target_field":"message2" }},
{"rename": {"field":"message2.src", "target_field":"source.ip", "ignore_missing":true,"ignore_failure":true}},
{"rename": {"field":"message2.dst", "target_field":"destination.ip", "ignore_missing":true,"ignore_failure":true}},
{"pipeline": {"name":"zeek.common"}}
]
}
@@ -80,6 +80,7 @@ elasticsearch:
managed_integrations:
description: List of integrations to add into SOC config UI. Enter the full or partial integration name. Eg. 1password, 1pass
forcedType: "[]string"
multiline: True
global: True
advanced: True
helpLink: elasticsearch.html
@@ -367,8 +368,8 @@ elasticsearch:
so-logs-detections_x_alerts: *indexSettings
so-logs-http_endpoint_x_generic: *indexSettings
so-logs-httpjson_x_generic: *indexSettings
so-logs-osquery-manager-actions: *indexSettings
so-logs-osquery-manager-action_x_responses: *indexSettings
so-logs-osquery_manager_x_action_x_responses: *indexSettings
so-logs-osquery_manager_x_result: *indexSettings
so-logs-elastic_agent_x_apm_server: *indexSettings
so-logs-elastic_agent_x_auditbeat: *indexSettings
so-logs-elastic_agent_x_cloudbeat: *indexSettings
@@ -0,0 +1,9 @@
{
"template": {
"settings": {
"index": {
"number_of_replicas": "0"
}
}
}
}
@@ -1,7 +1,9 @@
{
"template": {
"settings": {
"number_of_replicas": 0
"index": {
"number_of_replicas": "0"
}
}
}
}
@@ -2,6 +2,7 @@ hydra:
enabled:
description: Enables or disables the API authentication system, used for service account authentication. Enabling this feature requires a valid Security Onion license key. Defaults to False.
helpLink: connect.html
global: True
config:
ttl:
access_token:
@@ -85,6 +85,7 @@ influxdb-templates:
- clean: True
- defaults:
INFLUXMERGED: {{ INFLUXMERGED }}
- show_changes: False

influxdb_curl_config:
file.managed:
@@ -47,7 +47,7 @@ import() {

# Load saved objects
RESPONSE=$(curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X POST "localhost:5601/api/saved_objects/_import?overwrite=true" -H "kbn-xsrf: true" --form file=@"$ndjson_file")
echo $RESPONSE; if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
if [[ "$RETURN_CODE" != "1" ]]; then
touch /opt/so/state/kibana_$BASENAME.txt
@@ -66,7 +66,7 @@ update() {
IFS=$'\r\n' GLOBIGNORE='*' command eval 'LINES=($(cat $1))'
for i in "${LINES[@]}"; do
RESPONSE=$(curl -K /opt/so/conf/elasticsearch/curl.config -X PUT "localhost:5601/api/saved_objects/config/{{ELASTICSEARCHDEFAULTS.elasticsearch.version}}" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ")
echo $RESPONSE; if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
done
if [[ "$RETURN_CODE" != "1" ]]; then
@@ -66,6 +66,7 @@ repo_dir:
- recurse:
- user
- group
- show_changes: False

manager_sbin:
file.recurse:
@@ -76,6 +77,7 @@ manager_sbin:
- file_mode: 755
- exclude_pat:
- "*_test.py"
- show_changes: False

manager_sbin_jinja:
file.recurse:
@@ -85,6 +87,7 @@ manager_sbin_jinja:
- group: socore
- file_mode: 755
- template: jinja
- show_changes: False

so-repo-file:
file.managed:
@@ -92,6 +95,7 @@ so-repo-file:
- source: salt://manager/files/repodownload.conf
- user: socore
- group: socore
- show_changes: False

so-repo-mirrorlist:
file.managed:
@@ -166,7 +166,7 @@ airgap_update_dockers() {
docker stop so-dockerregistry
docker rm so-dockerregistry
echo "Copying the new dockers over"
tar xvf "$AGDOCKER/registry.tar" -C /nsm/docker-registry/docker
tar xf "$AGDOCKER/registry.tar" -C /nsm/docker-registry/docker
echo "Add Registry back"
docker load -i "$AGDOCKER/registry_image.tar"
fi
@@ -1002,21 +1002,21 @@ unmount_update() {

update_airgap_rules() {
# Copy the rules over to update them for airgap.
rsync -av $UPDATE_DIR/agrules/suricata/* /nsm/rules/suricata/
rsync -av $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -av $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
rsync -a $UPDATE_DIR/agrules/suricata/* /nsm/rules/suricata/
rsync -a $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -a $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
# Copy the securityonion-resorces repo over for SOC Detection Summaries and checkout the published summaries branch
rsync -av --delete --chown=socore:socore $UPDATE_DIR/agrules/securityonion-resources /opt/so/conf/soc/ai_summary_repos
rsync -a --delete --chown=socore:socore $UPDATE_DIR/agrules/securityonion-resources /opt/so/conf/soc/ai_summary_repos
git config --global --add safe.directory /opt/so/conf/soc/ai_summary_repos/securityonion-resources
git -C /opt/so/conf/soc/ai_summary_repos/securityonion-resources checkout generated-summaries-published
# Copy the securityonion-resorces repo over to nsm
rsync -av $UPDATE_DIR/agrules/securityonion-resources/* /nsm/securityonion-resources/
rsync -a $UPDATE_DIR/agrules/securityonion-resources/* /nsm/securityonion-resources/
}

update_airgap_repo() {
# Update the files in the repo
echo "Syncing new updates to /nsm/repo"
rsync -av $AGREPO/* /nsm/repo/
rsync -a $AGREPO/* /nsm/repo/
echo "Creating repo"
dnf -y install yum-utils createrepo_c
createrepo /nsm/repo
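All of these soup changes drop the verbose flags (tar xvf to xf, rsync -av to -a) so upgrade output stays readable. A hedged sketch of how to get the old detail back when troubleshooting, without editing soup (paths are hypothetical):

  rsync -av --dry-run /tmp/example_agrules/suricata/ /nsm/rules/suricata/   # preview what would sync, verbosely
  tar tvf /tmp/example_registry.tar | head                                  # list an archive instead of extracting it verbosely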
@@ -91,6 +91,7 @@ make-rule-dir-nginx:
- recurse:
- user
- group
- show_changes: False

{% endif %}
@@ -41,6 +41,7 @@ analyzerscripts:
- file_mode: 755
- template: jinja
- source: salt://sensoroni/files/analyzers
- show_changes: False

sensoroni_sbin:
file.recurse:
@@ -8,6 +8,7 @@
"role": "{{ GLOBALS.role }}",
"description": {{ SENSORONIMERGED.config.node_description | tojson }},
"address": "{{ GLOBALS.node_ip }}",
"mgmtNic": "{{ GLOBALS.main_interface }}",
"model": "{{ GLOBALS.so_model }}",
"pollIntervalMs": {{ SENSORONIMERGED.config.node_checkin_interval_ms }},
"serverUrl": "https://{{ GLOBALS.url_base }}/sensoroniagents",
@@ -79,6 +79,7 @@ socmotd:
- group: 939
- mode: 600
- template: jinja
- show_changes: False

filedetectionsbackup:
file.managed:
@@ -249,6 +250,7 @@ socore_own_custom_repos:
- recurse:
- user
- group
- show_changes: False

{% else %}
@@ -83,7 +83,7 @@ soc:
icon: fa-users-between-lines
target: ''
links:
- '/#/hunt?q=({:process.entity_id}) | groupby event.dataset | groupby -sankey event.dataset event.action | groupby event.action | groupby process.name | groupby process.command_line | groupby host.name user.name | groupby source.ip source.port destination.ip destination.port | groupby dns.question.name | groupby dns.answers.data | groupby file.path | groupby registry.path | groupby dll.path'
- '/#/hunt?q="{:process.entity_id}" | groupby event.dataset | groupby -sankey event.dataset event.action | groupby event.action | groupby process.name | groupby process.command_line | groupby host.name user.name | groupby source.ip source.port destination.ip destination.port | groupby dns.question.name | groupby dns.answers.data | groupby file.path | groupby registry.path | groupby dll.path'
- name: actionProcessAncestors
description: actionProcessAncestorsHelp
icon: fa-people-roof
@@ -1256,7 +1256,7 @@ soc:
- soc_timestamp
- event.dataset
- host.name
- user.name
- user.effective.name
- process.executable
- event.action
- event.outcome
@@ -1900,7 +1900,7 @@ soc:
query: 'event.module:endpoint | groupby event.dataset | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.name | groupby process.name'
- name: Elastic Agent API Events
description: API (Application Programming Interface) events from Elastic Agents
query: 'event.dataset:endpoint.events.api | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.name | groupby process.name | groupby process.Ext.api.name'
query: 'event.dataset:endpoint.events.api | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.name | groupby process.name | groupby -sankey process.name process.Ext.api.name | groupby process.Ext.api.name'
- name: Elastic Agent File Events
description: File events from Elastic Agents
query: 'event.dataset:endpoint.events.file | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.name | groupby process.name | groupby event.action | groupby file.path'
@@ -1918,7 +1918,7 @@ soc:
query: 'event.dataset:endpoint.events.registry | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.name | groupby process.name | groupby event.action | groupby registry.path'
- name: Elastic Agent Security Events
description: Security events from Elastic Agents
query: 'event.dataset:endpoint.events.security | groupby host.name | groupby -sankey host.name user.name | groupby user.name | groupby -sankey user.name process.executable | groupby process.executable | groupby event.action | groupby event.outcome'
query: 'event.dataset:endpoint.events.security | groupby host.name | groupby -sankey host.name user.effective.name | groupby user.effective.name | groupby -sankey user.effective.name process.executable | groupby process.executable | groupby event.action | groupby event.outcome'
- name: Host Overview
description: Overview of all host data types
query: '((event.category:registry OR event.category:host OR event.category:process OR event.category:driver OR event.category:configuration) OR (event.category:file AND _exists_:process.executable) OR (event.category:network AND _exists_:host.name)) | groupby event.dataset* event.category* event.action* | groupby event.type | groupby -sankey event.type host.name | groupby host.name | groupby user.name | groupby file.name | groupby process.executable'
@@ -54,7 +54,11 @@ soc:
title: Log Level
description: The SOC log level, useful for enabling debug logging for advanced troubleshooting. Allowed values are debug, info, warn, error. The SOC log is available at /opt/so/log/soc/sensoroni-server.log.
global: True
regex: ^(info|debug|warn|error)$
options:
- info
- debug
- warn
- error
actions:
description: A list of actions a user can take from the SOC UI against a hunt, alert, and other records. The action must be defined in JSON object format, and contain a "name" key and "links" key. The links is a list of URLs, where the most suitable URL in the list will be the selected URL when the user clicks the action.
global: True