merge with 120 dev and fix conflicts

m0duspwnens
2025-01-23 10:56:48 -05:00
92 changed files with 3711 additions and 528531 deletions

View File

@@ -22,6 +22,7 @@ body:
- 2.4.90
- 2.4.100
- 2.4.110
- 2.4.111
- 2.4.120
- Other (please provide detail below)
validations:

View File

@@ -1,17 +1,17 @@
### 2.4.110-20241010 ISO image released on 2024/10/10
### 2.4.111-20241217 ISO image released on 2024/12/18
### Download and Verify
2.4.110-20241010 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.110-20241010.iso
2.4.111-20241217 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.111-20241217.iso
MD5: A8003DEBC4510D538F06238D9DBB86C0
SHA1: 441DE90A192C8FE8BEBAB9ACE1A3CC18F71A2B1F
SHA256: B087A0D12FC2CA3CCD02BD52E52421F4F60DC09BF826337A057E05A04D114CCE
MD5: 767823D75EB76A6DC6132F799FD0E720
SHA1: 0A7B6918FE5D4BC89EE3F2E03B4F8F4D6255141D
SHA256: 394BFCED9B5EAA0788E2D04806231B3A170839394AAF8DD23B4CE0EB9D6EF727
Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.110-20241010.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.111-20241217.iso.sig
Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
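For context, a minimal sketch of fetching and importing that signing key before verification (an assumption; the import step itself falls outside the hunks shown here):
```
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
gpg --import KEYS
```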
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.
Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.110-20241010.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.111-20241217.iso.sig
```
Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.110-20241010.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.111-20241217.iso
```
Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.4.110-20241010.iso.sig securityonion-2.4.110-20241010.iso
gpg --verify securityonion-2.4.111-20241217.iso.sig securityonion-2.4.111-20241217.iso
```
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Thu 10 Oct 2024 07:05:30 AM EDT using RSA key ID FE507013
gpg: Signature made Tue 17 Dec 2024 04:33:10 PM EST using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.

View File

@@ -1 +1 @@
2.4.120
2.4.120

View File

@@ -49,6 +49,8 @@ base:
- kibana.adv_kibana
- kratos.soc_kratos
- kratos.adv_kratos
- hydra.soc_hydra
- hydra.adv_hydra
- redis.nodes
- redis.soc_redis
- redis.adv_redis
@@ -100,6 +102,7 @@ base:
- kibana.secrets
{% endif %}
- kratos.soc_kratos
- kratos.adv_kratos
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- elasticfleet.soc_elasticfleet
@@ -116,8 +119,8 @@ base:
- kibana.adv_kibana
- strelka.soc_strelka
- strelka.adv_strelka
- kratos.soc_kratos
- kratos.adv_kratos
- hydra.soc_hydra
- hydra.adv_hydra
- redis.soc_redis
- redis.adv_redis
- influxdb.soc_influxdb
@@ -152,6 +155,8 @@ base:
- idstools.adv_idstools
- kratos.soc_kratos
- kratos.adv_kratos
- hydra.soc_hydra
- hydra.adv_hydra
- redis.nodes
- redis.soc_redis
- redis.adv_redis
@@ -261,6 +266,7 @@ base:
- kibana.secrets
{% endif %}
- kratos.soc_kratos
- kratos.adv_kratos
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- elasticfleet.soc_elasticfleet
@@ -275,8 +281,8 @@ base:
- kibana.adv_kibana
- backup.soc_backup
- backup.adv_backup
- kratos.soc_kratos
- kratos.adv_kratos
- hydra.soc_hydra
- hydra.adv_hydra
- redis.soc_redis
- redis.adv_redis
- influxdb.soc_influxdb

View File

@@ -24,6 +24,7 @@
'influxdb',
'soc',
'kratos',
'hydra',
'elasticfleet',
'elastic-fleet-package-registry',
'firewall',
@@ -68,6 +69,7 @@
'strelka.manager',
'soc',
'kratos',
'hydra',
'influxdb',
'telegraf',
'firewall',
@@ -98,6 +100,7 @@
'strelka.manager',
'soc',
'kratos',
'hydra',
'elasticfleet',
'elastic-fleet-package-registry',
'firewall',
@@ -120,6 +123,7 @@
'strelka.manager',
'soc',
'kratos',
'hydra',
'elastic-fleet-package-registry',
'elasticfleet',
'firewall',
@@ -154,6 +158,7 @@
'influxdb',
'soc',
'kratos',
'hydra',
'elastic-fleet-package-registry',
'elasticfleet',
'firewall',

View File

@@ -4,4 +4,5 @@ backup:
- /etc/pki
- /etc/salt
- /nsm/kratos
- /nsm/hydra
destination: "/nsm/backup"

View File

@@ -11,6 +11,7 @@
{% else %}
{% set UPDATE_DIR='/tmp/sogh/securityonion' %}
{% endif %}
{% set SOVERSION = salt['file.read']('/etc/soversion').strip() %}
remove_common_soup:
file.absent:
@@ -107,6 +108,17 @@ copy_so-repo-sync_sbin:
- force: True
- preserve: True
{# this is added in 2.4.120 to remove salt repo files pointing to saltproject.io to accommodate the move to Broadcom and the new bootstrap-salt script #}
{% if salt['pkg.version_cmp'](SOVERSION, '2.4.120') == -1 %}
{% set saltrepofile = '/etc/yum.repos.d/salt.repo' %}
{% if grains.os_family == 'Debian' %}
{% set saltrepofile = '/etc/apt/sources.list.d/salt.list' %}
{% endif %}
remove_saltproject_io_repo_manager:
file.absent:
- name: {{ saltrepofile }}
{% endif %}
{% else %}
fix_23_soup_sbin:
cmd.run:

View File

@@ -29,6 +29,7 @@ container_list() {
"so-influxdb"
"so-kibana"
"so-kratos"
"so-hydra"
"so-nginx"
"so-pcaptools"
"so-soc"
@@ -53,6 +54,7 @@ container_list() {
"so-kafka"
"so-kibana"
"so-kratos"
"so-hydra"
"so-logstash"
"so-nginx"
"so-pcaptools"

View File

@@ -150,6 +150,7 @@ if [[ $EXCLUDE_FALSE_POSITIVE_ERRORS == 'Y' ]]; then
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|app_layer.error" # false positive (suricata 7) in stats.log e.g. app_layer.error.imap.parser | Total | 0
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|is not an ip string literal" # false positive (Open Canary logging out blank IP addresses)
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|syncing rule" # false positive (rule sync log line includes rule name which can contain 'error')
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|request_unauthorized" # false positive (login failures to Hydra result in an 'error' log)
fi
if [[ $EXCLUDE_KNOWN_ERRORS == 'Y' ]]; then
@@ -210,6 +211,7 @@ if [[ $EXCLUDE_KNOWN_ERRORS == 'Y' ]]; then
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|integrity check failed" # Detections: Exclude false positive due to automated testing
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|syncErrors" # Detections: Not an actual error
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|Initialized license manager" # SOC log: before fields.status was changed to fields.licenseStatus
EXCLUDED_ERRORS="$EXCLUDED_ERRORS|from NIC checksum offloading" # zeek reporter.log
fi
RESULT=0
@@ -248,6 +250,9 @@ exclude_log "agentstatus.log" # ignore this log since it tracks agents in error
exclude_log "detections_runtime-status_yara.log" # temporarily ignore this log until Detections is more stable
exclude_log "/nsm/kafka/data/" # ignore Kafka data directory from log check.
# Include Zeek reporter.log to detect errors after running known good pcap(s) through sensor
echo "/nsm/zeek/spool/logger/reporter.log" >> /tmp/log_check_files
for log_file in $(cat /tmp/log_check_files); do
status "Checking log file $log_file"
tail -n $RECENT_LOG_LINES $log_file > /tmp/log_check

View File

@@ -51,6 +51,14 @@ docker:
custom_bind_mounts: []
extra_hosts: []
extra_env: []
'so-hydra':
final_octet: 30
port_bindings:
- 0.0.0.0:4444:4444
- 0.0.0.0:4445:4445
custom_bind_mounts: []
extra_hosts: []
extra_env: []
'so-logstash':
final_octet: 29
port_bindings:
@@ -74,6 +82,7 @@ docker:
- 443:443
- 8443:8443
- 7788:7788
- 7789:7789
custom_bind_mounts: []
extra_hosts: []
extra_env: []

View File

@@ -45,6 +45,7 @@ docker:
so-influxdb: *dockerOptions
so-kibana: *dockerOptions
so-kratos: *dockerOptions
so-hydra: *dockerOptions
so-logstash: *dockerOptions
so-nginx: *dockerOptions
so-nginx-fleet-node: *dockerOptions

View File

@@ -48,10 +48,12 @@ elasticfleet:
- cisco_ios
- cisco_ise
- cisco_meraki
- cisco_secure_email_gateway
- cisco_umbrella
- citrix_adc
- citrix_waf
- cloudflare
- cloudflare_logpush
- crowdstrike
- darktrace
- elastic_agent
@@ -107,7 +109,9 @@ elasticfleet:
- ti_anomali
- ti_cybersixgill
- ti_misp
- ti_opencti
- ti_otx
- ti_rapid7_threat_command
- ti_recordedfuture
- ti_threatq
- trendmicro

View File

@@ -0,0 +1,30 @@
{
"package": {
"name": "log",
"version": ""
},
"name": "hydra-logs",
"namespace": "so",
"description": "Hydra logs",
"policy_id": "so-grid-nodes_general",
"inputs": {
"logs-logfile": {
"enabled": true,
"streams": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/hydra/hydra.log"
],
"data_stream.dataset": "hydra",
"tags": ["so-hydra"],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: iam\n module: hydra",
"custom": "pipeline: hydra"
}
}
}
}
},
"force": true
}

View File

@@ -76,5 +76,11 @@ do
printf "\n### $GOOS/$GOARCH Installer Generated...\n"
done
printf "\n\n### Generating MSI...\n"
docker run \
--mount type=bind,source=/opt/so/saltstack/local/salt/elasticfleet/files/so_agent-installers/,target=/output/ \
{{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elastic-agent-builder:{{ GLOBALS.so_version }} wixl -o /output/so-elastic-agent_windows_amd64_msi --arch x64 /workspace/so-elastic-agent.wxs
printf "\n### MSI Generated...\n"
printf "\n### Cleaning up temp files in /nsm/elastic-agent-workspace\n"
rm -rf /nsm/elastic-agent-workspace

File diff suppressed because it is too large

View File

@@ -10,13 +10,13 @@
{ "split": { "if": "ctx.event?.dataset != null && ctx.event.dataset.contains('.')", "field": "event.dataset", "separator": "\\.", "target_field": "module_temp" } },
{ "set": { "if": "ctx.module_temp != null", "override": true, "field": "event.module", "value": "{{module_temp.0}}" } },
{ "gsub": { "if": "ctx.event?.dataset != null && ctx.event.dataset.contains('.')", "field": "event.dataset", "pattern": "^[^.]*.", "replacement": "", "target_field": "dataset_tag_temp" } },
{ "append": { "if": "ctx.dataset_tag_temp != null", "field": "tags", "value": "{{dataset_tag_temp}}" } },
{ "append": { "if": "ctx.dataset_tag_temp != null", "field": "tags", "value": "{{dataset_tag_temp}}", "allow_duplicates": false } },
{ "set": { "if": "ctx.network?.direction == 'egress'", "override": true, "field": "network.initiated", "value": "true" } },
{ "set": { "if": "ctx.network?.direction == 'ingress'", "override": true, "field": "network.initiated", "value": "false" } },
{ "set": { "if": "ctx.network?.type == 'ipv4'", "override": true, "field": "destination.ipv6", "value": "false" } },
{ "set": { "if": "ctx.network?.type == 'ipv6'", "override": true, "field": "destination.ipv6", "value": "true" } },
{ "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.dataset", "value": "import" } },
{ "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.namespace", "value": "so" } },
{ "set": { "if": "ctx.tags != null && ctx.tags.contains('import')", "override": true, "field": "data_stream.dataset", "value": "import" } },
{ "set": { "if": "ctx.tags != null && ctx.tags.contains('import')", "override": true, "field": "data_stream.namespace", "value": "so" } },
{ "date": { "if": "ctx.event?.module == 'system'", "field": "event.created", "target_field": "@timestamp","ignore_failure": true, "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSX","yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } },
{ "community_id":{ "if": "ctx.event?.dataset == 'endpoint.events.network'", "ignore_failure":true } },
{ "set": { "if": "ctx.event?.module == 'fim'", "override": true, "field": "event.module", "value": "file_integrity" } },

View File

@@ -0,0 +1,9 @@
{
"description" : "hydra",
"processors" : [
{"set":{"field":"audience","value":"access","override":false,"ignore_failure":true}},
{"set":{"field":"event.dataset","ignore_empty_value":true,"ignore_failure":true,"value":"hydra.{{{audience}}}","media_type":"text/plain"}},
{"set":{"field":"event.action","ignore_failure":true,"copy_from":"msg" }},
{ "pipeline": { "name": "common" } }
]
}

View File

@@ -1,7 +1,7 @@
{
"description" : "suricata.alert",
"processors" : [
{ "set": { "field": "_index", "value": "logs-suricata.alerts-so" } },
{ "set": { "if": "ctx.event?.imported != true", "field": "_index", "value": "logs-suricata.alerts-so" } },
{ "set": { "field": "tags","value": "alert" }},
{ "rename":{ "field": "message2.alert", "target_field": "rule", "ignore_failure": true } },
{ "rename":{ "field": "rule.signature", "target_field": "rule.name", "ignore_failure": true } },

View File

@@ -18,6 +18,7 @@
{ "set": { "if": "ctx.destination?.ip != null", "field": "server.ip", "value": "{{destination.ip}}" } },
{ "set": { "if": "ctx.destination?.port != null", "field": "server.port", "value": "{{destination.port}}" } },
{ "set": { "field": "observer.name", "value": "{{agent.name}}" } },
{ "append": { "if": "ctx.network?.protocol != null && ctx.network?.protocol.contains(\"openvpn\")","field": "tags","value": ["{{network.protocol}}"],"allow_duplicates": false,"ignore_failure": true}},
{ "date": { "field": "message2.ts", "target_field": "@timestamp", "formats": ["ISO8601", "UNIX"], "ignore_failure": true } },
{ "remove": { "field": ["agent"], "ignore_failure": true } },
{ "pipeline": { "name": "common" } }

View File

@@ -38,6 +38,8 @@
{ "set": { "if": "ctx.connection?.state == 'SH'", "field": "connection.state_description", "value": "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)" } },
{ "set": { "if": "ctx.connection?.state == 'SHR'", "field": "connection.state_description", "value": "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator" } },
{ "set": { "if": "ctx.connection?.state == 'OTH'", "field": "connection.state_description", "value": "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)" } },
{ "set": { "if": "ctx.network?.protocol != null && ctx.network?.protocol.contains(\"ipsec\")", "field": "network.protocol", "value": "ipsec"}},
{ "set": { "if": "ctx.network?.protocol != null && ctx.network?.protocol.contains(\"openvpn\")", "field": "network.protocol", "value": "openvpn"}},
{ "pipeline": { "name": "zeek.common" } }
]
}

View File

@@ -0,0 +1,37 @@
{
"description" : "zeek.http2",
"processors" : [
{ "set": { "field": "event.dataset", "value": "http2" } },
{ "set": { "field": "network.transport", "value": "tcp" } },
{ "json": { "field": "message", "target_field": "message2", "ignore_failure": true } },
{ "rename": { "field": "message2.trans_depth", "target_field": "http.trans_depth", "ignore_missing": true } },
{ "rename": { "field": "message2.method", "target_field": "http.method", "ignore_missing": true } },
{ "rename": { "field": "message2.host", "target_field": "http.virtual_host", "ignore_missing": true } },
{ "rename": { "field": "message2.uri", "target_field": "http.uri", "ignore_missing": true } },
{ "rename": { "field": "message2.referrer", "target_field": "http.referrer", "ignore_missing": true } },
{ "rename": { "field": "message2.version", "target_field": "http.version", "ignore_missing": true } },
{ "rename": { "field": "message2.user_agent", "target_field": "http.useragent", "ignore_missing": true } },
{ "rename": { "field": "message2.request_body_len", "target_field": "http.request.body.length", "ignore_missing": true } },
{ "rename": { "field": "message2.response_body_len", "target_field": "http.response.body.length", "ignore_missing": true } },
{ "rename": { "field": "message2.status_code", "target_field": "http.status_code", "ignore_missing": true } },
{ "rename": { "field": "message2.status_msg", "target_field": "http.status_message", "ignore_missing": true } },
{ "rename": { "field": "message2.info_code", "target_field": "http.info_code", "ignore_missing": true } },
{ "rename": { "field": "message2.info_msg", "target_field": "http.info_message", "ignore_missing": true } },
{ "rename": { "field": "message2.username", "target_field": "http.user", "ignore_missing": true } },
{ "rename": { "field": "message2.password", "target_field": "http.password", "ignore_missing": true } },
{ "rename": { "field": "message2.proxied", "target_field": "http.proxied", "ignore_missing": true } },
{ "rename": { "field": "message2.orig_fuids", "target_field": "log.id.orig_fuids", "ignore_missing": true } },
{ "rename": { "field": "message2.orig_filenames", "target_field": "file.orig_filenames", "ignore_missing": true } },
{ "rename": { "field": "message2.orig_mime_types", "target_field": "file.orig_mime_types", "ignore_missing": true } },
{ "rename": { "field": "message2.resp_fuids", "target_field": "log.id.resp_fuids", "ignore_missing": true } },
{ "rename": { "field": "message2.resp_filenames", "target_field": "file.resp_filenames", "ignore_missing": true } },
{ "rename": { "field": "message2.resp_mime_types", "target_field": "file.resp_mime_types", "ignore_missing": true } },
{ "rename": { "field": "message2.stream_id", "target_field": "http2.stream_id", "ignore_missing": true } },
{ "remove": { "field": "message2.tags", "ignore_failure": true } },
{ "remove": { "field": ["host"], "ignore_failure": true } },
{ "script": { "lang": "painless", "source": "ctx.uri_length = ctx.uri.length()", "ignore_failure": true } },
{ "script": { "lang": "painless", "source": "ctx.useragent_length = ctx.useragent.length()", "ignore_failure": true } },
{ "script": { "lang": "painless", "source": "ctx.virtual_host_length = ctx.virtual_host.length()", "ignore_failure": true } },
{ "pipeline": { "name": "zeek.common" } }
]
}

View File

@@ -0,0 +1,38 @@
{
"description": "zeek.ipsec",
"processors": [
{"set": { "field": "event.dataset","value": "ipsec"}},
{"json": { "field": "message","target_field": "message2","ignore_failure": true}},
{"rename": {"field": "message2.initiator_spi","target_field": "ipsec.initiator_spi","ignore_missing": true}},
{"rename": {"field": "message2.responder_spi","target_field": "ipsec.responder_spi","ignore_missing": true}},
{"rename": {"field": "message2.maj_ver","target_field": "ipsec.maj_version","ignore_missing": true}},
{"rename": {"field": "message2.min_ver","target_field": "ipsec.min_version","ignore_missing": true}},
{"set": {"ignore_failure": true,"field": "ipsec.version","value": "{{ipsec.maj_version}}.{{ipsec.min_version}}"}},
{"rename": {"field": "message2.exchange_type","target_field": "ipsec.exchange_type","ignore_missing": true}},
{"rename": {"field": "message2.flag_e","target_field": "ipsec.flag_e","ignore_missing": true}},
{"rename": {"field": "message2.flag_c","target_field": "ipsec.flag_c","ignore_missing": true}},
{"rename": {"field": "message2.flag_a","target_field": "ipsec.flag_a","ignore_missing": true}},
{"rename": {"field": "message2.flag_i","target_field": "ipsec.flag_i","ignore_missing": true}},
{"rename": {"field": "message2.flag_v","target_field": "ipsec.flag_v","ignore_missing": true}},
{"rename": {"field": "message2.flag_r","target_field": "ipsec.flag_r","ignore_missing": true}},
{"rename": {"field": "message2.message_id","target_field": "ipsec.message_id","ignore_missing": true}},
{"rename": {"field": "message2.vendor_ids","target_field": "ipsec.vendor_ids","ignore_missing": true}},
{"rename": {"field": "message2.notify_messages","target_field": "ipsec.notify_messages","ignore_missing": true}},
{"rename": {"field": "message2.transforms","target_field": "ipsec.transforms","ignore_missing": true}},
{"rename": {"field": "message2.ke_dh_groups","target_field": "ipsec.ke_dh_groups","ignore_missing": true}},
{"rename": {"field": "message2.proposals","target_field": "ipsec.proposals","ignore_missing": true}},
{"rename": {"field": "message2.certificates","target_field": "ipsec.certificates","ignore_missing": true}},
{"rename": {"field": "message2.transform_attributes","target_field": "ipsec.transform_attributes","ignore_missing": true}},
{"rename": {"field": "message2.length","target_field": "ipsec.length","ignore_missing": true}},
{"rename": {"field": "message2.hash","target_field": "ipsec.hash","ignore_missing": true}},
{"rename": {"field": "message2.doi","target_field": "ipsec.doi","ignore_missing": true}},
{"rename": {"field": "message2.situation","target_field": "ipsec.situation","ignore_missing": true}},
{"script": {
"lang": "painless",
"description": "Remove ipsec fields with empty arrays",
"source": "if (ctx.containsKey('ipsec') && ctx.ipsec instanceof Map) {\n for (String field : ['certificates', 'ke_dh_groups', 'notify_messages', 'proposals', 'transforms', 'transform_attributes', 'vendor_ids']) {\n if (ctx.ipsec[field] instanceof List && ctx.ipsec[field].isEmpty()) {\n ctx.ipsec.remove(field);\n }\n }\n }",
"ignore_failure": true
}},
{"pipeline": {"name": "zeek.common"}}
]
}

View File

@@ -0,0 +1,25 @@
{
"description": "zeek.ldap",
"processors": [
{"set": {"field": "event.dataset", "value": "ldap"}},
{"json": {"field": "message", "target_field": "message2", "ignore_failure": true}},
{"rename": {"field": "message2.message_id", "target_field": "ldap.message_id", "ignore_missing": true}},
{"rename": {"field": "message2.opcode", "target_field": "ldap.opcode", "ignore_missing": true}},
{"rename": {"field": "message2.result", "target_field": "ldap.result", "ignore_missing": true}},
{"rename": {"field": "message2.diagnostic_message", "target_field": "ldap.diagnostic_message", "ignore_missing": true}},
{"rename": {"field": "message2.version", "target_field": "ldap.version", "ignore_missing": true}},
{"rename": {"field": "message2.object", "target_field": "ldap.object", "ignore_missing": true}},
{"rename": {"field": "message2.argument", "target_field": "ldap.argument", "ignore_missing": true}},
{"rename": {"field": "message2.scope", "target_field": "ldap_search.scope", "ignore_missing":true}},
{"rename": {"field": "message2.deref_aliases", "target_field": "ldap_search.deref_aliases", "ignore_missing":true}},
{"rename": {"field": "message2.base_object", "target_field": "ldap.object", "ignore_missing":true}},
{"rename": {"field": "message2.result_count", "target_field": "ldap_search.result_count", "ignore_missing":true}},
{"rename": {"field": "message2.filter", "target_field": "ldap_search.filter", "ignore_missing":true}},
{"rename": {"field": "message2.attributes", "target_field": "ldap_search.attributes", "ignore_missing":true}},
{"script": {"source": "if (ctx.containsKey('ldap') && ctx.ldap.containsKey('diagnostic_message') && ctx.ldap.diagnostic_message != null) {\n String message = ctx.ldap.diagnostic_message;\n\n // get user and property from SASL success\n if (message.toLowerCase().contains(\"sasl(0): successful result\")) {\n Pattern pattern = /user:\\s*([^ ]+)\\s*property:\\s*([^ ]+)/i;\n Matcher matcher = pattern.matcher(message);\n if (matcher.find()) {\n ctx.ldap.user_email = matcher.group(1); // Extract user email\n ctx.ldap.property = matcher.group(2); // Extract property\n }\n }\n if (message.toLowerCase().contains(\"ldaperr:\")) {\n Pattern pattern = /comment:\\s*([^,]+)/i;\n Matcher matcher = pattern.matcher(message);\n\n if (matcher.find()) {\n ctx.ldap.comment = matcher.group(1);\n }\n }\n }","ignore_failure": true}},
{"script": {"source": "if (ctx.containsKey('ldap') && ctx.ldap.containsKey('object') && ctx.ldap.object != null) {\n String message = ctx.ldap.object;\n\n // parse common name from ldap object\n if (message.toLowerCase().contains(\"cn=\")) {\n Pattern pattern = /cn=([^,]+)/i;\n Matcher matcher = pattern.matcher(message);\n if (matcher.find()) {\n ctx.ldap.common_name = matcher.group(1); // Extract CN\n }\n }\n // build domain from ldap object\n if (message.toLowerCase().contains(\"dc=\")) {\n Pattern dcPattern = /dc=([^,]+)/i;\n Matcher dcMatcher = dcPattern.matcher(message);\n\n StringBuilder domainBuilder = new StringBuilder();\n while (dcMatcher.find()) {\n if (domainBuilder.length() > 0 ){\n domainBuilder.append(\".\");\n }\n domainBuilder.append(dcMatcher.group(1));\n }\n if (domainBuilder.length() > 0) {\n ctx.ldap.domain = domainBuilder.toString();\n }\n }\n // create list of any organizational units from ldap object\n if (message.toLowerCase().contains(\"ou=\")) {\n Pattern ouPattern = /ou=([^,]+)/i;\n Matcher ouMatcher = ouPattern.matcher(message);\n ctx.ldap.organizational_unit = [];\n\n while (ouMatcher.find()) {\n ctx.ldap.organizational_unit.add(ouMatcher.group(1));\n }\n if(ctx.ldap.organizational_unit.isEmpty()) {\n ctx.remove(\"ldap.organizational_unit\");\n }\n }\n}\n","ignore_failure": true}},
{"remove": {"field": "message2.tags","ignore_failure": true}},
{"remove": {"field": ["host"],"ignore_failure": true}},
{"pipeline": {"name": "zeek.common"}}
]
}

View File

@@ -0,0 +1,9 @@
{
"description":"zeek.ldap_search",
"processors":[
{"pipeline": {"name": "zeek.ldap", "ignore_missing_pipeline":true,"ignore_failure":true}},
{"set": {"field": "event.dataset", "value":"ldap_search"}},
{"remove": {"field": "tags", "ignore_missing":true}},
{"pipeline": {"name": "zeek.common"}}
]
}

View File

@@ -0,0 +1,18 @@
{
"description" : "zeek.quic",
"processors" : [
{ "set": { "field": "event.dataset", "value": "quic" } },
{ "set": { "field": "network.transport", "value": "udp" } },
{ "json": { "field": "message", "target_field": "message2", "ignore_failure": true } },
{ "rename": { "field": "message2.version", "target_field": "quic.version", "ignore_missing": true } },
{ "rename": { "field": "message2.client_initial_dcid", "target_field": "quic.client_initial_dcid", "ignore_missing": true } },
{ "rename": { "field": "message2.client_scid", "target_field": "quic.client_scid", "ignore_missing": true } },
{ "rename": { "field": "message2.server_scid", "target_field": "quic.server_scid", "ignore_missing": true } },
{ "rename": { "field": "message2.server_name", "target_field": "quic.server_name", "ignore_missing": true } },
{ "rename": { "field": "message2.client_protocol", "target_field": "quic.client_protocol", "ignore_missing": true } },
{ "rename": { "field": "message2.history", "target_field": "quic.history", "ignore_missing": true } },
{ "remove": { "field": "message2.tags", "ignore_failure": true } },
{ "remove": { "field": ["host"], "ignore_failure": true } },
{ "pipeline": { "name": "zeek.common" } }
]
}

View File

@@ -491,6 +491,7 @@ elasticsearch:
so-logs-ti_cybersixgill_x_threat: *indexSettings
so-logs-ti_misp_x_threat: *indexSettings
so-logs-ti_misp_x_threat_attributes: *indexSettings
so-logs-ti_opencti_x_indicator: *indexSettings
so-logs-ti_otx_x_pulses_subscribed: *indexSettings
so-logs-ti_otx_x_threat: *indexSettings
so-logs-ti_recordedfuture_x_latest_ioc-template: *indexSettings
@@ -545,6 +546,7 @@ elasticsearch:
so-suricata_x_alerts: *indexSettings
so-import: *indexSettings
so-kratos: *indexSettings
so-hydra: *indexSettings
so-kismet: *indexSettings
so-logstash: *indexSettings
so-redis: *indexSettings

View File

@@ -603,6 +603,89 @@
}
}
},
"ipsec": {
"properties": {
"certificates": {
"ignore_above": 1024,
"type": "keyword"
},
"exchange_type": {
"type": "short"
},
"flag_a": {
"type": "boolean"
},
"flag_c": {
"type": "boolean"
},
"flag_e": {
"type": "boolean"
},
"flag_i": {
"type": "boolean"
},
"flag_r": {
"type": "boolean"
},
"flag_v": {
"type": "boolean"
},
"hash": {
"ignore_above": 1024,
"type": "keyword"
},
"initiator_spi": {
"ignore_above": 1024,
"type": "keyword"
},
"ke_dh_groups": {
"type": "short"
},
"length": {
"type": "long"
},
"maj_version": {
"type": "short"
},
"message_id": {
"type": "long"
},
"min_version": {
"type": "short"
},
"notify_messages": {
"ignore_above": 1024,
"type": "keyword"
},
"proposals": {
"type": "long"
},
"responder_spi": {
"ignore_above": 1024,
"type": "keyword"
},
"situation": {
"ignore_above": 1024,
"type": "keyword"
},
"transform_attributes": {
"ignore_above": 1024,
"type": "keyword"
},
"transforms": {
"ignore_above": 1024,
"type": "keyword"
},
"vendor_ids": {
"ignore_above": 1024,
"type": "keyword"
},
"version": {
"ignore_above": 1024,
"type": "keyword"
}
}
},
"irc": {
"properties": {
"addl": {
@@ -751,6 +834,81 @@
}
}
},
"ldap": {
"type": "object",
"properties": {
"message_id": {
"type": "short"
},
"opcode": {
"ignore_above": 1024,
"type": "keyword"
},
"result": {
"ignore_above": 1024,
"type": "keyword"
},
"diagnostic_message": {
"ignore_above": 1024,
"type": "keyword"
},
"version": {
"type": "short"
},
"object": {
"ignore_above": 1024,
"type": "keyword"
},
"argument": {
"ignore_above": 1024,
"type": "keyword"
},
"user_email": {
"ignore_above": 1024,
"type": "keyword"
},
"property": {
"ignore_above": 1024,
"type": "keyword"
},
"common_name": {
"ignore_above": 1024,
"type": "keyword"
},
"organizational_unit": {
"ignore_above": 1024,
"type": "keyword"
},
"domain": {
"ignore_above": 1024,
"type": "keyword"
}
}
},
"ldap_search": {
"type": "object",
"properties": {
"scope": {
"ignore_above": 1024,
"type": "keyword"
},
"deref_aliases": {
"ignore_above": 1024,
"type": "keyword"
},
"result_count": {
"type": "long"
},
"filter": {
"ignore_above": 1024,
"type": "keyword"
},
"attributes": {
"ignore_above": 1024,
"type": "keyword"
}
}
},
"modbus": {
"properties": {
"exception": {
@@ -1089,6 +1247,38 @@
}
}
},
"quic": {
"type": "object",
"properties": {
"server_name": {
"ignore_above": 1024,
"type": "keyword"
},
"version": {
"type": "short"
},
"client_initial_dcid": {
"ignore_above": 1024,
"type": "keyword"
},
"client_scid": {
"ignore_above": 1024,
"type": "keyword"
},
"server_scid": {
"ignore_above": 1024,
"type": "keyword"
},
"client_protocol": {
"ignore_above": 1024,
"type": "keyword"
},
"history": {
"ignore_above": 1024,
"type": "keyword"
}
}
},
"radius": {
"properties": {
"connect_info": {

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -0,0 +1,36 @@
{
"template": {
"mappings": {
"properties": {
"host": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"related": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"destination": {
"properties":{
"ip": {
"type": "ip"
}
}
},
"source": {
"properties":{
"ip": {
"type": "ip"
}
}
}
}
}
}
}

View File

@@ -9,6 +9,7 @@
'so-influxdb',
'so-kibana',
'so-kratos',
'so-hydra',
'so-nginx',
'so-redis',
'so-soc',
@@ -30,6 +31,7 @@
'so-kafka',
'so-kibana',
'so-kratos',
'so-hydra',
'so-logstash',
'so-nginx',
'so-redis',
@@ -73,6 +75,7 @@
'so-influxdb',
'so-kibana',
'so-kratos',
'so-hydra',
'so-nginx',
'so-soc'
] %}

51
salt/hydra/config.sls Normal file
View File

@@ -0,0 +1,51 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}
{% from "hydra/map.jinja" import HYDRAMERGED %}
hydradir:
file.directory:
- name: /nsm/hydra
- user: 928
- group: 928
- mode: 700
- makedirs: True
hydradbdir:
file.directory:
- name: /nsm/hydra/db
- user: 928
- group: 928
- mode: 700
- makedirs: True
hydralogdir:
file.directory:
- name: /opt/so/log/hydra
- user: 928
- group: 928
- makedirs: True
hydraconfig:
file.managed:
- name: /opt/so/conf/hydra/hydra.yaml
- source: salt://hydra/files/hydra.yaml.jinja
- user: 928
- group: 928
- mode: 600
- template: jinja
- makedirs: True
- defaults:
HYDRAMERGED: {{ HYDRAMERGED }}
{% else %}
{{sls}}_state_not_allowed:
test.fail_without_changes:
- name: {{sls}}_state_not_allowed
{% endif %}

30
salt/hydra/defaults.yaml Normal file
View File

@@ -0,0 +1,30 @@
hydra:
enabled: False
config:
serve:
public:
port: 4444
admin:
port: 4445
urls:
self:
issuer: https://URL_BASE/connect
public: https://URL_BASE/connect
admin: http://localhost:4445
secrets:
system: []
ttl:
access_token: 1h
oidc:
subject_identifiers:
supported_types:
- pairwise
- public
pairwise:
salt: ""
log:
level: debug
format: json
sqa:
opt_out: true

27
salt/hydra/disabled.sls Normal file
View File

@@ -0,0 +1,27 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}
include:
- hydra.sostatus
so-hydra:
docker_container.absent:
- force: True
so-hydra_so-status.disabled:
file.comment:
- name: /opt/so/conf/so-status/so-status.conf
- regex: ^so-hydra$
{% else %}
{{sls}}_state_not_allowed:
test.fail_without_changes:
- name: {{sls}}_state_not_allowed
{% endif %}

103
salt/hydra/enabled.sls Normal file
View File

@@ -0,0 +1,103 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
#
# Note: Per the Elastic License 2.0, the second limitation states:
#
# "You may not move, change, disable, or circumvent the license key functionality
# in the software, and you may not remove or obscure any functionality in the
# software that is protected by the license key."
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}
{% from 'docker/docker.map.jinja' import DOCKER %}
{% from 'vars/globals.map.jinja' import GLOBALS %}
{% if 'api' in salt['pillar.get']('features', []) %}
include:
- hydra.config
- hydra.sostatus
so-hydra:
docker_container.running:
- image: {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-hydra:{{ GLOBALS.so_version }}
- hostname: hydra
- name: so-hydra
- networks:
- sobridge:
- ipv4_address: {{ DOCKER.containers['so-hydra'].ip }}
- binds:
- /opt/so/conf/hydra/:/hydra-conf:ro
- /opt/so/log/hydra/:/hydra-log:rw
- /nsm/hydra/db:/hydra-data:rw
{% if DOCKER.containers['so-hydra'].custom_bind_mounts %}
{% for BIND in DOCKER.containers['so-hydra'].custom_bind_mounts %}
- {{ BIND }}
{% endfor %}
{% endif %}
- port_bindings:
{% for BINDING in DOCKER.containers['so-hydra'].port_bindings %}
- {{ BINDING }}
{% endfor %}
{% if DOCKER.containers['so-hydra'].extra_hosts %}
- extra_hosts:
{% for XTRAHOST in DOCKER.containers['so-hydra'].extra_hosts %}
- {{ XTRAHOST }}
{% endfor %}
{% endif %}
{% if DOCKER.containers['so-hydra'].extra_env %}
- environment:
{% for XTRAENV in DOCKER.containers['so-hydra'].extra_env %}
- {{ XTRAENV }}
{% endfor %}
{% endif %}
- restart_policy: unless-stopped
- watch:
- file: hydraconfig
- require:
- file: hydraconfig
- file: hydralogdir
- file: hydradir
delete_so-hydra_so-status.disabled:
file.uncomment:
- name: /opt/so/conf/so-status/so-status.conf
- regex: ^so-hydra$
wait_for_hydra:
http.wait_for_successful_query:
- name: 'http://{{ GLOBALS.manager }}:4444/'
- ssl: True
- verify_ssl: False
- status:
- 200
- 301
- 302
- 404
- status_type: list
- wait_for: 300
- request_interval: 10
- require:
- docker_container: so-hydra
{% else %}
{{sls}}_no_license_detected:
test.fail_without_changes:
- name: {{sls}}_no_license_detected
- comment:
- "This is a feature supported only for customers with a valid license.
Contact Security Onion Solutions, LLC via our website at https://securityonionsolutions.com
for more information about purchasing a license to enable this feature."
include:
- hydra.disabled
{% endif %}
{% else %}
{{sls}}_state_not_allowed:
test.fail_without_changes:
- name: {{sls}}_state_not_allowed
{% endif %}

View File

@@ -0,0 +1 @@
{{ HYDRAMERGED.config | yaml(false) }}

13
salt/hydra/init.sls Normal file
View File

@@ -0,0 +1,13 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
{% from 'hydra/map.jinja' import HYDRAMERGED %}
include:
{% if HYDRAMERGED.enabled %}
- hydra.enabled
{% else %}
- hydra.disabled
{% endif %}

13
salt/hydra/map.jinja Normal file
View File

@@ -0,0 +1,13 @@
{# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
https://securityonion.net/license; you may not use this file except in compliance with the
Elastic License 2.0. #}
{% from 'vars/globals.map.jinja' import GLOBALS %}
{% import_yaml 'hydra/defaults.yaml' as HYDRADEFAULTS %}
{% do HYDRADEFAULTS.hydra.config.urls.self.update({'issuer': HYDRADEFAULTS.hydra.config.urls.self.issuer | replace("URL_BASE", GLOBALS.url_base)}) %}
{% do HYDRADEFAULTS.hydra.config.urls.self.update({'public': HYDRADEFAULTS.hydra.config.urls.self.public | replace("URL_BASE", GLOBALS.url_base)}) %}
{% do HYDRADEFAULTS.hydra.config.urls.self.update({'admin': HYDRADEFAULTS.hydra.config.urls.self.admin | replace("URL_BASE", GLOBALS.url_base)}) %}
{% set HYDRAMERGED = salt['pillar.get']('hydra', default=HYDRADEFAULTS.hydra, merge=true) %}

28
salt/hydra/soc_hydra.yaml Normal file
View File

@@ -0,0 +1,28 @@
hydra:
enabled:
description: Enables or disables the API authentication system, used for service account authentication. Enabling this feature requires a valid Security Onion license key. Defaults to False.
helpLink: connect.html
config:
ttl:
access_token:
description: Amount of time that the generated access token will be valid. Specified in the form of 2h, which means 2 hours.
global: True
forcedType: string
helpLink: connect.html
log:
level:
description: Log level to use for Hydra logs.
global: True
helpLink: connect.html
format:
description: Log output format for Hydra logs.
global: True
helpLink: connect.html
secrets:
system:
description: Secrets used for token generation. Generated during installation.
global: True
sensitive: True
advanced: True
forcedType: "[]string"
helpLink: connect.html
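These SOC annotations map onto the `hydra` pillar keys shown in salt/hydra/defaults.yaml above. As a hypothetical illustration only (the path mirrors the pillar files created by the add_hydra_pillars function added to soup later in this commit; the values are placeholders), a local override could be written like this:
```
# Hypothetical local pillar override; keys mirror salt/hydra/defaults.yaml.
cat <<'EOF' >> /opt/so/saltstack/local/pillar/hydra/adv_hydra.sls
hydra:
  config:
    ttl:
      access_token: 2h
    log:
      level: info
EOF
```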

21
salt/hydra/sostatus.sls Normal file
View File

@@ -0,0 +1,21 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}
append_so-hydra_so-status.conf:
file.append:
- name: /opt/so/conf/so-status/so-status.conf
- text: so-hydra
- unless: grep -q so-hydra /opt/so/conf/so-status/so-status.conf
{% else %}
{{sls}}_state_not_allowed:
test.fail_without_changes:
- name: {{sls}}_state_not_allowed
{% endif %}

File diff suppressed because one or more lines are too long

View File

@@ -50,6 +50,10 @@ kratos:
ui_url: https://URL_BASE/login/
registration:
ui_url: https://URL_BASE/login/
after:
oidc:
hooks:
- hook: session
default_browser_return_url: https://URL_BASE/
allowed_return_urls:
- http://127.0.0.1

View File

@@ -126,7 +126,6 @@ kratos:
issuer:
description: The name to show in the MFA authenticator app. Useful for differentiating between installations that share the same user email address.
global: True
advanced: True
helpLink: kratos.html
webauthn:
enabled:

View File

@@ -40,6 +40,16 @@ logrotate:
- extension .log
- dateext
- dateyesterday
/opt/so/log/hydra/*_x_log:
- daily
- rotate 14
- missingok
- copytruncate
- compress
- create
- extension .log
- dateext
- dateyesterday
/opt/so/log/kibana/*_x_log:
- daily
- rotate 14

View File

@@ -28,6 +28,13 @@ logrotate:
multiline: True
global: True
forcedType: "[]string"
"/opt/so/log/hydra/*_x_log":
description: List of logrotate options for this file.
title: /opt/so/log/hydra/*.log
advanced: True
multiline: True
global: True
forcedType: "[]string"
"/opt/so/log/kibana/*_x_log":
description: List of logrotate options for this file.
title: /opt/so/log/kibana/*.log

View File

@@ -6,6 +6,10 @@ so-user.lock:
file.missing:
- name: /var/tmp/so-user.lock
so-client.lock:
file.missing:
- name: /var/tmp/so-client.lock
# Must run before elasticsearch docker container is started!
sync_es_users:
cmd.run:

411
salt/manager/tools/sbin/so-client Executable file
View File

@@ -0,0 +1,411 @@
#!/bin/bash
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
if [[ -f /usr/sbin/so-common ]]; then
source /usr/sbin/so-common
else
source $(dirname $0)/../../../common/tools/sbin/so-common
fi
function usage() {
cat <<USAGE_EOF
Usage: $0 <operation> [supporting parameters]
where <operation> is one of the following:
list: Lists all client IDs and permissions currently defined in the oauth2 system
add: Adds a new client to the oauth2 system and outputs the generated secret
Required parameters:
--name <name>
Optional parameters:
--note <note> (defaults to blank)
--json output as JSON
delete: Deletes a client from the oauth2 system
Required parameters:
--id <id>
addperm: Grants a permission to an existing client
Required parameters:
--id <id>
--permission <permission>
delperm: Removes a permission from an existing client
Required parameters:
--id <id>
--permission <permission>
update: Updates a client's name, note, and run-as username.
Required parameters:
--id <id>
--name <name>
--note <note>
--searchusername <run-as username>
generate-secret: Regenerates a client's secret and outputs the new secret.
Required parameters:
--id <id>
Optional parameters:
--json output as JSON
USAGE_EOF
exit 1
}
if [[ $# -lt 1 || $1 == --help || $1 == -h || $1 == -? || $1 == --h ]]; then
usage
fi
operation=$1
shift
searchUsername=__MISSING__
note=__MISSING__
while [[ $# -gt 0 ]]; do
param=$1
shift
case "$param" in
--id)
id=$(echo $1 | sed 's/"/\\"/g')
[[ ${#id} -gt 55 ]] && fail "id cannot be longer than 55 characters"
shift
;;
--permission)
perm=$(echo $1 | sed 's/"/\\"/g')
[[ ${#perm} -gt 50 ]] && fail "permission cannot be longer than 50 characters"
shift
;;
--name)
name=$(echo $1 | sed 's/"/\\"/g')
[[ ${#name} -gt 50 ]] && fail "name cannot be longer than 50 characters"
shift
;;
--note)
note=$(echo $1 | sed 's/"/\\"/g')
[[ ${#note} -gt 100 ]] && fail "note cannot be longer than 100 characters"
shift
;;
--searchusername)
searchUsername=$(echo $1 | sed 's/"/\\"/g')
[[ ${#searchUsername} -gt 50 ]] && fail "search username cannot be longer than 50 characters"
shift
;;
--json)
json=1
;;
*)
echo "Encountered unexpected parameter: $param"
usage
;;
esac
done
hydraUrl=${HYDRA_URL:-http://127.0.0.1:4445}
socRolesFile=${SOC_ROLES_FILE:-/opt/so/conf/soc/soc_clients_roles}
soUID=${SOCORE_UID:-939}
soGID=${SOCORE_GID:-939}
function lock() {
# Obtain file descriptor lock
exec 99>/var/tmp/so-client.lock || fail "Unable to create lock descriptor; if the system was not shutdown gracefully you may need to remove /var/tmp/so-client.lock manually."
flock -w 10 99 || fail "Another process is using so-client; if the system was not shutdown gracefully you may need to remove /var/tmp/so-client.lock manually."
trap 'rm -f /var/tmp/so-client.lock' EXIT
}
function fail() {
msg=$1
echo "$1"
exit 1
}
function require() {
cmd=$1
which "$1" 2>&1 > /dev/null
[[ $? != 0 ]] && fail "This script requires the following command be installed: ${cmd}"
}
# Verify this environment is capable of running this script
function verifyEnvironment() {
require "jq"
require "curl"
response=$(curl -Ss -L ${hydraUrl}/)
[[ "$response" != *"Error 404"* ]] && fail "Unable to communicate with Hydra; specify URL via HYDRA_URL environment variable"
}
function createFile() {
filename=$1
uid=$2
gid=$3
mkdir -p $(dirname "$filename")
truncate -s 0 "$filename"
chmod 600 "$filename"
chown "${uid}:${gid}" "$filename"
}
function ensureRoleFileExists() {
if [[ ! -f "$socRolesFile" ]]; then
# Generate the new roles file
rolesTmpFile="${socRolesFile}.tmp"
createFile "$rolesTmpFile" "$soUID" "$soGID"
if [[ -d "$socRolesFile" ]]; then
echo "Removing invalid roles directory created by Docker"
rm -fr "$socRolesFile"
fi
mv "${rolesTmpFile}" "${socRolesFile}"
fi
}
function listClients() {
response=$(curl -Ss -L -f ${hydraUrl}/admin/clients)
[[ $? != 0 ]] && fail "Unable to communicate with Hydra"
clientIds=$(echo "${response}" | jq -r ".[] | .client_id" | sort)
for clientId in $clientIds; do
perms=$(grep ":$clientId\$" "$socRolesFile" | cut -d: -f1 | tr '\n' ' ')
echo "$clientId: $perms"
done
}
function addClientPermission() {
id=$1
perm=$2
adjustClientPermission "$id" "$perm" "add"
}
function deleteClientPermission() {
id=$1
perm=$2
adjustClientPermission "$id" "$perm" "del"
}
function adjustClientPermission() {
identityId=$1
perm=$2
op=$3
[[ ${identityId} == "" ]] && fail "Client not found"
ensureRoleFileExists
filename="$socRolesFile"
hasPerm=0
grep "^$perm:" "$socRolesFile" | grep -q "$identityId" && hasPerm=1
if [[ "$op" == "add" ]]; then
if [[ "$hasPerm" == "1" ]]; then
echo "Client '$identityId' already has the permission: $perm"
return 1
else
echo "$perm:$identityId" >> "$filename"
fi
elif [[ "$op" == "del" ]]; then
if [[ "$hasPerm" -ne 1 ]]; then
fail "Client '$identityId' does not have the permission: $perm"
else
sed -e "\!^$perm:$identityId\$!d" "$filename" > "$filename.tmp"
cat "$filename".tmp > "$filename"
rm -f "$filename".tmp
fi
else
fail "Unsupported permission adjustment operation: $op"
fi
return 0
}
function convertNameToId() {
name=$1
name=${name//[^[:alnum:]]/_}
echo "socl_$name" | tr '[:upper:]' '[:lower:]'
}
function createClient() {
name=$1
note=$2
id=$(convertNameToId "$name")
now=$(date -u +%FT%TZ)
secret=$(get_random_value)
body=$(cat <<EOF
{
"access_token_strategy": "opaque",
"client_id": "$id",
"client_secret": "$secret",
"client_name": "$name",
"grant_types": [ "client_credentials" ],
"response_types": [ "code" ],
"metadata": {
"note": "$note",
"searchUsername": ""
}
}
EOF
)
response=$(curl -Ss -L --fail-with-body -X POST ${hydraUrl}/admin/clients -d "$body")
if [[ $? != 0 ]]; then
error=$(echo $response | jq .error)
fail "Failed to submit request to Hydra: $error"
fi
}
function update() {
clientId=$1
name=$2
note=$3
username=$4
body=$(cat <<EOF
[
{
"op": "replace",
"path": "/client_name",
"value": "$name"
},
{
"op": "replace",
"path": "/metadata",
"value": {
"note": "$note",
"searchUsername": "$username"
}
}
]
EOF
)
response=$(curl -Ss -L --fail-with-body -X PATCH ${hydraUrl}/admin/clients/$id -d "$body")
if [[ $? != 0 ]]; then
error=$(echo $response | jq .error)
fail "Failed to submit request to Hydra: $error"
fi
}
function generateSecret() {
clientId=$1
secret=$(get_random_value)
body=$(cat <<EOF
[
{
"op": "replace",
"path": "/client_secret",
"value": "$secret"
}
]
EOF
)
response=$(curl -Ss -L --fail-with-body -X PATCH ${hydraUrl}/admin/clients/$id -d "$body")
if [[ $? != 0 ]]; then
error=$(echo $response | jq .error)
fail "Failed to submit request to Hydra: $error"
fi
}
function deleteClient() {
identityId=$1
[[ ${identityId} == "" ]] && fail "Client not found"
response=$(curl -Ss -XDELETE -L --fail-with-body "${hydraUrl}/admin/clients/$identityId")
if [[ $? != 0 ]]; then
error=$(echo $response | jq .error)
fail "Failed to submit request to Hydra: $error"
fi
rolesTmpFile="${socRolesFile}.tmp"
createFile "$rolesTmpFile" "$soUID" "$soGID"
grep -v "$identityId" "$socRolesFile" > "$rolesTmpFile"
cat "$rolesTmpFile" > "$socRolesFile"
}
case "${operation}" in
"add")
verifyEnvironment
[[ "$name" == "" ]] && fail "A short client name must be provided"
lock
createClient "$name" "$note"
if [[ "$json" == "1" ]]; then
echo "{\"id\":\"$id\",\"secret\":\"$secret\"}"
else
echo "Successfully added client ID $id with generated secret: $secret"
fi
;;
"list")
verifyEnvironment
listClients
;;
"addperm")
verifyEnvironment
[[ "$id" == "" ]] && fail "Id must be provided"
[[ "$perm" == "" ]] && fail "Permission must be provided"
lock
if addClientPermission "$id" "$perm"; then
echo "Successfully added permission to client"
fi
;;
"delperm")
verifyEnvironment
[[ "$id" == "" ]] && fail "Id must be provided"
[[ "$perm" == "" ]] && fail "Permission must be provided"
lock
deleteClientPermission "$id" "$perm"
echo "Successfully removed permission from client"
;;
"update")
verifyEnvironment
[[ "$id" == "" ]] && fail "Id must be provided"
[[ "$name" == "" ]] && fail "Name must be provided"
[[ "$note" == "__MISSING__" ]] && fail "Note must be provided"
[[ "$searchUsername" == "__MISSING__" ]] && fail "Search Username must be provided"
lock
update "$id" "$name" "$note" "$searchUsername"
echo "Successfully updated client"
;;
"generate-secret")
verifyEnvironment
[[ "$id" == "" ]] && fail "Id must be provided"
lock
generateSecret "$id"
if [[ "$json" == "1" ]]; then
echo "{\"secret\":\"$secret\"}"
else
echo "Successfully generated secret: $secret"
fi
;;
"delete")
verifyEnvironment
[[ "$id" == "" ]] && fail "Id must be provided"
lock
deleteClient "$id"
echo "Successfully deleted client."
;;
*)
fail "Unsupported operation: $operation"
usage
;;
esac
exit 0
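As a usage illustration only (the client name, note, and permission strings below are hypothetical placeholders; the flags are those documented in the usage text above):
```
# Create a client and capture the generated ID/secret as JSON (hypothetical name/note)
sudo so-client add --name "soc-automation" --note "CI integration" --json

# Grant and later revoke a permission (permission value is a placeholder)
sudo so-client addperm --id socl_soc_automation --permission "events/read"
sudo so-client delperm --id socl_soc_automation --permission "events/read"

# List all clients and their permissions
sudo so-client list
```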

View File

@@ -100,23 +100,23 @@ while [[ $# -gt 0 ]]; do
shift
case "$param" in
--email)
email=$1
email=$(echo $1 | sed 's/"/\\"/g')
shift
;;
--role)
role=$1
role=$(echo $1 | sed 's/"/\\"/g')
shift
;;
--firstName)
firstName=$1
firstName=$(echo $1 | sed 's/"/\\"/g')
shift
;;
--lastName)
lastName=$1
lastName=$(echo $1 | sed 's/"/\\"/g')
shift
;;
--note)
note=$1
note=$(echo $1 | sed 's/"/\\"/g')
shift
;;
--skip-sync)
@@ -241,6 +241,10 @@ function updatePassword() {
[[ $? != 0 ]] && fail "Unable to clear aal2 identity IDs"
echo "delete from identity_credentials where identity_id='${identityId}' and identity_credential_type_id in (select id from identity_credential_types where name in ('totp', 'webauthn', 'oidc'));" | sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath"
[[ $? != 0 ]] && fail "Unable to clear aal2 identity credentials"
echo "delete from session_devices where session_id in (select id from sessions where identity_id='${identityId}');" | sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath"
[[ $? != 0 ]] && fail "Unable to clear session devices"
echo "delete from sessions where identity_id='${identityId}';" | sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath"
[[ $? != 0 ]] && fail "Unable to clear sessions"
echo "update identities set available_aal='aal1' where id='${identityId}';" | sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath"
[[ $? != 0 ]] && fail "Unable to reset aal"
fi
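The two added deletes clear any active login sessions for the identity, removing the device records first and then the sessions themselves. A quick way to confirm the cleanup, reusing the same sqlite3 invocation style as this function (the identity id is a placeholder):

```
echo "select count(*) from sessions where identity_id='<identity id>';" | \
  sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath"
```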
@@ -357,7 +361,6 @@ function syncElastic() {
random_crypt=$(get_random_value 53)
user_data_formatted=$(echo "${user_data_formatted}" | sed -r "s/^(.+:)\$/\\1\$2a\$12${random_crypt}/")
fi
echo "${user_data_formatted}" >> "$usersTmpFile"
# Append the user roles
@@ -373,7 +376,6 @@ function syncElastic() {
sqlite3 -cmd ".timeout ${databaseTimeout}" "$databasePath" >> "$rolesTmpFile"
[[ $? != 0 ]] && fail "Unable to read role identities from database"
done < "$socRolesFile"
else
echo "Database file or soc roles file does not exist yet, skipping users export"
fi

View File

@@ -404,7 +404,8 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.80 ]] && up_to_2.4.90
[[ "$INSTALLEDVERSION" == 2.4.90 ]] && up_to_2.4.100
[[ "$INSTALLEDVERSION" == 2.4.100 ]] && up_to_2.4.110
[[ "$INSTALLEDVERSION" == 2.4.110 ]] && up_to_2.4.120
[[ "$INSTALLEDVERSION" == 2.4.110 ]] && up_to_2.4.111
[[ "$INSTALLEDVERSION" == 2.4.111 ]] && up_to_2.4.120
true
}
@@ -426,7 +427,8 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.80 ]] && post_to_2.4.90
[[ "$POSTVERSION" == 2.4.90 ]] && post_to_2.4.100
[[ "$POSTVERSION" == 2.4.100 ]] && post_to_2.4.110
[[ "$POSTVERSION" == 2.4.110 ]] && post_to_2.4.120
[[ "$POSTVERSION" == 2.4.110 ]] && post_to_2.4.111
[[ "$POSTVERSION" == 2.4.111 ]] && post_to_2.4.120
true
}
@@ -519,8 +521,17 @@ post_to_2.4.110() {
POSTVERSION=2.4.110
}
post_to_2.4.120() {
post_to_2.4.111() {
echo "Nothing to apply"
POSTVERSION=2.4.111
}
post_to_2.4.120() {
update_elasticsearch_index_settings
# Manually roll over the Suricata alerts index to ensure the expected data_stream.dataset mapping of 'suricata' is applied
rollover_index "logs-suricata.alerts-so"
POSTVERSION=2.4.120
}
@@ -714,9 +725,17 @@ up_to_2.4.110() {
INSTALLEDVERSION=2.4.110
}
up_to_2.4.111() {
echo "Nothing to do for 2.4.111"
INSTALLEDVERSION=2.4.111
}
up_to_2.4.120() {
add_hydra_pillars
# this is needed for the new versionlock state
mkdir /opt/so/saltstack/local/pillar/versionlock
mkdir -p /opt/so/saltstack/local/pillar/versionlock
touch /opt/so/saltstack/local/pillar/versionlock/adv_versionlock.sls /opt/so/saltstack/local/pillar/versionlock/soc_versionlock.sls
# New Grid Integration added this release
@@ -725,6 +744,26 @@ up_to_2.4.120() {
INSTALLEDVERSION=2.4.120
}
add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
chmod 660 /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
touch /opt/so/saltstack/local/pillar/hydra/adv_hydra.sls
HYDRAKEY=$(get_random_value)
HYDRASALT=$(get_random_value)
printf '%s\n'\
"hydra:"\
" config:"\
" secrets:"\
" system:"\
" - '$HYDRAKEY'"\
" oidc:"\
" subject_identifiers:"\
" pairwise:"\
" salt: '$HYDRASALT'"\
"" > /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
}
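For reference, the pillar file written by add_hydra_pillars ends up with a structure like the sketch below; the secret values are placeholders rather than real output, and the exact indentation comes from the printf above:

```
# /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls (values are placeholders)
hydra:
  config:
    secrets:
      system:
        - '<random value>'
    oidc:
      subject_identifiers:
        pairwise:
          salt: '<random value>'
```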
add_detection_test_pillars() {
if [[ -n "$SOUP_INTERNAL_TESTING" ]]; then
echo "Adding detection pillar values for automated testing"
@@ -771,6 +810,22 @@ ASSIST_EOF
fi
}
rollover_index() {
idx=$1
exists=$(so-elasticsearch-query $idx -o /dev/null -w "%{http_code}")
if [[ $exists -eq 200 ]]; then
rollover=$(so-elasticsearch-query $idx/_rollover -o /dev/null -w "%{http_code}" -XPOST)
if [[ $rollover -eq 200 ]]; then
echo "Successfully triggered rollover for $idx..."
else
echo "Could not trigger rollover for $idx..."
fi
else
echo "Could not find index $idx..."
fi
}
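rollover_index is a thin wrapper around the Elasticsearch rollover API via so-elasticsearch-query; the equivalent manual calls for the index rolled over in post_to_2.4.120 look like this (a sketch of the same two requests the function issues):

```
# Check that the target exists, then trigger the rollover (same calls rollover_index makes).
so-elasticsearch-query logs-suricata.alerts-so -o /dev/null -w "%{http_code}"
so-elasticsearch-query logs-suricata.alerts-so/_rollover -o /dev/null -w "%{http_code}" -XPOST
```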
suricata_idstools_migration() {
#Backup the pillars for idstools
mkdir -p /nsm/backup/detections-migration/idstools
@@ -922,7 +977,7 @@ update_airgap_rules() {
rsync -av $UPDATE_DIR/agrules/detect-sigma/* /nsm/rules/detect-sigma/
rsync -av $UPDATE_DIR/agrules/detect-yara/* /nsm/rules/detect-yara/
# Copy the securityonion-resources repo over for SOC Detection Summaries and check out the published summaries branch
rsync -av --chown=socore:socore $UPDATE_DIR/agrules/securityonion-resources /opt/so/conf/soc/ai_summary_repos
rsync -av --delete --chown=socore:socore $UPDATE_DIR/agrules/securityonion-resources /opt/so/conf/soc/ai_summary_repos
git config --global --add safe.directory /opt/so/conf/soc/ai_summary_repos/securityonion-resources
git -C /opt/so/conf/soc/ai_summary_repos/securityonion-resources checkout generated-summaries-published
# Copy the securityonion-resources repo over to nsm
@@ -938,6 +993,30 @@ update_airgap_repo() {
createrepo /nsm/repo
}
update_elasticsearch_index_settings() {
# Update managed indices to reflect latest index template
for idx in "so-detection" "so-detectionhistory" "so-case" "so-casehistory"; do
ilm_name=$idx
if [ "$idx" = "so-detectionhistory" ]; then
ilm_name="so-detection"
elif [ "$idx" = "so-casehistory" ]; then
ilm_name="so-case"
fi
JSON_STRING=$( jq -n --arg ILM_NAME "$ilm_name" '{"settings": {"index.auto_expand_replicas":"0-2","index.lifecycle.name":($ILM_NAME + "-logs")}}')
echo "Checking if index \"$idx\" exists"
exists=$(curl -K /opt/so/conf/elasticsearch/curl.config -s -o /dev/null -w "%{http_code}" -k -L -H "Content-Type: application/json" "https://localhost:9200/$idx")
if [ $exists -eq 200 ]; then
echo "$idx index found..."
echo "Updating $idx index settings"
curl -K /opt/so/conf/elasticsearch/curl.config -s -k -L -H "Content-Type: application/json" "https://localhost:9200/$idx/_settings" -d "$JSON_STRING" -XPUT
echo -e "\n"
else
echo -e "Skipping $idx... index does not exist\n"
fi
done
}
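The jq -n call above only builds a small settings document; for the so-detection index it resolves to the JSON shown in the comment below (a sketch of the same expression run standalone):

```
jq -n --arg ILM_NAME "so-detection" \
  '{"settings": {"index.auto_expand_replicas":"0-2","index.lifecycle.name":($ILM_NAME + "-logs")}}'
# {
#   "settings": {
#     "index.auto_expand_replicas": "0-2",
#     "index.lifecycle.name": "so-detection-logs"
#   }
# }
```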
update_salt_mine() {
echo "Populating the mine with mine_functions for each host."
set +e
@@ -1003,12 +1082,12 @@ upgrade_salt() {
# if oracle run with -r to ignore repos set by bootstrap
if [[ $OS == 'oracle' ]]; then
run_check_net_err \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -r -F -M -x python3 stable \"$NEWSALTVERSION\"" \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -r -F -M stable \"$NEWSALTVERSION\"" \
"Could not update salt, please check $SOUP_LOG for details."
# if another rhel family variant we want to run without -r to allow the bootstrap script to manage repos
else
run_check_net_err \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -F -M -x python3 stable \"$NEWSALTVERSION\"" \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -F -M stable \"$NEWSALTVERSION\"" \
"Could not update salt, please check $SOUP_LOG for details."
fi
set -e
@@ -1028,7 +1107,7 @@ upgrade_salt() {
echo ""
set +e
run_check_net_err \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -F -M -x python3 stable \"$NEWSALTVERSION\"" \
"sh $UPDATE_DIR/salt/salt/scripts/bootstrap-salt.sh -X -F -M stable \"$NEWSALTVERSION\"" \
"Could not update salt, please check $SOUP_LOG for details."
set -e
echo "Applying apt hold for Salt."
@@ -1334,6 +1413,10 @@ main() {
echo "Running a highstate to complete the Security Onion upgrade on this manager. This could take several minutes."
(wait_for_salt_minion "$MINIONID" "5" '/dev/stdout' || fail "Salt minion was not running or ready.") 2>&1 | tee -a "$SOUP_LOG"
# Stop long-running scripts to allow potentially updated scripts to load on the next execution.
killall salt-relay.sh
highstate
postupgrade_changes
[[ $is_airgap -eq 0 ]] && unmount_update

View File

@@ -49,33 +49,12 @@ navigatorconfig:
- makedirs: True
- template: jinja
navigatordefaultlayer:
file.managed:
- name: /opt/so/conf/navigator/layers/nav_layer_playbook.json
- source: salt://nginx/files/nav_layer_playbook.json
navigatorlayersdir:
file.directory:
- name: /opt/so/conf/navigator/layers/
- user: 939
- group: 939
- makedirs: True
- replace: False
- template: jinja
navigatorpreattack:
file.managed:
- name: /opt/so/conf/navigator/layers/pre-attack.json
- source: salt://nginx/files/pre-attack.json
- user: 939
- group: 939
- makedirs: True
- replace: False
navigatorenterpriseattack:
file.managed:
- name: /opt/so/conf/navigator/layers/enterprise-attack.json
- source: salt://nginx/files/enterprise-attack.json
- user: 939
- group: 939
- makedirs: True
- replace: False
nginx_sbin:
file.recurse:

View File

@@ -164,7 +164,6 @@ so-nginx:
- x509: managerssl_crt
{% endif%}
- file: navigatorconfig
- file: navigatordefaultlayer
{% endif %}
delete_so-nginx_so-status.disabled:

View File

@@ -89,11 +89,18 @@ http {
server_name _;
return 307 https://{{ GLOBALS.url_base }}$request_uri;
add_header Content-Security-Policy "default-src 'self' 'unsafe-inline' 'unsafe-eval' https: data: blob: wss:; frame-ancestors 'self'";
add_header X-Frame-Options SAMEORIGIN;
add_header X-XSS-Protection "1; mode=block";
add_header X-Content-Type-Options nosniff;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
add_header referrer-Policy no-referrer;
ssl_certificate "/etc/pki/nginx/server.crt";
ssl_certificate_key "/etc/pki/nginx/server.key";
ssl_session_cache shared:SSL:1m;
ssl_session_timeout 10m;
ssl_ciphers HIGH:!aNULL:!MD5;
ssl_ciphers TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256:TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256:TLS_RSA_WITH_AES_256_GCM_SHA384:TLS_RSA_WITH_AES_256_CCM:TLS_RSA_WITH_ARIA_256_GCM_SHA384:TLS_RSA_WITH_AES_128_GCM_SHA256:TLS_RSA_WITH_AES_128_CCM:TLS_RSA_WITH_ARIA_128_GCM_SHA256;
ssl_ecdh_curve secp521r1:secp384r1;
ssl_prefer_server_ciphers on;
ssl_protocols TLSv1.2;
@@ -123,11 +130,19 @@ http {
http2 on;
server_name {{ GLOBALS.url_base }};
root /surirules;
add_header Content-Security-Policy "default-src 'self' 'unsafe-inline' 'unsafe-eval' https: data: blob: wss:; frame-ancestors 'self'";
add_header X-Frame-Options SAMEORIGIN;
add_header X-XSS-Protection "1; mode=block";
add_header X-Content-Type-Options nosniff;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
add_header referrer-Policy no-referrer;
ssl_certificate "/etc/pki/nginx/server.crt";
ssl_certificate_key "/etc/pki/nginx/server.key";
ssl_session_cache shared:SSL:1m;
ssl_session_timeout 10m;
ssl_ciphers HIGH:!aNULL:!MD5;
ssl_ciphers TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256:TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256:TLS_RSA_WITH_AES_256_GCM_SHA384:TLS_RSA_WITH_AES_256_CCM:TLS_RSA_WITH_ARIA_256_GCM_SHA384:TLS_RSA_WITH_AES_128_GCM_SHA256:TLS_RSA_WITH_AES_128_CCM:TLS_RSA_WITH_ARIA_128_GCM_SHA256;
ssl_prefer_server_ciphers on;
ssl_protocols TLSv1.2;
location / {
@@ -153,13 +168,14 @@ http {
add_header X-Frame-Options SAMEORIGIN;
add_header X-XSS-Protection "1; mode=block";
add_header X-Content-Type-Options nosniff;
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload";
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
add_header referrer-Policy no-referrer;
ssl_certificate "/etc/pki/nginx/server.crt";
ssl_certificate_key "/etc/pki/nginx/server.key";
ssl_session_cache shared:SSL:1m;
ssl_session_timeout 10m;
ssl_ciphers HIGH:!aNULL:!MD5;
ssl_ciphers TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256:TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384:TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256:TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256:TLS_RSA_WITH_AES_256_GCM_SHA384:TLS_RSA_WITH_AES_256_CCM:TLS_RSA_WITH_ARIA_256_GCM_SHA384:TLS_RSA_WITH_AES_128_GCM_SHA256:TLS_RSA_WITH_AES_128_CCM:TLS_RSA_WITH_ARIA_128_GCM_SHA256;
ssl_prefer_server_ciphers on;
ssl_protocols TLSv1.2;
@@ -219,6 +235,37 @@ http {
proxy_set_header X-Forwarded-Proto $scheme;
}
{% if 'api' in salt['pillar.get']('features', []) %}
location ~* (^/oauth2/token.*|^.well-known/jwks.json|^.well-known/openid-configuration) {
limit_req zone=auth_throttle burst={{ NGINXMERGED.config.throttle_login_burst }} nodelay;
limit_req_status 429;
proxy_pass http://{{ GLOBALS.manager }}:4444;
proxy_read_timeout 90;
proxy_connect_timeout 90;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}
location /connect/ {
if ($http_authorization !~ "Bearer .*") {
return 401;
}
rewrite /connect/(.*) /api/$1 break;
proxy_pass http://{{ GLOBALS.manager }}:9822/;
proxy_read_timeout 300;
proxy_connect_timeout 300;
proxy_set_header x-user-id "";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}
{%- endif %}
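Together, the two locations above give external integrations a browserless authentication path: obtain a bearer token from /oauth2/token (rate limited by the auth_throttle zone) and then call SOC through /connect/, which nginx rewrites to /api/ on the SOC backend. A hedged sketch; the path under /connect/ is a placeholder, not a documented endpoint:

```
# TOKEN holds an access token obtained from /oauth2/token (see the earlier sketch).
# /connect/<path> is rewritten by nginx to /api/<path> before reaching SOC.
curl -sk -H "Authorization: Bearer ${TOKEN}" \
  "https://securityonion.example.com/connect/<path>"
```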
location /cyberchef/ {
auth_request /auth/sessions/whoami;
proxy_read_timeout 90;
@@ -329,6 +376,9 @@ http {
error_page 429 = @error429;
location @error401 {
if ($request_uri ~* (^/connect/.*|^/oauth2/.*)) {
return 401;
}
if ($request_uri ~* ^/(?!(^/api/.*))) {
add_header Set-Cookie "AUTH_REDIRECT=$request_uri;Path=/;Max-Age=14400";
}
@@ -336,6 +386,9 @@ http {
}
location @error403 {
if ($request_uri ~* (^/connect/.*|^/oauth2/.*)) {
return 403;
}
add_header Set-Cookie "ory_kratos_session=;Path=/;Max-Age=0;expires=Thu, 01 Jan 1970 00:00:00 GMT;";
return 302 /auth/self-service/login/browser;
}

File diff suppressed because it is too large

View File

@@ -1,65 +0,0 @@
{
"name": "Playbook Coverage",
"versions": {
"attack": "14",
"navigator": "4.9.1",
"layer": "4.5"
},
"domain": "enterprise-attack",
"description": "",
"filters": {
"platforms": [
"Linux",
"macOS",
"Windows",
"Network",
"PRE",
"Containers",
"Office 365",
"SaaS",
"Google Workspace",
"IaaS",
"Azure AD"
]
},
"sorting": 0,
"layout": {
"layout": "side",
"aggregateFunction": "average",
"showID": false,
"showName": true,
"showAggregateScores": false,
"countUnscored": false,
"expandedSubtechniques": "none"
},
"hideDisabled": false,
"techniques": [
{
"techniqueID": "T1197",
"tactic": "defense-evasion",
"score": 100,
"color": "",
"comment": "",
"enabled": true,
"metadata": [],
"links": [],
"showSubtechniques": false
}
],
"gradient": {
"colors": [
"#ffffff00",
"#66b1ffff"
],
"minValue": 0,
"maxValue": 100
},
"legendItems": [],
"metadata": [],
"links": [],
"showTacticRowBackground": false,
"tacticRowBackground": "#dddddd",
"selectTechniquesAcrossTactics": true,
"selectSubtechniquesWithParent": false,
"selectVisibleTechniques": false
}

View File

@@ -1,33 +1,53 @@
{%- set URL_BASE = salt['pillar.get']('global:url_base', '') %}
{
"versions": [
"collection_index_url": "",
"versions": {
"enabled": true,
"entries": [
{
"name": "ATT&CK v14",
"version": "14",
"name": "MITRE ATT&CK",
"version": "16",
"domains": [
{
{
"name": "Enterprise",
"identifier": "enterprise-attack",
"data": ["assets/so/enterprise-attack.json"]
"data": ["assets/mitre/enterprise-attack.json"]
}
]
}
],
]
},
"custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}],
"custom_context_menu_items": [
{"label": "View related Detections","url": " https://{{URL_BASE}}/#/detections?q=*{{ "{{technique_attackID}}" }}*+|+groupby+so_detection.language+|+groupby+so_detection.ruleset+so_detection.isEnabled+|+groupby+%22so_detection.category%22&z=America/New_York&el=500&gl=50&rt=0&rtu=hours"},
{"label": "View related Alerts","url": " https://{{URL_BASE}}/#/alerts?q=*{{ "{{technique_attackID}}" }}*+|+groupby+rule.name+event.module*+event.severity_label+rule.uuid&z=America/New_York&el=500&gl=500&rt=15&rtu=days"}
],
"default_layers": {
"enabled": true,
"urls": ["assets/so/nav_layer_playbook.json"]
"urls": ["assets/so/navigator_layer_all_detections.json","assets/so/navigator_layer_sigma.json","assets/so/navigator_layer_suricata.json","assets/so/navigator_layer_alerts.json"]
},
"comment_color": "yellow",
"link_color": "blue",
"banner": "",
"customize_features": [
{"name": "multiselect", "enabled": true, "description": "Disable to remove the multiselect panel from interface."},
{"name": "export_render", "enabled": true, "description": "Disable to remove the button to render the current layer."},
{"name": "export_excel", "enabled": true, "description": "Disable to remove the button to export the current layer to MS Excel (.xlsx) format."},
{"name": "legend", "enabled": true, "description": "Disable to remove the legend panel from the interface."},
{"name": "background_color", "enabled": true, "description": "Disable to remove the background color effect on manually assigned colors."},
{"name": "non_aggregate_score_color", "enabled": true, "description": "Disable to remove the color effect on non-aggregate scores."},
{"name": "aggregate_score_color", "enabled": true, "description": "Disable to remove the color effect on aggregate scores."},
{"name": "comment_underline", "enabled": true, "description": "Disable to remove the comment underline effect on techniques."},
{"name": "metadata_underline", "enabled": true, "description": "Disable to remove the metadata underline effect on techniques."},
{"name": "link_underline", "enabled": true, "description": "Disable to remove the hyperlink underline effect on techniques."}
],
"features": [
{"name": "leave_site_dialog", "enabled": true, "description": "Disable to remove the dialog prompt when leaving site."},
{"name": "tabs", "enabled": true, "description": "Disable to remove the ability to open new tabs."},
{"name": "leave_site_dialog", "enabled": false, "description": "Disable to remove the dialog prompt when leaving site."},
{"name": "tabs", "disabled": true, "description": "Disable to remove the ability to open new tabs."},
{"name": "selecting_techniques", "enabled": true, "description": "Disable to remove the ability to select techniques."},
{"name": "header", "enabled": true, "description": "Disable to remove the header containing banner."},
{"name": "subtechniques", "enabled": true, "description": "Disable to remove all sub-technique features from the interface."},

File diff suppressed because it is too large

View File

@@ -20,9 +20,9 @@
{% if grains.saltversion|string != SALTVERSION|string %}
{% if grains.os_family|lower == 'redhat' %}
{% set UPGRADECOMMAND = 'yum clean all ; /usr/sbin/bootstrap-salt.sh -s 120 -r -F -x python3 stable ' ~ SALTVERSION %}
{% set UPGRADECOMMAND = 'yum clean all ; /usr/sbin/bootstrap-salt.sh -s 120 -r -F stable ' ~ SALTVERSION %}
{% elif grains.os_family|lower == 'debian' %}
{% set UPGRADECOMMAND = '/usr/sbin/bootstrap-salt.sh -s 120 -F -x python3 stable ' ~ SALTVERSION %}
{% set UPGRADECOMMAND = '/usr/sbin/bootstrap-salt.sh -s 120 -F stable ' ~ SALTVERSION %}
{% endif %}
{% else %}
{% set UPGRADECOMMAND = 'echo Already running Salt Minion version ' ~ SALTVERSION %}

View File

@@ -19,6 +19,17 @@ include:
{% if INSTALLEDSALTVERSION|string != SALTVERSION|string %}
{# this is added in 2.4.120 to remove salt repo files pointing to saltproject.io to accommodate the move to Broadcom and the new bootstrap-salt script #}
{% if salt['pkg.version_cmp'](GLOBALS.so_version, '2.4.120') == -1 %}
{% set saltrepofile = '/etc/yum.repos.d/salt.repo' %}
{% if grains.os_family == 'Debian' %}
{% set saltrepofile = '/etc/apt/sources.list.d/salt.list' %}
{% endif %}
remove_saltproject_io_repo_minion:
file.absent:
- name: {{ saltrepofile }}
{% endif %}
unhold_salt_packages:
pkg.unheld:
- pkgs:

View File

@@ -176,6 +176,15 @@ socusersroles:
- require:
- sls: manager.sync_es_users
socclientsroles:
file.managed:
- name: /opt/so/conf/soc/soc_clients_roles
- user: 939
- group: 939
- mode: 600
- allow_empty: true
- create: true
socuploaddir:
file.directory:
- name: /nsm/soc/uploads

View File

@@ -119,6 +119,13 @@ soc:
- identity_id
- http_request.headers.user-agent
- msg
':hydra:':
- soc_timestamp
- event.dataset
- http_request.headers.x-real-ip
- identity_id
- http_request.headers.user-agent
- msg
'::conn':
- soc_timestamp
- event.dataset
@@ -277,6 +284,27 @@ soc:
- kerberos.service
- kerberos.request_type
- log.id.uid
'::ldap':
- soc_timestamp
- event.dataset
- source.ip
- source.port
- destination.ip
- destination.port
- ldap.result
- ldap.common_name
- ldap.object
- ldap.opcode
'::ldap_search':
- soc_timestamp
- event.dataset
- source.ip
- source.port
- destination.ip
- destination.port
- ldap.result
- ldap.object
- ldap_search.filter
'::modbus':
- soc_timestamp
- event.dataset
@@ -332,6 +360,16 @@ soc:
- file.os
- file.subsystem
- log.id.fuid
'::quic':
- soc_timestamp
- event.dataset
- source.ip
- source.port
- destination.ip
- destination.port
- quic.server_name
- log.id.uid
- network.community_id
'::radius':
- soc_timestamp
- event.dataset
@@ -1311,6 +1349,8 @@ soc:
jobDir: jobs
kratos:
hostUrl:
hydra:
hostUrl:
elastalertengine:
aiRepoUrl: https://github.com/Security-Onion-Solutions/securityonion-resources
aiRepoBranch: generated-summaries-published
@@ -1318,16 +1358,49 @@ soc:
showAiSummaries: true
autoUpdateEnabled: true
autoEnabledSigmaRules:
default:
- core+critical
- securityonion-resources+critical
- securityonion-resources+high
so-eval:
- securityonion-resources+critical
- securityonion-resources+high
so-import:
- securityonion-resources+critical
- securityonion-resources+high
default: []
so-eval: []
so-import: []
enabledSigmaRules:
default: |-
# SOS - resources ruleset
- ruleset: ["securityonion-resources"]
level: ["critical", "high"]
product: ["*"]
category: ["*"]
service: ["*"]
# SigmaHQ - Core ruleset - Logsource: System events supported by Elastic Agent
- ruleset: ["core"]
level: ["critical"]
product: ["*"]
category: ["process_creation", "file_event", "registry_event", "network_connection", "dns_query"]
service: ["*"]
# SigmaHQ - Core ruleset - Logsource: Windows eventlogs
- ruleset: ["core"]
level: ["critical"]
product: ["windows"]
category: ["*"]
service: ["security", "system", "dns-client", "application"]
# SigmaHQ - Core ruleset - Logsource: misc
- ruleset: ["core"]
level: ["critical"]
product: ["*"]
category: ["antivirus"]
service: ["*"]
so-eval: |-
# SOS - resources ruleset
- ruleset: ["securityonion-resources"]
level: ["critical", "high"]
product: ["*"]
category: ["*"]
service: ["*"]
so-import: |-
# SOS - resources ruleset
- ruleset: ["securityonion-resources"]
level: ["critical", "high"]
product: ["*"]
category: ["*"]
service: ["*"]
communityRulesImportFrequencySeconds: 86400
communityRulesImportErrorSeconds: 300
failAfterConsecutiveErrorCount: 10
@@ -1400,6 +1473,7 @@ soc:
- rbac/custom_roles
userFiles:
- rbac/users_roles
- rbac/clients_roles
strelkaengine:
aiRepoUrl: https://github.com/Security-Onion-Solutions/securityonion-resources
aiRepoBranch: generated-summaries-published
@@ -1449,6 +1523,10 @@ soc:
integrityCheckFrequencySeconds: 1200
ignoredSidRanges:
- '1100000-1101000'
navigator:
intervalMinutes: 30
outputPath: /opt/sensoroni/navigator
lookbackDays: 3
client:
enableReverseLookup: false
docsUrl: /docs/
@@ -1639,23 +1717,23 @@ soc:
showSubtitle: true
- name: HTTP
description: HTTP grouped by destination port
query: 'tags:http | groupby destination.port'
query: '(tags:http OR tags:http2) | groupby destination.port'
showSubtitle: true
- name: HTTP
description: HTTP grouped by status code and message
query: 'tags:http | groupby http.status_code http.status_message'
query: '(tags:http OR tags:http2) | groupby http.status_code http.status_message'
showSubtitle: true
- name: HTTP
description: HTTP grouped by method and user agent
query: 'tags:http | groupby http.method http.useragent'
query: '(tags:http OR tags:http2) | groupby http.method http.useragent'
showSubtitle: true
- name: HTTP
description: HTTP grouped by virtual host
query: 'tags:http | groupby http.virtual_host'
query: '(tags:http OR tags:http2) | groupby http.virtual_host'
showSubtitle: true
- name: HTTP
description: HTTP with exe downloads
query: 'tags:http AND file.resp_mime_types:*exec* | groupby http.virtual_host'
query: '(tags:http OR tags:http2) AND file.resp_mime_types:*exec* | groupby http.virtual_host'
showSubtitle: true
- name: Intel
description: Intel framework hits grouped by indicator
@@ -1669,6 +1747,14 @@ soc:
description: KERBEROS grouped by service
query: 'tags:kerberos | groupby kerberos.service'
showSubtitle: true
- name: LDAP
description: LDAP grouped by source ip and result
query: 'tags:ldap | groupby source.ip ldap.result'
showSubtitle: true
- name: LDAP_SEARCH
description: LDAP_SEARCH grouped by source.ip and filter
query: 'tags:ldap_search | groupby source.ip | groupby ldap_search.filter'
showSubtitle: true
- name: MODBUS
description: MODBUS grouped by function
query: 'tags:modbus | groupby modbus.function'
@@ -1689,6 +1775,10 @@ soc:
description: PE files list
query: 'tags:pe | groupby file.machine file.os file.subsystem'
showSubtitle: true
- name: QUIC
description: QUIC connections
query: 'tags:quic | groupby quic.server_name | groupby source.ip quic.server_name destination.ip'
showSubtitle: true
- name: RADIUS
description: RADIUS grouped by username
query: 'tags:radius | groupby user.name'
@@ -1882,25 +1972,40 @@ soc:
query: 'tags:ftp | groupby ftp.command | groupby -sankey ftp.command source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name | groupby ftp.argument | groupby ftp.user'
- name: HTTP
description: HTTP (Hyper Text Transport Protocol) network metadata
query: 'tags:http | groupby http.method | groupby -sankey http.method http.virtual_host | groupby http.virtual_host | groupby http.uri | groupby http.useragent | groupby http.status_code | groupby http.status_message | groupby file.resp_mime_types | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'
query: '(tags:http OR tags:http2) | groupby http.method | groupby -sankey http.method http.virtual_host | groupby http.virtual_host | groupby http.uri | groupby http.useragent | groupby http.status_code | groupby http.status_message | groupby file.resp_mime_types | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'
- name: Intel
description: Zeek Intel framework hits
query: 'tags:intel | groupby intel.indicator | groupby -sankey intel.indicator source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby intel.indicator_type | groupby intel.seen_where'
- name: IPSec
description: IPSec VPN connection metadata
query: 'tags:ipsec | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby destination.geo.country_name | groupby ipsec.version'
- name: IRC
description: IRC (Internet Relay Chat) network metadata
query: 'tags:irc | groupby irc.command.type | groupby -sankey irc.command.type irc.username | groupby irc.username | groupby irc.nickname | groupby irc.command.value | groupby irc.command.info | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'
- name: Kerberos
description: Kerberos network metadata
query: 'tags:kerberos | groupby kerberos.service | groupby -sankey kerberos.service source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby kerberos.client | groupby kerberos.request_type'
- name: LDAP
description: LDAP (Lightweight Directory Access Protocol) network metadata
query: 'tags:ldap | groupby source.ip | groupby destination.ip | groupby destination.port | groupby ldap.user_email | groupby ldap.property | groupby ldap.result | groupby ldap.common_name | groupby ldap.organizational_unit | groupby ldap.domain | groupby ldap.version | groupby ldap.object'
- name: LDAP_SEARCH
description: LDAP_SEARCH (Lightweight Directory Access Protocol) Search network metadata
query: 'tags:ldap_search | groupby source.ip | groupby destination.ip | groupby destination.port | groupby ldap_search.scope | groupby ldap.object | groupby ldap.domain | groupby ldap_search.filter'
- name: MySQL
description: MySQL network metadata
query: 'tags:mysql | groupby mysql.command | groupby -sankey mysql.command source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby mysql.argument | groupby mysql.success | groupby mysql.response | groupby mysql.rows'
- name: NTLM
description: NTLM (New Technology LAN Manager) network metadata
query: 'tags:ntlm | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby ntlm.server.dns.name | groupby ntlm.server.nb.name | groupby ntlm.server.tree.name | groupby ntlm.success | groupby source.ip | groupby destination.ip'
- name: OpenVPN
description: OpenVPN connection metadata
query: 'tags:openvpn | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby destination.geo.country_name'
- name: PE
description: PE (Portable Executable) files transferred via network traffic
query: 'tags:pe | groupby file.machine | groupby -sankey file.machine file.os | groupby file.os | groupby -sankey file.os file.subsystem | groupby file.subsystem | groupby file.section_names | groupby file.is_exe | groupby file.is_64bit'
- name: QUIC
description: QUIC network metadata
query: 'tags:quic | groupby quic.server_name | groupby -sankey quic.server_name source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name | groupby quic.server_scid | groupby quic.version | groupby quic.client_protocol'
- name: RADIUS
description: RADIUS (Remote Authentication Dial-In User Service) network metadata
query: 'tags:radius | groupby user.name | groupby -sankey user.name source.ip | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'

View File

@@ -34,6 +34,7 @@ so-soc:
- /opt/so/log/soc/:/opt/sensoroni/logs/:rw
- /opt/so/conf/soc/soc.json:/opt/sensoroni/sensoroni.json:ro
- /opt/so/conf/soc/ai_summary_repos:/opt/sensoroni/ai_summary_repos:rw
- /opt/so/conf/navigator/layers/:/opt/sensoroni/navigator/:rw
{% if SOCMERGED.telemetryEnabled and not GLOBALS.airgap %}
- /opt/so/conf/soc/analytics.js:/opt/sensoroni/html/js/analytics.js:ro
{% endif %}
@@ -44,6 +45,7 @@ so-soc:
- /opt/so/conf/soc/custom.js:/opt/sensoroni/html/js/custom.js:ro
- /opt/so/conf/soc/custom_roles:/opt/sensoroni/rbac/custom_roles:ro
- /opt/so/conf/soc/soc_users_roles:/opt/sensoroni/rbac/users_roles:rw
- /opt/so/conf/soc/soc_clients_roles:/opt/sensoroni/rbac/clients_roles:rw
- /opt/so/conf/soc/queue:/opt/sensoroni/queue:rw
- /opt/so/saltstack:/opt/so/saltstack:rw
- /opt/so/conf/soc/migrations:/opt/so/conf/soc/migrations:rw
@@ -82,6 +84,7 @@ so-soc:
- file: soccustom
- file: soccustomroles
- file: socusersroles
- file: socclientsroles
delete_so-soc_so-status.disabled:
file.uncomment:

View File

@@ -89,7 +89,7 @@ function manage_user() {
add)
email=$(echo "$request" | jq -r .email)
password=$(echo "$request" | jq -r .password)
role=$(echo "$request" | jq -r .role)
perm=$(echo "$request" | jq -r .role)
firstName=$(echo "$request" | jq -r .firstName)
lastName=$(echo "$request" | jq -r .lastName)
note=$(echo "$request" | jq -r .note)
@@ -97,7 +97,7 @@ function manage_user() {
response=$(echo "$password" | so-user "$op" --email "$email" --firstName "$firstName" --lastName "$lastName" --note "$note" --role "$role" --skip-sync)
exit_code=$?
;;
add|enable|disable|delete)
enable|disable|delete)
email=$(echo "$request" | jq -r .email)
log "Performing user '$op' for user '$email'"
response=$(so-user "$op" --email "$email" --skip-sync)
@@ -155,6 +155,77 @@ function manage_user() {
fi
}
function manage_client() {
id=$1
request=$2
op=$(echo "$request" | jq -r .operation)
webResponse="true"
max_tries=10
tries=0
while [[ $tries -lt $max_tries ]]; do
case "$op" in
add)
name=$(echo "$request" | jq -r .name)
note=$(echo "$request" | jq -r .note)
log "Performing client '$op' for client with name '$name' and note '$note'"
response=$(so-client "$op" --name "$name" --note "$note" --json)
exit_code=$?
webResponse=$response
;;
delete)
client_id=$(echo "$request" | jq -r .id)
log "Performing client '$op' for client '$client_id'"
response=$(so-client "$op" --id "$client_id")
exit_code=$?
;;
addperm|delperm)
client_id=$(echo "$request" | jq -r .id)
perm=$(echo "$request" | jq -r .permission)
log "Performing '$op' for client '$client_id' with permission '$perm'"
response=$(so-client "$op" --id "$client_id" --permission "$perm")
exit_code=$?
;;
generate-secret)
client_id=$(echo "$request" | jq -r .id)
log "Performing '$op' operation for client '$client_id'"
response=$(so-client "$op" --id "$client_id" --json)
exit_code=$?
webResponse=$response
;;
update)
client_id=$(echo "$request" | jq -r .id)
name=$(echo "$request" | jq -r .name)
note=$(echo "$request" | jq -r .note)
searchusername=$(echo "$request" | jq -r .searchusername)
log "Performing '$op' update for client '$client_id' with name '$name', search username '$searchusername', and note '$note'"
response=$(so-client "$op" --id "$client_id" --name "$name" --searchusername "$searchusername" --note "$note")
exit_code=$?
;;
*)
response="Unsupported client operation: $op"
exit_code=1
;;
esac
tries=$((tries+1))
if [[ "$response" == "Another process is using so-user"* ]]; then
log "Retrying after brief delay to let so-user unlock ($tries/$max_tries)"
sleep 5
else
break
fi
done
if [[ exit_code -eq 0 ]]; then
log "Successful command execution"
respond "$id" "$webResponse"
else
log "Unsuccessful command execution: $response ($exit_code)"
respond "$id" "false"
fi
}
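manage_client expects the same JSON envelope used by the other relay handlers; based on the jq extractions above, an add request and its response look roughly like the sketch below (field values are hypothetical):

```
# Hypothetical request handed to the relay for a new client, matching the fields read by manage_client.
request='{"operation":"add","name":"soar-integration","note":"Example API client"}'
# The handler shells out to:
#   so-client add --name "soar-integration" --note "Example API client" --json
# and on success responds with the JSON printed by so-client:
#   {"id":"<client id>","secret":"<generated secret>"}
```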
function manage_salt() {
id=$1
request=$2
@@ -319,6 +390,9 @@ while true; do
list-minions)
list_minions "$id"
;;
manage-client)
manage_client "$id" "${request}"
;;
manage-minion)
manage_minion "$id" "${request}"
;;

View File

@@ -45,24 +45,25 @@ transformations:
rule_conditions:
- type: logsource
category: antivirus
# Drops the Hashes field, which is specific to Sysmon logs
# Ingested Sysmon logs will have the Hashes field mapped to ECS-specific fields
- id: hashes_drop_sysmon-specific-field
type: drop_detection_item
# Transforms the `Hashes` field into ECS fields
# These ECS fields match the hash fields emitted by Elastic Defend
# If shipped with Elastic Agent, Sysmon logs will also have their hashes mapped to ECS fields
- id: hashes_break_out_field
type: hashes_fields
valid_hash_algos: ["MD5", "SHA1", "SHA256", "SHA512", "IMPHASH"]
field_prefix: "file"
drop_algo_prefix: False
field_name_conditions:
- type: include_fields
fields:
- winlog.event_data.Hashes
rule_conditions:
- type: logsource
product: windows
- winlog.event_data.Hashes
- id: hashes_process-creation
type: field_name_mapping
mapping:
winlog.event_data.sha256: process.hash.sha256
winlog.event_data.sha1: process.hash.sha1
winlog.event_data.md5: process.hash.md5
winlog.event_data.Imphash: process.pe.imphash
fileSHA256: process.hash.sha256
fileSHA1: process.hash.sha1
fileMD5: process.hash.md5
fileIMPHASH: process.pe.imphash
rule_conditions:
- type: logsource
product: windows
@@ -70,10 +71,10 @@ transformations:
- id: hashes_image-load
type: field_name_mapping
mapping:
winlog.event_data.sha256: dll.hash.sha256
winlog.event_data.sha1: dll.hash.sha1
winlog.event_data.md5: dll.hash.md5
winlog.event_data.Imphash: dll.pe.imphash
fileSHA256: dll.hash.sha256
fileSHA1: dll.hash.sha1
fileMD5: dll.hash.md5
fileIMPHASH: dll.pe.imphash
rule_conditions:
- type: logsource
product: windows
@@ -81,10 +82,10 @@ transformations:
- id: hashes_driver-load
type: field_name_mapping
mapping:
winlog.event_data.sha256: dll.hash.sha256
winlog.event_data.sha1: dll.hash.sha1
winlog.event_data.md5: dll.hash.md5
winlog.event_data.Imphash: dll.pe.imphash
fileSHA256: dll.hash.sha256
fileSHA1: dll.hash.sha1
fileMD5: dll.hash.md5
fileIMPHASH: dll.pe.imphash
rule_conditions:
- type: logsource
product: windows

View File

@@ -35,13 +35,21 @@
{# since cases is not a valid soc config item and is only used for the map files, remove it so it is not placed in the config #}
{% do SOCMERGED.config.server.modules.pop('cases') %}
{# set Sigma rules based on role if defined and default if not #}
{# set enabled Sigma rules based on role if defined and default if not #}
{# this particular config is deprecated as of 2.4.120 - use enabledSigmaRules instead #}
{% if GLOBALS.role in SOCMERGED.config.server.modules.elastalertengine.autoEnabledSigmaRules %}
{% do SOCMERGED.config.server.modules.elastalertengine.update({'autoEnabledSigmaRules': SOCMERGED.config.server.modules.elastalertengine.autoEnabledSigmaRules[GLOBALS.role]}) %}
{% else %}
{% do SOCMERGED.config.server.modules.elastalertengine.update({'autoEnabledSigmaRules': SOCMERGED.config.server.modules.elastalertengine.autoEnabledSigmaRules.default}) %}
{% endif %}
{# set enabled Sigma rules based on role if defined and default if not #}
{% if GLOBALS.role in SOCMERGED.config.server.modules.elastalertengine.enabledSigmaRules %}
{% do SOCMERGED.config.server.modules.elastalertengine.update({'enabledSigmaRules': SOCMERGED.config.server.modules.elastalertengine.enabledSigmaRules[GLOBALS.role]}) %}
{% else %}
{% do SOCMERGED.config.server.modules.elastalertengine.update({'enabledSigmaRules': SOCMERGED.config.server.modules.elastalertengine.enabledSigmaRules.default}) %}
{% endif %}
{# set elastalertengine.rulesRepos and strelkaengine.rulesRepos based on airgap or not #}
{% if GLOBALS.airgap %}
{% do SOCMERGED.config.server.modules.elastalertengine.update({'rulesRepos': SOCMERGED.config.server.modules.elastalertengine.rulesRepos.airgap}) %}

View File

@@ -215,14 +215,29 @@ soc:
duplicates: True
forcedType: string
jinjaEscaped: True
enabledSigmaRules:
default: &enabledSigmaRules
description: 'Sigma rules to automatically enable on initial import. The format is a YAML list, with the ability to filter for ruleset, level, product, category and service. Refer to the documentation for further details. These will be applied based on role if defined and default if not.'
global: True
helpLink: sigma.html
multiline: True
syntax: yaml
forcedType: string
jinjaEscaped: True
so-eval: *enabledSigmaRules
so-import: *enabledSigmaRules
autoEnabledSigmaRules:
default: &autoEnabledSigmaRules
description: 'Sigma rules to automatically enable on initial import. Format is $Ruleset+$Level - for example, for the core community ruleset and critical level rules: core+critical. These will be applied based on role if defined and default if not.'
description: 'DEPRECATED: Will be removed in a future release - use enabledSigmaRules instead.'
global: True
advanced: True
helpLink: sigma.html
so-eval: *autoEnabledSigmaRules
so-import: *autoEnabledSigmaRules
autoUpdateEnabled:
description: 'Automatically update Sigma rules on a regular basis. This will update the rules based on the configured frequency.'
global: True
advanced: True
communityRulesImportFrequencySeconds:
description: 'How often to check for new Sigma rules (in seconds). This applies to both Community Rule Packages and any configured Git repos.'
global: True
@@ -329,6 +344,10 @@ soc:
showAiSummaries:
description: Show AI summaries for Strelka rules.
global: True
autoUpdateEnabled:
description: 'Automatically update YARA rules on a regular basis. This will update the rules based on the configured frequency.'
global: True
advanced: True
autoEnabledYaraRules:
description: 'YARA rules to automatically enable on initial import. Format is $Ruleset - for example, for the default shipped ruleset: securityonion-yara'
global: True
@@ -367,6 +386,10 @@ soc:
showAiSummaries:
description: Show AI summaries for Suricata rules.
global: True
autoUpdateEnabled:
description: 'Automatically update Suricata rules on a regular basis. This will update the rules based on the configured frequency.'
global: True
advanced: True
communityRulesImportFrequencySeconds:
description: 'How often to check for new Suricata rules (in seconds).'
global: True
@@ -396,6 +419,15 @@ soc:
advanced: True
forcedType: "[]string"
helpLink: detections.html#rule-engine-status
navigator:
intervalMinutes:
description: How often to generate the Navigator Layers. (minutes)
global: True
helpLink: attack-navigator.html
lookbackDays:
description: How far back to search for ATT&CK-tagged alerts. (days)
global: True
helpLink: attack-navigator.html
client:
enableReverseLookup:
description: Set to true to enable reverse DNS lookups for IP addresses in the SOC UI.

View File

@@ -62,6 +62,7 @@ base:
- influxdb
- soc
- kratos
- hydra
- sensoroni
- telegraf
- firewall
@@ -91,6 +92,7 @@ base:
- strelka.manager
- soc
- kratos
- hydra
- firewall
- manager
- sensoroni
@@ -123,6 +125,7 @@ base:
- influxdb
- soc
- kratos
- hydra
- firewall
- sensoroni
- telegraf
@@ -169,6 +172,7 @@ base:
- strelka.manager
- soc
- kratos
- hydra
- firewall
- manager
- sensoroni
@@ -220,6 +224,7 @@ base:
- strelka.manager
- soc
- kratos
- hydra
- sensoroni
- telegraf
- firewall

View File

@@ -53,6 +53,7 @@
{%
do GLOBALS.update({
'application_urls': {
'hydra': 'http://' ~ GLOBALS.manager ~ ':4445/',
'kratos': 'http://' ~ GLOBALS.manager ~ ':4434/',
'elastic': 'https://' ~ GLOBALS.manager ~ ':9200/',
'influxdb': 'https://' ~ GLOBALS.manager ~ ':8086/'

View File

@@ -23,10 +23,10 @@ zeek:
CfgDir: /opt/zeek/etc
CompressLogs: 1
ZeekPort: 27760
FileExtractDir: ""
local:
load:
- misc/loaded-scripts
- tuning/defaults
- misc/capture-loss
- frameworks/software/vulnerable
- frameworks/software/version-changes
@@ -70,6 +70,9 @@ zeek:
- zeek-plugin-profinet
- zeek-spicy-wireguard
- zeek-spicy-stun
- http2
- zeek-spicy-ipsec
- zeek-spicy-openvpn
load-sigs:
- frameworks/signatures/detect-windows-shells
redef:

View File

@@ -1,6 +1,6 @@
## Global ZeekControl configuration file.
{%- set ALLOWEDOPTIONS = ['commtimeout','commandtimeout','compresscmd','compressextension','compresslogs','compresslogsinflight','controltopic','crashexpireinterval','croncmd','debug','env_vars','havenfs','keeplogs','logdir','logexpireinterval','logrotationinterval','mailalarmsinterval','mailalarmsto','mailarchivelogfail','mailconnectionsummary','mailfrom','mailhostupdown','mailreceivingpackets','mailreplyto','mailsubjectprefix','mailto','makearchivename','memlimit','mindiskspace','pfringclusterid','pfringclustertype','pfringfirstappinstance','prefixes','savetraces','sendmail','sitepluginpath','sitepolicypath','sitepolicyscripts','statslogenable','statslogexpireinterval','statuscmdshowall','stoptimeout','stopwait','timefmt','timemachinehost','timemachineport','zeekargs','zeekport','bindir','capstatspath','cfgdir','debuglog','defaultstoredir','helperdir','libdir','libdir64','libdirinternal','localnetscfg','lockfile','logexpireminutes','nodecfg','os','pcapbufsize','pcapsnaplen','plugindir','pluginzeekdir','policydir','policydirsiteinstall','policydirsiteinstallauto','postprocdir','scriptsdir','spooldir','standalone','statefile','staticdir','statsdir','statslog','time','tmpdir','tmpexecdir','tracesummary','version','zeek','zeekbase'] %}
{%- set ALLOWEDOPTIONS = ['commtimeout','commandtimeout','compresscmd','compressextension','compresslogs','compresslogsinflight','controltopic','crashexpireinterval','croncmd','debug','env_vars','fileextractdir','havenfs','keeplogs','logdir','logexpireinterval','logrotationinterval','mailalarmsinterval','mailalarmsto','mailarchivelogfail','mailconnectionsummary','mailfrom','mailhostupdown','mailreceivingpackets','mailreplyto','mailsubjectprefix','mailto','makearchivename','memlimit','mindiskspace','pfringclusterid','pfringclustertype','pfringfirstappinstance','prefixes','savetraces','sendmail','sitepluginpath','sitepolicypath','sitepolicyscripts','statslogenable','statslogexpireinterval','statuscmdshowall','stoptimeout','stopwait','timefmt','timemachinehost','timemachineport','zeekargs','zeekport','bindir','capstatspath','cfgdir','debuglog','defaultstoredir','helperdir','libdir','libdir64','libdirinternal','localnetscfg','lockfile','logexpireminutes','nodecfg','os','pcapbufsize','pcapsnaplen','plugindir','pluginzeekdir','policydir','policydirsiteinstall','policydirsiteinstallauto','postprocdir','scriptsdir','spooldir','standalone','statefile','staticdir','statsdir','statslog','time','tmpdir','tmpexecdir','tracesummary','version','zeek','zeekbase'] %}
{%- for option in ZEEKCTL|sort %}
{%- if option|lower in ALLOWEDOPTIONS %}

View File

@@ -830,6 +830,7 @@ create_manager_pillars() {
redis_pillar
idstools_pillar
kratos_pillar
hydra_pillar
soc_pillar
idh_pillar
influxdb_pillar
@@ -1000,7 +1001,12 @@ docker_seed_update() {
docker_seed_registry() {
local VERSION="$SOVERSION"
if ! [ -f /nsm/docker-registry/docker/registry.tar ]; then
if [ -f /nsm/docker-registry/docker/registry.tar ]; then
logCmd "tar xvf /nsm/docker-registry/docker/registry.tar -C /nsm/docker-registry/docker"
logCmd "rm /nsm/docker-registry/docker/registry.tar"
elif [ -d /nsm/docker-registry/docker/registry ] && [ -f /etc/SOCLOUD ]; then
echo "Using existing docker registry content for cloud install"
else
if [ "$install_type" == 'IMPORT' ]; then
container_list 'so-import'
else
@@ -1010,9 +1016,6 @@ docker_seed_registry() {
docker_seed_update_percent=25
update_docker_containers 'netinstall' '' 'docker_seed_update' '/dev/stdout' 2>&1 | tee -a "$setup_log"
else
logCmd "tar xvf /nsm/docker-registry/docker/registry.tar -C /nsm/docker-registry/docker"
logCmd "rm /nsm/docker-registry/docker/registry.tar"
fi
}
@@ -1147,6 +1150,8 @@ generate_passwords(){
INFLUXTOKEN=$(head -c 64 /dev/urandom | base64 --wrap=0)
SENSORONIKEY=$(get_random_value)
KRATOSKEY=$(get_random_value)
HYDRAKEY=$(get_random_value)
HYDRASALT=$(get_random_value)
REDISPASS=$(get_random_value)
SOCSRVKEY=$(get_random_value 64)
IMPORTPASS=$(get_random_value)
@@ -1338,6 +1343,24 @@ kratos_pillar() {
"" > "$kratos_pillar_file"
}
hydra_pillar() {
title "Create the Hydra pillar file"
touch $adv_hydra_pillar_file
touch $hydra_pillar_file
chmod 660 $hydra_pillar_file
printf '%s\n'\
"hydra:"\
" config:"\
" secrets:"\
" system:"\
" - '$HYDRAKEY'"\
" oidc:"\
" subject_identifiers:"\
" pairwise:"\
" salt: '$HYDRASALT'"\
"" > "$hydra_pillar_file"
}
create_global() {
title "Creating the global.sls"
touch $adv_global_pillar_file
@@ -1439,7 +1462,7 @@ make_some_dirs() {
mkdir -p $local_salt_dir/salt/firewall/portgroups
mkdir -p $local_salt_dir/salt/firewall/ports
for THEDIR in bpf pcap elasticsearch ntp firewall redis backup influxdb strelka sensoroni soc docker zeek suricata nginx telegraf logstash soc manager kratos idstools idh elastalert stig global kafka versionlock hypervisor; do
for THEDIR in bpf pcap elasticsearch ntp firewall redis backup influxdb strelka sensoroni soc docker zeek suricata nginx telegraf logstash soc manager kratos hydra idstools idh elastalert stig global kafka versionlock hypervisor; do
mkdir -p $local_salt_dir/pillar/$THEDIR
touch $local_salt_dir/pillar/$THEDIR/adv_$THEDIR.sls
touch $local_salt_dir/pillar/$THEDIR/soc_$THEDIR.sls
@@ -1674,6 +1697,7 @@ reinstall_init() {
# Backup (and erase) directories in /nsm to prevent app errors
backup_dir /nsm/mysql "$date_string"
backup_dir /nsm/kratos "$date_string"
backup_dir /nsm/hydra "$date_string"
backup_dir /nsm/influxdb "$date_string"
# Uninstall local Elastic Agent, if installed

View File

@@ -160,6 +160,12 @@ export kratos_pillar_file
adv_kratos_pillar_file="$local_salt_dir/pillar/kratos/adv_kratos.sls"
export adv_kratos_pillar_file
hydra_pillar_file="$local_salt_dir/pillar/hydra/soc_hydra.sls"
export hydra_pillar_file
adv_hydra_pillar_file="$local_salt_dir/pillar/hydra/adv_hydra.sls"
export adv_hydra_pillar_file
idstools_pillar_file="$local_salt_dir/pillar/idstools/soc_idstools.sls"
export idstools_pillar_file

Binary file not shown.