mirror of https://github.com/Security-Onion-Solutions/securityonion.git
synced 2025-12-06 17:22:49 +01:00

3  .github/.gitleaks.toml (vendored)
@@ -536,11 +536,10 @@ secretGroup = 4

[allowlist]
description = "global allow lists"
regexes = ['''219-09-9999''', '''078-05-1120''', '''(9[0-9]{2}|666)-\d{2}-\d{4}''', '''RPM-GPG-KEY.*''']
regexes = ['''219-09-9999''', '''078-05-1120''', '''(9[0-9]{2}|666)-\d{2}-\d{4}''', '''RPM-GPG-KEY.*''', '''.*:.*StrelkaHexDump.*''', '''.*:.*PLACEHOLDER.*''']
paths = [
    '''gitleaks.toml''',
    '''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
    '''(go.mod|go.sum)$''',
    '''salt/nginx/files/enterprise-attack.json'''
]
@@ -1,17 +1,17 @@
### 2.4.40-20240116 ISO image released on 2024/01/17
### 2.4.50-20240220 ISO image released on 2024/02/20

### Download and Verify

2.4.40-20240116 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.40-20240116.iso
2.4.50-20240220 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.50-20240220.iso

MD5: AC55D027B663F3CE0878FEBDAD9DD78B
SHA1: C2B51723B17F3DC843CC493EB80E93B123E3A3E1
SHA256: C5F135FCF45A836BBFF58C231F95E1EA0CD894898322187AD5FBFCD24BC2F123
MD5: BCA6476EF1BF79773D8EFB11700FDE8E
SHA1: 9FF0A304AA368BCD2EF2BE89AD47E65650241927
SHA256: 49D7695EFFF6F3C4840079BF564F3191B585639816ADE98672A38017F25E9570

Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.40-20240116.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.50-20240220.iso.sig

Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS

@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.

Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.40-20240116.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.50-20240220.iso.sig
```

Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.40-20240116.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.50-20240220.iso
```

Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.4.40-20240116.iso.sig securityonion-2.4.40-20240116.iso
gpg --verify securityonion-2.4.50-20240220.iso.sig securityonion-2.4.50-20240220.iso
```

The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Tue 16 Jan 2024 07:34:40 PM EST using RSA key ID FE507013
gpg: Signature made Fri 16 Feb 2024 11:36:25 AM EST using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.
```
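As a supplementary check beyond the GPG signature, the downloaded ISO can also be compared against the SHA256 value published above. A minimal sketch, assuming the 2.4.50-20240220 ISO sits in the current directory and GNU coreutils `sha256sum` is available:
```
# Hedged sketch: compare the downloaded ISO against the SHA256 listed above.
EXPECTED="49D7695EFFF6F3C4840079BF564F3191B585639816ADE98672A38017F25E9570"
ACTUAL=$(sha256sum securityonion-2.4.50-20240220.iso | awk '{print toupper($1)}')
if [ "$ACTUAL" = "$EXPECTED" ]; then
  echo "SHA256 matches the published value."
else
  echo "SHA256 mismatch: got $ACTUAL" >&2
fi
```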
@@ -41,7 +41,8 @@ file_roots:
  base:
    - /opt/so/saltstack/local/salt
    - /opt/so/saltstack/default/salt
    - /nsm/elastic-fleet/artifacts
    - /opt/so/rules/nids

# The master_roots setting configures a master-only copy of the file_roots dictionary,
# used by the state compiler.

@@ -65,6 +65,7 @@ base:
    - soctopus.adv_soctopus
    - minions.{{ grains.id }}
    - minions.adv_{{ grains.id }}
    - stig.soc_stig

  '*_sensor':
    - healthcheck.sensor

@@ -80,6 +81,8 @@ base:
    - suricata.adv_suricata
    - minions.{{ grains.id }}
    - minions.adv_{{ grains.id }}
    - stig.soc_stig
    - soc.license

  '*_eval':
    - secrets

@@ -180,6 +183,7 @@ base:
    - suricata.adv_suricata
    - minions.{{ grains.id }}
    - minions.adv_{{ grains.id }}
    - stig.soc_stig

  '*_heavynode':
    - elasticsearch.auth

@@ -222,6 +226,8 @@ base:
    - redis.adv_redis
    - minions.{{ grains.id }}
    - minions.adv_{{ grains.id }}
    - stig.soc_stig
    - soc.license

  '*_receiver':
    - logstash.nodes
@@ -102,7 +102,8 @@
      'utility',
      'schedule',
      'soctopus',
      'docker_clean'
      'docker_clean',
      'stig'
    ],
    'so-managersearch': [
      'salt.master',

@@ -123,7 +124,8 @@
      'utility',
      'schedule',
      'soctopus',
      'docker_clean'
      'docker_clean',
      'stig'
    ],
    'so-searchnode': [
      'ssl',

@@ -131,7 +133,8 @@
      'telegraf',
      'firewall',
      'schedule',
      'docker_clean'
      'docker_clean',
      'stig'
    ],
    'so-standalone': [
      'salt.master',

@@ -156,7 +159,8 @@
      'schedule',
      'soctopus',
      'tcpreplay',
      'docker_clean'
      'docker_clean',
      'stig'
    ],
    'so-sensor': [
      'ssl',

@@ -168,13 +172,15 @@
      'healthcheck',
      'schedule',
      'tcpreplay',
      'docker_clean'
      'docker_clean',
      'stig'
    ],
    'so-fleet': [
      'ssl',
      'telegraf',
      'firewall',
      'logstash',
      'nginx',
      'healthcheck',
      'schedule',
      'elasticfleet',
@@ -4,7 +4,6 @@
{% from 'vars/globals.map.jinja' import GLOBALS %}

include:
  - common.soup_scripts
  - common.packages
{% if GLOBALS.role in GLOBALS.manager_roles %}
  - manager.elasticsearch # needed for elastic_curl_config state

@@ -134,6 +133,18 @@ common_sbin_jinja:
    - file_mode: 755
    - template: jinja

{% if not GLOBALS.is_manager%}
# prior to 2.4.50 these scripts were in common/tools/sbin on the manager because of soup and distributed to non managers
# these two states remove the scripts from non manager nodes
remove_soup:
  file.absent:
    - name: /usr/sbin/soup

remove_so-firewall:
  file.absent:
    - name: /usr/sbin/so-firewall
{% endif %}

so-status_script:
  file.managed:
    - name: /usr/sbin/so-status
@@ -1,23 +1,70 @@
# Sync some Utilities
soup_scripts:
  file.recurse:
    - name: /usr/sbin
    - user: root
    - group: root
    - file_mode: 755
    - source: salt://common/tools/sbin
    - include_pat:
      - so-common
      - so-image-common
{% import_yaml '/opt/so/saltstack/local/pillar/global/soc_global.sls' as SOC_GLOBAL %}
{% if SOC_GLOBAL.global.airgap %}
{% set UPDATE_DIR='/tmp/soagupdate/SecurityOnion' %}
{% else %}
{% set UPDATE_DIR='/tmp/sogh/securityonion' %}
{% endif %}

soup_manager_scripts:
  file.recurse:
    - name: /usr/sbin
    - user: root
    - group: root
    - file_mode: 755
    - source: salt://manager/tools/sbin
    - include_pat:
      - so-firewall
      - so-repo-sync
      - soup
remove_common_soup:
  file.absent:
    - name: /opt/so/saltstack/default/salt/common/tools/sbin/soup

remove_common_so-firewall:
  file.absent:
    - name: /opt/so/saltstack/default/salt/common/tools/sbin/so-firewall

copy_so-common_common_tools_sbin:
  file.copy:
    - name: /opt/so/saltstack/default/salt/common/tools/sbin/so-common
    - source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-common
    - force: True
    - preserve: True

copy_so-image-common_common_tools_sbin:
  file.copy:
    - name: /opt/so/saltstack/default/salt/common/tools/sbin/so-image-common
    - source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-image-common
    - force: True
    - preserve: True

copy_soup_manager_tools_sbin:
  file.copy:
    - name: /opt/so/saltstack/default/salt/manager/tools/sbin/soup
    - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/soup
    - force: True
    - preserve: True

copy_so-firewall_manager_tools_sbin:
  file.copy:
    - name: /opt/so/saltstack/default/salt/manager/tools/sbin/so-firewall
    - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-firewall
    - force: True
    - preserve: True

copy_so-common_sbin:
  file.copy:
    - name: /usr/sbin/so-common
    - source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-common
    - force: True
    - preserve: True

copy_so-image-common_sbin:
  file.copy:
    - name: /usr/sbin/so-image-common
    - source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-image-common
    - force: True
    - preserve: True

copy_soup_sbin:
  file.copy:
    - name: /usr/sbin/soup
    - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/soup
    - force: True
    - preserve: True

copy_so-firewall_sbin:
  file.copy:
    - name: /usr/sbin/so-firewall
    - source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-firewall
    - force: True
    - preserve: True
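The copy states above pull soup, so-common, so-image-common, and so-firewall out of the update checkout (UPDATE_DIR) into the default salt tree and /usr/sbin. A rough shell equivalent of what two of those file.copy states do, assuming the non-airgap checkout path /tmp/sogh/securityonion shown above:
```
# Hedged sketch: manual equivalent of copy_soup_sbin and copy_so-common_sbin above.
UPDATE_DIR=/tmp/sogh/securityonion
for script in salt/manager/tools/sbin/soup salt/common/tools/sbin/so-common; do
  cp -p "$UPDATE_DIR/$script" /usr/sbin/   # -p roughly matches preserve: True
done
```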
@@ -366,6 +366,13 @@ is_feature_enabled() {
  return 1
}

read_feat() {
  if [ -f /opt/so/log/sostatus/lks_enabled ]; then
    lic_id=$(cat /opt/so/saltstack/local/pillar/soc/license.sls | grep license_id: | awk '{print $2}')
    echo "$lic_id/$(cat /opt/so/log/sostatus/lks_enabled)/$(cat /opt/so/log/sostatus/fps_enabled)"
  fi
}

require_manager() {
  if is_manager_node; then
    echo "This is a manager, so we can proceed."

@@ -559,6 +566,14 @@ status () {
  printf "\n=========================================================================\n$(date) | $1\n=========================================================================\n"
}

sync_options() {
  set_version
  set_os
  salt_minion_count

  echo "$VERSION/$OS/$(uname -r)/$MINIONCOUNT/$(read_feat)"
}

systemctl_func() {
  local action=$1
  local echo_action=$1
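sync_options() above concatenates version, OS, kernel, minion count, and the read_feat() output into one slash-delimited string, and the so-repo-sync change later in this diff sends it as part of a curl user agent. A small sketch of how that string gets consumed, assuming so-common has been sourced on a manager node:
```
# Hedged sketch: assemble the reposync user agent the same way so-repo-sync does.
. /usr/sbin/so-common
UA="reposync/$(sync_options)"
echo "$UA"   # roughly: reposync/<version>/<os>/<kernel>/<minion count>/<license id>/<lks>/<fps>
```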
@@ -8,6 +8,7 @@
import sys
import subprocess
import os
import json

sys.path.append('/opt/saltstack/salt/lib/python3.10/site-packages/')
import salt.config

@@ -36,17 +37,63 @@ def check_needs_restarted():
    with open(outfile, 'w') as f:
        f.write(val)

def check_for_fps():
    feat = 'fps'
    feat_full = feat.replace('ps', 'ips')
    fps = 0
    try:
        result = subprocess.run([feat_full + '-mode-setup', '--is-enabled'], stdout=subprocess.PIPE)
        if result.returncode == 0:
            fps = 1
    except FileNotFoundError:
        fn = '/proc/sys/crypto/' + feat_full + '_enabled'
        with open(fn, 'r') as f:
            contents = f.read()
            if '1' in contents:
                fps = 1

    with open('/opt/so/log/sostatus/lks_enabled', 'w') as f:
        f.write(str(fps))

def check_for_lks():
    feat = 'Lks'
    feat_full = feat.replace('ks', 'uks')
    lks = 0
    result = subprocess.run(['lsblk', '-p', '-J'], check=True, stdout=subprocess.PIPE)
    data = json.loads(result.stdout)
    for device in data['blockdevices']:
        if 'children' in device:
            for gc in device['children']:
                if 'children' in gc:
                    try:
                        arg = 'is' + feat_full
                        result = subprocess.run(['cryptsetup', arg, gc['name']], stdout=subprocess.PIPE)
                        if result.returncode == 0:
                            lks = 1
                    except FileNotFoundError:
                        for ggc in gc['children']:
                            if 'crypt' in ggc['type']:
                                lks = 1
                if lks:
                    break
    with open('/opt/so/log/sostatus/fps_enabled', 'w') as f:
        f.write(str(lks))

def fail(msg):
    print(msg, file=sys.stderr)
    sys.exit(1)


def main():
    proc = subprocess.run(['id', '-u'], stdout=subprocess.PIPE, encoding="utf-8")
    if proc.stdout.strip() != "0":
        fail("This program must be run as root")

    # Ensure that umask is 0022 so that files created by this script have rw-r-r permissions
    org_umask = os.umask(0o022)
    check_needs_restarted()
    check_for_fps()
    check_for_lks()
    # Restore umask to whatever value was set before this script was run. SXIG sets to 0077 rw---
    os.umask(org_umask)

if __name__ == "__main__":
    main()
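check_for_fps() and check_for_lks() above probe FIPS mode (via fips-mode-setup, falling back to /proc/sys/crypto/fips_enabled) and LUKS containers (via cryptsetup isLuks, falling back to lsblk's crypt type), then write 0/1 flag files under /opt/so/log/sostatus. A quick shell sketch of the same probes for manual spot-checking, assuming the same tools are installed:
```
# Hedged sketch: manual versions of the FIPS and LUKS probes above.
if fips-mode-setup --is-enabled 2>/dev/null || grep -q 1 /proc/sys/crypto/fips_enabled 2>/dev/null; then
  echo "FIPS mode appears enabled"
fi
# Flag any block device that cryptsetup recognizes as a LUKS container.
for dev in $(lsblk -pnro NAME); do
  cryptsetup isLuks "$dev" 2>/dev/null && echo "LUKS container: $dev"
done
```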
@@ -334,6 +334,7 @@ desktop_packages:
|
||||
- pulseaudio-libs
|
||||
- pulseaudio-libs-glib2
|
||||
- pulseaudio-utils
|
||||
- putty
|
||||
- sane-airscan
|
||||
- sane-backends
|
||||
- sane-backends-drivers-cameras
|
||||
|
||||
@@ -84,6 +84,13 @@ docker:
|
||||
custom_bind_mounts: []
|
||||
extra_hosts: []
|
||||
extra_env: []
|
||||
'so-nginx-fleet-node':
|
||||
final_octet: 31
|
||||
port_bindings:
|
||||
- 8443:8443
|
||||
custom_bind_mounts: []
|
||||
extra_hosts: []
|
||||
extra_env: []
|
||||
'so-playbook':
|
||||
final_octet: 32
|
||||
port_bindings:
|
||||
|
||||
@@ -48,6 +48,7 @@ docker:
|
||||
so-logstash: *dockerOptions
|
||||
so-mysql: *dockerOptions
|
||||
so-nginx: *dockerOptions
|
||||
so-nginx-fleet-node: *dockerOptions
|
||||
so-playbook: *dockerOptions
|
||||
so-redis: *dockerOptions
|
||||
so-sensoroni: *dockerOptions
|
||||
|
||||
@@ -45,6 +45,8 @@ elasticfleet:
|
||||
- cisco_ise
|
||||
- cisco_meraki
|
||||
- cisco_umbrella
|
||||
- citrix_adc
|
||||
- citrix_waf
|
||||
- cloudflare
|
||||
- crowdstrike
|
||||
- darktrace
|
||||
@@ -75,6 +77,7 @@ elasticfleet:
|
||||
- mimecast
|
||||
- mysql
|
||||
- netflow
|
||||
- nginx
|
||||
- o365
|
||||
- okta
|
||||
- osquery_manager
|
||||
@@ -103,6 +106,7 @@ elasticfleet:
|
||||
- udp
|
||||
- vsphere
|
||||
- windows
|
||||
- winlog
|
||||
- zscaler_zia
|
||||
- zscaler_zpa
|
||||
- 1password
|
||||
|
||||
@@ -17,6 +17,11 @@ include:
  - elasticfleet.sostatus
  - ssl

# Wait for Elasticsearch to be ready - no reason to try running Elastic Fleet server if ES is not ready
wait_for_elasticsearch_elasticfleet:
  cmd.run:
    - name: so-elasticsearch-wait

# If enabled, automatically update Fleet Logstash Outputs
{% if ELASTICFLEETMERGED.config.server.enable_auto_configuration and grains.role not in ['so-import', 'so-eval', 'so-fleet'] %}
so-elastic-fleet-auto-configure-logstash-outputs:

@@ -33,12 +38,26 @@ so-elastic-fleet-auto-configure-server-urls:
    - retry: True
{% endif %}

# Automatically update Fleet Server Elasticsearch URLs
# Automatically update Fleet Server Elasticsearch URLs & Agent Artifact URLs
{% if grains.role not in ['so-fleet'] %}
so-elastic-fleet-auto-configure-elasticsearch-urls:
  cmd.run:
    - name: /usr/sbin/so-elastic-fleet-es-url-update
    - retry: True

so-elastic-fleet-auto-configure-artifact-urls:
  cmd.run:
    - name: /usr/sbin/so-elastic-fleet-artifacts-url-update
    - retry: True

{% endif %}

# Sync Elastic Agent artifacts to Fleet Node
{% if grains.role in ['so-fleet'] %}
elasticagent_syncartifacts:
  file.recurse:
    - name: /nsm/elastic-fleet/artifacts/beats
    - source: salt://beats
{% endif %}

{% if SERVICETOKEN != '' %}
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"package": {
|
||||
"name": "log",
|
||||
"version": ""
|
||||
},
|
||||
"name": "rita-logs",
|
||||
"namespace": "so",
|
||||
"description": "RITA Logs",
|
||||
"policy_id": "so-grid-nodes_general",
|
||||
"vars": {},
|
||||
"inputs": {
|
||||
"logs-logfile": {
|
||||
"enabled": true,
|
||||
"streams": {
|
||||
"log.logs": {
|
||||
"enabled": true,
|
||||
"vars": {
|
||||
"paths": [
|
||||
"/nsm/rita/beacons.csv",
|
||||
"/nsm/rita/exploded-dns.csv",
|
||||
"/nsm/rita/long-connections.csv"
|
||||
],
|
||||
"exclude_files": [],
|
||||
"ignore_older": "72h",
|
||||
"data_stream.dataset": "rita",
|
||||
"tags": [],
|
||||
"processors": "- dissect:\n tokenizer: \"/nsm/rita/%{pipeline}.csv\"\n field: \"log.file.path\"\n trim_chars: \".csv\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"pipeline\").split(\"-\");\n if (pl.length > 1) {\n pl = pl[1];\n }\n else {\n pl = pl[0];\n }\n event.Put(\"@metadata.pipeline\", \"rita.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: rita",
|
||||
"custom": "exclude_lines: ['^Score', '^Source', '^Domain', '^No results']"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
|
||||
# this file except in compliance with the Elastic License 2.0.
|
||||
|
||||
@@ -0,0 +1,90 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
# this file except in compliance with the Elastic License 2.0.
{% from 'vars/globals.map.jinja' import GLOBALS %}

. /usr/sbin/so-common

# Only run on Managers
if ! is_manager_node; then
    printf "Not a Manager Node... Exiting"
    exit 0
fi

# Function to check if an array contains a value
array_contains () {
    local array="$1[@]"
    local seeking=$2
    local in=1
    for element in "${!array}"; do
        if [[ $element == "$seeking" ]]; then
            in=0
            break
        fi
    done
    return $in
}

# Query for the current Grid Nodes that are running Logstash (which includes Fleet Nodes)
LOGSTASHNODES='{{ salt['pillar.get']('logstash:nodes', {}) | tojson }}'

# Initialize an array for new hosts from Fleet Nodes
declare -a NEW_LIST=()

# Query for Fleet Nodes & add them to the list (Hostname)
if grep -q "fleet" <<< "$LOGSTASHNODES"; then
    readarray -t FLEETNODES < <(jq -r '.fleet | keys_unsorted[]' <<< "$LOGSTASHNODES")
    for NODE in "${FLEETNODES[@]}"; do
        URL="http://$NODE:8443/artifacts/"
        NAME="FleetServer_$NODE"
        NEW_LIST+=("$URL=$NAME")
    done
fi

# Create an array for expected hosts and their names
declare -A expected_urls=(
    ["http://{{ GLOBALS.url_base }}:8443/artifacts/"]="FleetServer_{{ GLOBALS.hostname }}"
    ["https://artifacts.elastic.co/downloads/"]="Elastic Artifacts"
)

# Merge NEW_LIST into expected_urls
for entry in "${NEW_LIST[@]}"; do
    # Extract URL and Name from each entry
    IFS='=' read -r URL NAME <<< "$entry"
    # Add to expected_urls, automatically handling URL as key and NAME as value
    expected_urls["$URL"]="$NAME"
done

# Fetch the current hosts from the API
current_urls=$(curl -K /opt/so/conf/elasticsearch/curl.config 'http://localhost:5601/api/fleet/agent_download_sources' | jq -r .items[].host)

# Convert current hosts to an array
IFS=$'\n' read -rd '' -a current_urls_array <<<"$current_urls"

# Flag to track if any host was added
any_url_added=0

# Check each expected host
for host in "${!expected_urls[@]}"; do
    array_contains current_urls_array "$host" || {
        echo "$host (${expected_urls[$host]}) is missing. Adding it..."

        # Prepare the JSON payload
        JSON_STRING=$( jq -n \
            --arg NAME "${expected_urls[$host]}" \
            --arg URL "$host" \
            '{"name":$NAME,"host":$URL}' )

        # Create the missing host
        curl -K /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/agent_download_sources" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"

        # Flag that an artifact URL was added
        any_url_added=1
    }

done

if [[ $any_url_added -eq 0 ]]; then
    echo "All expected artifact URLs are present. No updates needed."
fi
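The script above reconciles Fleet's agent download sources against the expected manager and fleet-node URLs. To inspect what the API currently holds, the same curl config and Kibana endpoint it uses can be queried directly; a minimal sketch:
```
# Hedged sketch: list the artifact URLs Fleet currently knows about,
# using the same curl config and endpoint as the script above.
curl -s -K /opt/so/conf/elasticsearch/curl.config \
  'http://localhost:5601/api/fleet/agent_download_sources' |
  jq -r '.items[] | "\(.name)\t\(.host)"'
```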
@@ -1,3 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
|
||||
# this file except in compliance with the Elastic License 2.0.
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
|
||||
# this file except in compliance with the Elastic License 2.0.
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#!/bin/bash
|
||||
|
||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
||||
# or more contributor license agreements. Licensed under the Elastic License 2.0; you may not use
|
||||
# this file except in compliance with the Elastic License 2.0.
|
||||
|
||||
@@ -118,6 +118,19 @@ esingestconf:
    - user: 930
    - group: 939

# Auto-generate Elasticsearch ingest node pipelines from pillar
{% for pipeline, config in ELASTICSEARCHMERGED.pipelines.items() %}
es_ingest_conf_{{pipeline}}:
  file.managed:
    - name: /opt/so/conf/elasticsearch/ingest/{{ pipeline }}
    - source: salt://elasticsearch/base-template.json.jinja
    - defaults:
        TEMPLATE_CONFIG: {{ config }}
    - template: jinja
    - onchanges_in:
      - file: so-pipelines-reload
{% endfor %}

eslog4jfile:
  file.managed:
    - name: /opt/so/conf/elasticsearch/log4j2.properties
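Each pillar-defined pipeline above is rendered to /opt/so/conf/elasticsearch/ingest/<name> and picked up by so-pipelines-reload. One way to confirm a custom pipeline actually landed in Elasticsearch is to ask the _ingest API for it; a sketch, assuming Elasticsearch answers on https://localhost:9200 and the usual Security Onion curl config holds the credentials:
```
# Hedged sketch: confirm that the rendered custom001 pipeline was loaded.
curl -sk -K /opt/so/conf/elasticsearch/curl.config \
  'https://localhost:9200/_ingest/pipeline/custom001' | jq .
```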
@@ -55,6 +55,87 @@ elasticsearch:
|
||||
key: /usr/share/elasticsearch/config/elasticsearch.key
|
||||
verification_mode: none
|
||||
enabled: false
|
||||
pipelines:
|
||||
custom001:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom001
|
||||
- pipeline:
|
||||
name: common
|
||||
custom002:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom002
|
||||
- pipeline:
|
||||
name: common
|
||||
custom003:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom003
|
||||
- pipeline:
|
||||
name: common
|
||||
custom004:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom004
|
||||
- pipeline:
|
||||
name: common
|
||||
custom005:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom005
|
||||
- pipeline:
|
||||
name: common
|
||||
custom006:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom006
|
||||
- pipeline:
|
||||
name: common
|
||||
custom007:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom007
|
||||
- pipeline:
|
||||
name: common
|
||||
custom008:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom008
|
||||
- pipeline:
|
||||
name: common
|
||||
custom009:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom009
|
||||
- pipeline:
|
||||
name: common
|
||||
custom010:
|
||||
description: Custom Pipeline
|
||||
processors:
|
||||
- set:
|
||||
field: tags
|
||||
value: custom010
|
||||
- pipeline:
|
||||
name: common
|
||||
index_settings:
|
||||
global_overrides:
|
||||
index_template:
|
||||
@@ -2537,6 +2618,270 @@ elasticsearch:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_adc_x_interface:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_adc.interface-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_adc.interface-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_adc.interface@package"
|
||||
- "logs-citrix_adc.interface@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_adc_x_lbvserver:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_adc.lbvserver-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_adc.lbvserver-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_adc.lbvserver@package"
|
||||
- "logs-citrix_adc.lbvserver@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_adc_x_service:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_adc.service-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_adc.service-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_adc.service@package"
|
||||
- "logs-citrix_adc.service@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_adc_x_system:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_adc.system-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_adc.system-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_adc.system@package"
|
||||
- "logs-citrix_adc.system@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_adc_x_vpn:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_adc.vpn-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_adc.vpn-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_adc.vpn@package"
|
||||
- "logs-citrix_adc.vpn@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-citrix_waf_x_log:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-citrix_waf.log-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-citrix_waf.log-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-citrix_waf.log@package"
|
||||
- "logs-citrix_waf.log@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-cloudflare_x_audit:
|
||||
index_sorting: false
|
||||
index_template:
|
||||
@@ -3539,6 +3884,62 @@ elasticsearch:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-endpoint_x_diagnostic_x_collection:
|
||||
index_sorting: false
|
||||
index_template:
|
||||
composed_of:
|
||||
- event-mappings
|
||||
- logs-endpoint.diagnostic.collection@custom
|
||||
- logs-endpoint.diagnostic.collection@package
|
||||
- so-fleet_globals-1
|
||||
- so-fleet_agent_id_verification-1
|
||||
data_stream:
|
||||
allow_custom_routing: false
|
||||
hidden: false
|
||||
index_patterns:
|
||||
- logs-endpoint.diagnostic.collection-*
|
||||
priority: 501
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-endpoint.diagnostic.collection-logs
|
||||
mapping:
|
||||
total_fields:
|
||||
limit: 5000
|
||||
number_of_replicas: 0
|
||||
sort:
|
||||
field: '@timestamp'
|
||||
order: desc
|
||||
policy:
|
||||
_meta:
|
||||
managed: true
|
||||
managed_by: security_onion
|
||||
package:
|
||||
name: elastic_agent
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-endpoint_x_events_x_api:
|
||||
index_sorting: false
|
||||
index_template:
|
||||
@@ -6659,6 +7060,138 @@ elasticsearch:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-nginx_x_access:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-nginx.access-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-nginx.access-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-nginx.access@package"
|
||||
- "logs-nginx.access@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-nginx_x_error:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-nginx.error-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-nginx.error-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-nginx.error@package"
|
||||
- "logs-nginx.error@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-metrics-nginx_x_stubstatus:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "metrics-nginx.stubstatus-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-metrics-nginx.stubstatus-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "metrics-nginx.stubstatus@package"
|
||||
- "metrics-nginx.stubstatus@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-o365_x_audit:
|
||||
index_sorting: false
|
||||
index_template:
|
||||
@@ -8854,6 +9387,50 @@ elasticsearch:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-winlog_x_winlog:
|
||||
index_sorting: False
|
||||
index_template:
|
||||
index_patterns:
|
||||
- "logs-winlog.winlog-*"
|
||||
template:
|
||||
settings:
|
||||
index:
|
||||
lifecycle:
|
||||
name: so-logs-winlog.winlog-logs
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- "logs-winlog.winlog@package"
|
||||
- "logs-winlog.winlog@custom"
|
||||
- "so-fleet_globals-1"
|
||||
- "so-fleet_agent_id_verification-1"
|
||||
priority: 501
|
||||
data_stream:
|
||||
hidden: false
|
||||
allow_custom_routing: false
|
||||
policy:
|
||||
phases:
|
||||
cold:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 0
|
||||
min_age: 30d
|
||||
delete:
|
||||
actions:
|
||||
delete: {}
|
||||
min_age: 365d
|
||||
hot:
|
||||
actions:
|
||||
rollover:
|
||||
max_age: 30d
|
||||
max_primary_shard_size: 50gb
|
||||
set_priority:
|
||||
priority: 100
|
||||
min_age: 0ms
|
||||
warm:
|
||||
actions:
|
||||
set_priority:
|
||||
priority: 50
|
||||
min_age: 30d
|
||||
so-logs-zscaler_zia_x_alerts:
|
||||
index_sorting: false
|
||||
index_template:
|
||||
|
||||
@@ -67,6 +67,7 @@
|
||||
{ "set": { "if": "ctx.scan?.pe?.image_version == '0'", "field": "scan.pe.image_version", "value": "0.0", "override": true } },
|
||||
{ "set": { "field": "observer.name", "value": "{{agent.name}}" }},
|
||||
{ "convert" : { "field" : "scan.exiftool","type": "string", "ignore_missing":true }},
|
||||
{ "convert" : { "field" : "scan.pe.flags","type": "string", "ignore_missing":true }},
|
||||
{ "remove": { "field": ["host", "path", "message", "exiftool", "scan.yara.meta"], "ignore_missing": true } },
|
||||
{ "pipeline": { "name": "common" } }
|
||||
]
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
{ "json": { "field": "message", "target_field": "message2", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.pkt_src", "target_field": "network.packet_source","ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.proto", "target_field": "network.transport", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.in_iface", "target_field": "observer.ingress.interface.name", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.flow_id", "target_field": "log.id.uid", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.src_ip", "target_field": "source.ip", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.src_port", "target_field": "source.port", "ignore_failure": true } },
|
||||
@@ -12,6 +13,7 @@
|
||||
{ "rename": { "field": "message2.vlan", "target_field": "network.vlan.id", "ignore_failure": true } },
|
||||
{ "rename": { "field": "message2.community_id", "target_field": "network.community_id", "ignore_missing": true } },
|
||||
{ "rename": { "field": "message2.xff", "target_field": "xff.ip", "ignore_missing": true } },
|
||||
{ "lowercase": { "field": "network.transport", "ignore_failure": true } },
|
||||
{ "set": { "field": "event.dataset", "value": "{{ message2.event_type }}" } },
|
||||
{ "set": { "field": "observer.name", "value": "{{agent.name}}" } },
|
||||
{ "set": { "field": "event.ingested", "value": "{{@timestamp}}" } },
|
||||
|
||||
21  salt/elasticsearch/files/ingest/suricata.ike (Normal file)

@@ -0,0 +1,21 @@
{
  "description" : "suricata.ike",
  "processors" : [
    { "rename": { "field": "message2.proto", "target_field": "network.transport", "ignore_missing": true } },
    { "rename": { "field": "message2.app_proto", "target_field": "network.protocol", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.alg_auth", "target_field": "ike.algorithm.authentication", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.alg_enc", "target_field": "ike.algorithm.encryption", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.alg_esn", "target_field": "ike.algorithm.esn", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.alg_dh", "target_field": "ike.algorithm.dh", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.alg_prf", "target_field": "ike.algorithm.prf", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.exchange_type", "target_field": "ike.exchange_type", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.payload", "target_field": "ike.payload", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.role", "target_field": "ike.role", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.init_spi", "target_field": "ike.spi.initiator", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.resp_spi", "target_field": "ike.spi.responder", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.version_major", "target_field": "ike.version.major", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.version_minor", "target_field": "ike.version.minor", "ignore_missing": true } },
    { "rename": { "field": "message2.ike.ikev2.errors", "target_field": "ike.ikev2.errors", "ignore_missing": true } },
    { "pipeline": { "name": "common" } }
  ]
}

@@ -1,8 +0,0 @@
{
  "description" : "suricata.ikev2",
  "processors" : [
    { "rename": { "field": "message2.proto", "target_field": "network.transport", "ignore_missing": true } },
    { "rename": { "field": "message2.app_proto", "target_field": "network.protocol", "ignore_missing": true } },
    { "pipeline": { "name": "common" } }
  ]
}
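The new suricata.ike pipeline above only renames fields out of message2 and then hands off to the common pipeline. Its field mapping can be exercised without live traffic via the _simulate API; a sketch, again assuming Elasticsearch is reachable as in the Security Onion defaults and that the common pipeline is already loaded:
```
# Hedged sketch: run one synthetic EVE IKE record through the suricata.ike pipeline.
curl -sk -K /opt/so/conf/elasticsearch/curl.config \
  -H 'Content-Type: application/json' \
  'https://localhost:9200/_ingest/pipeline/suricata.ike/_simulate' -d '
{
  "docs": [
    { "_source": { "message2": { "proto": "UDP", "ike": { "role": "initiator", "version_major": 2 } } } }
  ]
}' | jq .
```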
@@ -45,6 +45,28 @@ elasticsearch:
|
||||
description: Max number of boolean clauses per query.
|
||||
global: True
|
||||
helpLink: elasticsearch.html
|
||||
pipelines:
|
||||
custom001: &pipelines
|
||||
description:
|
||||
description: Description of the ingest node pipeline
|
||||
global: True
|
||||
advanced: True
|
||||
helpLink: elasticsearch.html
|
||||
processors:
|
||||
description: Processors for the ingest node pipeline
|
||||
global: True
|
||||
advanced: True
|
||||
multiline: True
|
||||
helpLink: elasticsearch.html
|
||||
custom002: *pipelines
|
||||
custom003: *pipelines
|
||||
custom004: *pipelines
|
||||
custom005: *pipelines
|
||||
custom006: *pipelines
|
||||
custom007: *pipelines
|
||||
custom008: *pipelines
|
||||
custom009: *pipelines
|
||||
custom010: *pipelines
|
||||
index_settings:
|
||||
global_overrides:
|
||||
index_template:
|
||||
@@ -318,6 +340,7 @@ elasticsearch:
|
||||
so-logs-windows_x_powershell: *indexSettings
|
||||
so-logs-windows_x_powershell_operational: *indexSettings
|
||||
so-logs-windows_x_sysmon_operational: *indexSettings
|
||||
so-logs-winlog_x_winlog: *indexSettings
|
||||
so-logs-apache_x_access: *indexSettings
|
||||
so-logs-apache_x_error: *indexSettings
|
||||
so-logs-auditd_x_log: *indexSettings
|
||||
@@ -346,6 +369,12 @@ elasticsearch:
|
||||
so-logs-cisco_ftd_x_log: *indexSettings
|
||||
so-logs-cisco_ios_x_log: *indexSettings
|
||||
so-logs-cisco_ise_x_log: *indexSettings
|
||||
so-logs-citrix_adc_x_interface: *indexSettings
|
||||
so-logs-citrix_adc_x_lbvserver: *indexSettings
|
||||
so-logs-citrix_adc_x_service: *indexSettings
|
||||
so-logs-citrix_adc_x_system: *indexSettings
|
||||
so-logs-citrix_adc_x_vpn: *indexSettings
|
||||
so-logs-citrix_waf_x_log: *indexSettings
|
||||
so-logs-cloudflare_x_audit: *indexSettings
|
||||
so-logs-cloudflare_x_logpull: *indexSettings
|
||||
so-logs-crowdstrike_x_falcon: *indexSettings
|
||||
@@ -406,6 +435,8 @@ elasticsearch:
|
||||
so-logs-mysql_x_error: *indexSettings
|
||||
so-logs-mysql_x_slowlog: *indexSettings
|
||||
so-logs-netflow_x_log: *indexSettings
|
||||
so-logs-nginx_x_access: *indexSettings
|
||||
so-logs-nginx_x_error: *indexSettings
|
||||
so-logs-o365_x_audit: *indexSettings
|
||||
so-logs-okta_x_system: *indexSettings
|
||||
so-logs-panw_x_panos: *indexSettings
|
||||
@@ -471,6 +502,7 @@ elasticsearch:
|
||||
so-metrics-endpoint_x_metadata: *indexSettings
|
||||
so-metrics-endpoint_x_metrics: *indexSettings
|
||||
so-metrics-endpoint_x_policy: *indexSettings
|
||||
so-metrics-nginx_x_stubstatus: *indexSettings
|
||||
so-case: *indexSettings
|
||||
so-common: *indexSettings
|
||||
so-endgame: *indexSettings
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
{"template": {
|
||||
{
|
||||
"template": {
|
||||
"settings": {
|
||||
"index": {
|
||||
"lifecycle": {
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"template": {
|
||||
"settings": {}
|
||||
},
|
||||
"_meta": {
|
||||
"package": {
|
||||
"name": "endpoint"
|
||||
},
|
||||
"managed_by": "fleet",
|
||||
"managed": true
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,132 @@
|
||||
{
|
||||
"template": {
|
||||
"settings": {
|
||||
"index": {
|
||||
"lifecycle": {
|
||||
"name": "logs-endpoint.collection-diagnostic"
|
||||
},
|
||||
"codec": "best_compression",
|
||||
"default_pipeline": "logs-endpoint.diagnostic.collection-8.10.2",
|
||||
"mapping": {
|
||||
"total_fields": {
|
||||
"limit": "10000"
|
||||
},
|
||||
"ignore_malformed": "true"
|
||||
},
|
||||
"query": {
|
||||
"default_field": [
|
||||
"ecs.version",
|
||||
"event.action",
|
||||
"event.category",
|
||||
"event.code",
|
||||
"event.dataset",
|
||||
"event.hash",
|
||||
"event.id",
|
||||
"event.kind",
|
||||
"event.module",
|
||||
"event.outcome",
|
||||
"event.provider",
|
||||
"event.type"
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"dynamic": false,
|
||||
"properties": {
|
||||
"@timestamp": {
|
||||
"ignore_malformed": false,
|
||||
"type": "date"
|
||||
},
|
||||
"ecs": {
|
||||
"properties": {
|
||||
"version": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"data_stream": {
|
||||
"properties": {
|
||||
"namespace": {
|
||||
"type": "constant_keyword"
|
||||
},
|
||||
"type": {
|
||||
"type": "constant_keyword"
|
||||
},
|
||||
"dataset": {
|
||||
"type": "constant_keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"event": {
|
||||
"properties": {
|
||||
"severity": {
|
||||
"type": "long"
|
||||
},
|
||||
"code": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"created": {
|
||||
"type": "date"
|
||||
},
|
||||
"kind": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"module": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"type": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"sequence": {
|
||||
"type": "long"
|
||||
},
|
||||
"ingested": {
|
||||
"type": "date"
|
||||
},
|
||||
"provider": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"action": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"id": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"category": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"dataset": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"hash": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"outcome": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"_meta": {
|
||||
"package": {
|
||||
"name": "endpoint"
|
||||
},
|
||||
"managed_by": "fleet",
|
||||
"managed": true
|
||||
}
|
||||
}
|
||||
@@ -14,15 +14,18 @@
|
||||
},
|
||||
"pe": {
|
||||
"properties": {
|
||||
"flags": {
|
||||
"type": "text"
|
||||
},
|
||||
"image_version": {
|
||||
"type": "float"
|
||||
},
|
||||
"sections": {
|
||||
"properties": {
|
||||
"entropy": {
|
||||
"type": "float"
|
||||
}
|
||||
}
|
||||
},
|
||||
"image_version": {
|
||||
"type": "float"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -95,6 +95,7 @@
|
||||
{% set NODE_CONTAINERS = [
|
||||
'so-elastic-fleet',
|
||||
'so-logstash',
|
||||
'so-nginx-fleet-node'
|
||||
] %}
|
||||
|
||||
{% elif GLOBALS.role == 'so-sensor' %}
|
||||
|
||||
@@ -39,7 +39,7 @@ so-idstools:
|
||||
{% endif %}
|
||||
- binds:
|
||||
- /opt/so/conf/idstools/etc:/opt/so/idstools/etc:ro
|
||||
- /opt/so/rules/nids:/opt/so/rules/nids:rw
|
||||
- /opt/so/rules/nids/suri:/opt/so/rules/nids/suri:rw
|
||||
- /nsm/rules/:/nsm/rules/:rw
|
||||
{% if DOCKER.containers['so-idstools'].custom_bind_mounts %}
|
||||
{% for BIND in DOCKER.containers['so-idstools'].custom_bind_mounts %}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
{%- from 'vars/globals.map.jinja' import GLOBALS -%}
|
||||
{%- from 'idstools/map.jinja' import IDSTOOLSMERGED -%}
|
||||
--merged=/opt/so/rules/nids/all.rules
|
||||
--local=/opt/so/rules/nids/local.rules
|
||||
--merged=/opt/so/rules/nids/suri/all.rules
|
||||
--local=/opt/so/rules/nids/suri/local.rules
|
||||
{%- if GLOBALS.md_engine == "SURICATA" %}
|
||||
--local=/opt/so/rules/nids/extraction.rules
|
||||
--local=/opt/so/rules/nids/filters.rules
|
||||
--local=/opt/so/rules/nids/suri/extraction.rules
|
||||
--local=/opt/so/rules/nids/suri/filters.rules
|
||||
{%- endif %}
|
||||
--url=http://{{ GLOBALS.manager }}:7788/suricata/emerging-all.rules
|
||||
--disable=/opt/so/idstools/etc/disable.conf
|
||||
|
||||
@@ -21,7 +21,7 @@ idstoolsetcsync:
|
||||
|
||||
rulesdir:
|
||||
file.directory:
|
||||
- name: /opt/so/rules/nids
|
||||
- name: /opt/so/rules/nids/suri
|
||||
- user: 939
|
||||
- group: 939
|
||||
- makedirs: True
|
||||
@@ -29,7 +29,7 @@ rulesdir:
|
||||
# Don't show changes because all.rules can be large
|
||||
synclocalnidsrules:
|
||||
file.recurse:
|
||||
- name: /opt/so/rules/nids/
|
||||
- name: /opt/so/rules/nids/suri/
|
||||
- source: salt://idstools/rules/
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
@@ -21,7 +21,7 @@
|
||||
|
||||
{% set KRATOSMERGED = salt['pillar.get']('kratos', default=KRATOSDEFAULTS.kratos, merge=true) %}
|
||||
|
||||
{% if KRATOSMERGED.oidc.enabled and 'oidc' in salt['pillar.get']('features') %}
|
||||
{% if KRATOSMERGED.oidc.enabled and 'odc' in salt['pillar.get']('features') %}
|
||||
{% do KRATOSMERGED.config.selfservice.methods.update({'oidc': {'enabled': true, 'config': {'providers': [KRATOSMERGED.oidc.config]}}}) %}
|
||||
{% endif %}
|
||||
|
||||
|
||||
@@ -63,6 +63,20 @@ lspipelinedir:
|
||||
- user: 931
|
||||
- group: 939
|
||||
|
||||
# Auto-generate Logstash pipeline config
|
||||
{% for pipeline, config in LOGSTASH_MERGED.pipeline_config.items() %}
|
||||
{% for assigned_pipeline in ASSIGNED_PIPELINES %}
|
||||
{% set custom_pipeline = 'custom/' + pipeline + '.conf' %}
|
||||
{% if custom_pipeline in LOGSTASH_MERGED.defined_pipelines[assigned_pipeline] %}
|
||||
ls_custom_pipeline_conf_{{assigned_pipeline}}_{{pipeline}}:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/logstash/pipelines/{{assigned_pipeline}}/{{ pipeline }}.conf
|
||||
- contents: LOGSTASH_MERGED.pipeline_config.{{pipeline}}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
{% endfor %}
|
||||
|
||||
|
||||
{% for assigned_pipeline in ASSIGNED_PIPELINES %}
|
||||
{% for CONFIGFILE in LOGSTASH_MERGED.defined_pipelines[assigned_pipeline] %}
|
||||
ls_pipeline_{{assigned_pipeline}}_{{CONFIGFILE.split('.')[0] | replace("/","_") }}:
|
||||
|
||||
@@ -42,6 +42,24 @@ logstash:
|
||||
custom2: []
|
||||
custom3: []
|
||||
custom4: []
|
||||
pipeline_config:
|
||||
custom001: |-
|
||||
filter {
|
||||
if [event][module] =~ "zeek" {
|
||||
mutate {
|
||||
add_tag => ["network_stuff"]
|
||||
}
|
||||
}
|
||||
}
|
||||
custom002: PLACEHOLDER
|
||||
custom003: PLACEHOLDER
|
||||
custom004: PLACEHOLDER
|
||||
custom005: PLACEHOLDER
|
||||
custom006: PLACEHOLDER
|
||||
custom007: PLACEHOLDER
|
||||
custom008: PLACEHOLDER
|
||||
custom009: PLACEHOLDER
|
||||
custom010: PLACEHOLDER
|
||||
settings:
|
||||
lsheap: 500m
|
||||
config:
|
||||
|
||||
@@ -31,6 +31,22 @@ logstash:
|
||||
custom2: *defined_pipelines
|
||||
custom3: *defined_pipelines
|
||||
custom4: *defined_pipelines
|
||||
pipeline_config:
|
||||
custom001: &pipeline_config
|
||||
description: Pipeline configuration for Logstash
|
||||
advanced: True
|
||||
multiline: True
|
||||
forcedType: string
|
||||
helpLink: logstash.html
|
||||
custom002: *pipeline_config
|
||||
custom003: *pipeline_config
|
||||
custom004: *pipeline_config
|
||||
custom005: *pipeline_config
|
||||
custom006: *pipeline_config
|
||||
custom007: *pipeline_config
|
||||
custom008: *pipeline_config
|
||||
custom009: *pipeline_config
|
||||
custom010: *pipeline_config
|
||||
settings:
|
||||
lsheap:
|
||||
description: Heap size to use for logstash
|
||||
|
||||
@@ -7,12 +7,8 @@
|
||||
NOROOT=1
|
||||
. /usr/sbin/so-common
|
||||
|
||||
set_version
|
||||
set_os
|
||||
salt_minion_count
|
||||
|
||||
set -e
|
||||
|
||||
curl --retry 5 --retry-delay 60 -A "reposync/$VERSION/$OS/$(uname -r)/$MINIONCOUNT" https://sigs.securityonion.net/checkup --output /tmp/checkup
|
||||
curl --retry 5 --retry-delay 60 -A "reposync/$(sync_options)" https://sigs.securityonion.net/checkup --output /tmp/checkup
|
||||
dnf reposync --norepopath -g --delete -m -c /opt/so/conf/reposync/repodownload.conf --repoid=securityonionsync --download-metadata -p /nsm/repo/
|
||||
createrepo /nsm/repo
|
||||
|
||||
@@ -347,7 +347,7 @@ function syncElastic() {
|
||||
[[ $? != 0 ]] && fail "Unable to read credential hashes from database"
|
||||
|
||||
user_data_formatted=$(echo "${userData}" | jq -r '.user + ":" + .data.hashed_password')
|
||||
if lookup_salt_value "licensed_features" "" "pillar" | grep -x oidc; then
|
||||
if lookup_salt_value "features" "" "pillar" | grep -x odc; then
|
||||
# generate random placeholder salt/hash for users without passwords
|
||||
random_crypt=$(get_random_value 53)
|
||||
user_data_formatted=$(echo "${user_data_formatted}" | sed -r "s/^(.+:)\$/\\1\$2a\$12${random_crypt}/")
|
||||
|
||||
@@ -16,12 +16,14 @@ lockFile = "/tmp/so-yaml.lock"
def showUsage(args):
    print('Usage: {} <COMMAND> <YAML_FILE> [ARGS...]'.format(sys.argv[0]))
    print(' General commands:')
    print(' append - Append a list item to a yaml key, if it exists and is a list. Requires KEY and LISTITEM args.')
    print(' remove - Removes a yaml key, if it exists. Requires KEY arg.')
    print(' help - Prints this usage information.')
    print('')
    print(' Where:')
    print(' YAML_FILE - Path to the file that will be modified. Ex: /opt/so/conf/service/conf.yaml')
    print(' KEY - YAML key, does not support \' or " characters at this time. Ex: level1.level2')
    print(' LISTITEM - Item to add to the list.')
    sys.exit(1)


@@ -35,6 +37,35 @@ def writeYaml(filename, content):
    file = open(filename, "w")
    return yaml.dump(content, file)

def appendItem(content, key, listItem):
    pieces = key.split(".", 1)
    if len(pieces) > 1:
        appendItem(content[pieces[0]], pieces[1], listItem)
    else:
        try:
            content[key].append(listItem)
        except AttributeError:
            print("The existing value for the given key is not a list. No action was taken on the file.")
            return 1
        except KeyError:
            print("The key provided does not exist. No action was taken on the file.")
            return 1

def append(args):
    if len(args) != 3:
        print('Missing filename, key arg, or list item to append', file=sys.stderr)
        showUsage(None)
        return

    filename = args[0]
    key = args[1]
    listItem = args[2]

    content = loadYaml(filename)
    appendItem(content, key, listItem)
    writeYaml(filename, content)

    return 0

def removeKey(content, key):
    pieces = key.split(".", 1)

@@ -69,6 +100,7 @@ def main():

    commands = {
        "help": showUsage,
        "append": append,
        "remove": remove,
    }
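The new append command above complements remove: it walks the dotted KEY down into the YAML document and appends LISTITEM to the list found there, refusing to touch non-lists or missing keys. A usage sketch, assuming the tool is installed on the grid as so-yaml.py and using the example file and key from the usage text above:
```
# Hedged sketch: append an item to a nested list with the new command.
so-yaml.py append /opt/so/conf/service/conf.yaml level1.level2 newitem
# A missing key or a non-list value is reported and the file is left untouched.
so-yaml.py append /opt/so/conf/service/conf.yaml level1.missing newitem
```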
@@ -105,3 +105,99 @@ class TestRemove(unittest.TestCase):
self.assertEqual(actual, expected)
sysmock.assert_called_once_with(1)
self.assertIn(mock_stdout.getvalue(), "Missing filename or key arg\n")

def test_append(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: abc }, key2: false, key3: [a,b,c]}")
file.close()

soyaml.append([filename, "key3", "d"])

file = open(filename, "r")
actual = file.read()
file.close()
expected = "key1:\n child1: 123\n child2: abc\nkey2: false\nkey3:\n- a\n- b\n- c\n- d\n"
self.assertEqual(actual, expected)

def test_append_nested(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: [a,b,c] }, key2: false, key3: [e,f,g]}")
file.close()

soyaml.append([filename, "key1.child2", "d"])

file = open(filename, "r")
actual = file.read()
file.close()

expected = "key1:\n child1: 123\n child2:\n - a\n - b\n - c\n - d\nkey2: false\nkey3:\n- e\n- f\n- g\n"
self.assertEqual(actual, expected)

def test_append_nested_deep(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()

soyaml.append([filename, "key1.child2.deep2", "d"])

file = open(filename, "r")
actual = file.read()
file.close()

expected = "key1:\n child1: 123\n child2:\n deep1: 45\n deep2:\n - a\n - b\n - c\n - d\nkey2: false\nkey3:\n- e\n- f\n- g\n"
self.assertEqual(actual, expected)

def test_append_key_noexist(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()

with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key4", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The key provided does not exist. No action was taken on the file.\n")

def test_append_key_noexist_deep(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()

with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1.child2.deep3", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The key provided does not exist. No action was taken on the file.\n")

def test_append_key_nonlist(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()

with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The existing value for the given key is not a list. No action was taken on the file.\n")

def test_append_key_nonlist_deep(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()

with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1.child2.deep1", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The existing value for the given key is not a list. No action was taken on the file.\n")

@@ -372,6 +372,17 @@ enable_highstate() {
echo ""
}

get_soup_script_hashes() {
CURRENTSOUP=$(md5sum /usr/sbin/soup | awk '{print $1}')
GITSOUP=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/soup | awk '{print $1}')
CURRENTCMN=$(md5sum /usr/sbin/so-common | awk '{print $1}')
GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
CURRENTIMGCMN=$(md5sum /usr/sbin/so-image-common | awk '{print $1}')
GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
CURRENTSOFIREWALL=$(md5sum /usr/sbin/so-firewall | awk '{print $1}')
GITSOFIREWALL=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/so-firewall | awk '{print $1}')
}

highstate() {
# Run a highstate.
salt-call state.highstate -l info queue=True
@@ -405,6 +416,7 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.10 ]] && up_to_2.4.20
[[ "$INSTALLEDVERSION" == 2.4.20 ]] && up_to_2.4.30
[[ "$INSTALLEDVERSION" == 2.4.30 ]] && up_to_2.4.40
[[ "$INSTALLEDVERSION" == 2.4.40 ]] && up_to_2.4.50
true
}

@@ -419,6 +431,7 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.10 ]] && post_to_2.4.20
[[ "$POSTVERSION" == 2.4.20 ]] && post_to_2.4.30
[[ "$POSTVERSION" == 2.4.30 ]] && post_to_2.4.40
[[ "$POSTVERSION" == 2.4.40 ]] && post_to_2.4.50
true
}

@@ -470,6 +483,11 @@ post_to_2.4.40() {
POSTVERSION=2.4.40
}

post_to_2.4.50() {
echo "Nothing to apply"
POSTVERSION=2.4.50
}

repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -570,6 +588,35 @@ up_to_2.4.40() {
INSTALLEDVERSION=2.4.40
}

up_to_2.4.50() {
echo "Creating additional pillars.."
mkdir -p /opt/so/saltstack/local/pillar/stig/
touch /opt/so/saltstack/local/pillar/stig/adv_stig.sls
touch /opt/so/saltstack/local/pillar/stig/soc_stig.sls

# the file_roots need to be updated because the Salt 3006.6 upgrade no longer allows symlinks outside the file_roots
# put the new so-yaml in place
echo "Updating so-yaml"
\cp -v "$UPDATE_DIR/salt/manager/tools/sbin/so-yaml.py" "$DEFAULT_SALT_DIR/salt/manager/tools/sbin/"
\cp -v "$UPDATE_DIR/salt/manager/tools/sbin/so-yaml.py" /usr/sbin/
echo "Creating a backup of the salt-master config."
# INSTALLEDVERSION is 2.4.40 at this point, but we want the backup to carry the version
# SO was at prior to starting the upgrade. Use POSTVERSION here since it doesn't change until
# the post-upgrade changes run. POSTVERSION is set to INSTALLEDVERSION at the start of soup.
cp -v /etc/salt/master "/etc/salt/master.so-$POSTVERSION.bak"
echo "Adding /opt/so/rules to file_roots in /etc/salt/master using so-yaml"
so-yaml.py append /etc/salt/master file_roots.base /opt/so/rules/nids
echo "Moving Suricata rules"
mkdir /opt/so/rules/nids/suri
chown socore:socore /opt/so/rules/nids/suri
mv -v /opt/so/rules/nids/*.rules /opt/so/rules/nids/suri/.

echo "Adding /nsm/elastic-fleet/artifacts to file_roots in /etc/salt/master using so-yaml"
so-yaml.py append /etc/salt/master file_roots.base /nsm/elastic-fleet/artifacts

INSTALLEDVERSION=2.4.50
}

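After up_to_2.4.50 runs, a quick sanity check could confirm the backup, the new file_roots entries, and the relocated Suricata rules. This is only an illustrative sketch using the paths from the function above; it is not part of soup itself.
```
# Backup of the salt-master config created before the file_roots change.
ls -l /etc/salt/master.so-*.bak

# The two appended file_roots entries should now appear in /etc/salt/master.
grep -A8 'file_roots:' /etc/salt/master

# Suricata rules should have moved into the new suri subdirectory.
ls /opt/so/rules/nids/suri/
```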
determine_elastic_agent_upgrade() {
if [[ $is_airgap -eq 0 ]]; then
update_elastic_agent_airgap
@@ -742,31 +789,29 @@ upgrade_salt() {
}

verify_latest_update_script() {
# Check to see if the update scripts match. If not, run the new one.
CURRENTSOUP=$(md5sum /usr/sbin/soup | awk '{print $1}')
GITSOUP=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/soup | awk '{print $1}')
CURRENTCMN=$(md5sum /usr/sbin/so-common | awk '{print $1}')
GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
CURRENTIMGCMN=$(md5sum /usr/sbin/so-image-common | awk '{print $1}')
GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
CURRENTSOFIREWALL=$(md5sum /usr/sbin/so-firewall | awk '{print $1}')
GITSOFIREWALL=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/so-firewall | awk '{print $1}')

get_soup_script_hashes
if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" && "$CURRENTSOFIREWALL" == "$GITSOFIREWALL" ]]; then
echo "This version of the soup script is up to date. Proceeding."
else
echo "You are not running the latest soup version. Updating soup and its components. This might take multiple runs to complete."
cp $UPDATE_DIR/salt/manager/tools/sbin/soup $DEFAULT_SALT_DIR/salt/common/tools/sbin/
cp $UPDATE_DIR/salt/common/tools/sbin/so-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
cp $UPDATE_DIR/salt/common/tools/sbin/so-image-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
cp $UPDATE_DIR/salt/manager/tools/sbin/so-firewall $DEFAULT_SALT_DIR/salt/common/tools/sbin/

salt-call state.apply common.soup_scripts queue=True -linfo --file-root=$UPDATE_DIR/salt --local

# Verify that soup scripts updated as expected
get_soup_script_hashes
if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" && "$CURRENTSOFIREWALL" == "$GITSOFIREWALL" ]]; then
echo "Successfully updated soup scripts."
else
echo "There was a problem updating soup scripts. Trying to rerun script update."
salt-call state.apply common.soup_scripts queue=True -linfo --file-root=$UPDATE_DIR/salt --local
fi

echo ""
echo "The soup script has been modified. Please run soup again to continue the upgrade."
exit 0
fi
}

}
# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -909,9 +954,6 @@ main() {

systemctl_func "stop" "$cron_service_name"

# update mine items prior to stopping salt-minion and salt-master
update_salt_mine

echo "Updating dockers to $NEWVERSION."
if [[ $is_airgap -eq 0 ]]; then
airgap_update_dockers
@@ -987,6 +1029,9 @@ main() {
salt-call state.apply salt.minion -l info queue=True
echo ""

# ensure the mine is updated and populated before highstates run, following the salt-master restart
update_salt_mine

enable_highstate

echo ""

@@ -14,6 +14,9 @@ include:
|
||||
- nginx.config
|
||||
- nginx.sostatus
|
||||
|
||||
|
||||
{% if grains.role not in ['so-fleet'] %}
|
||||
|
||||
{# if the user has selected to replace the crt and key in the ui #}
|
||||
{% if NGINXMERGED.ssl.replace_cert %}
|
||||
|
||||
@@ -89,17 +92,26 @@ make-rule-dir-nginx:
|
||||
- user
|
||||
- group
|
||||
|
||||
{% endif %}
|
||||
|
||||
{# if this is an so-fleet node then we want to use the port bindings, custom bind mounts defined for fleet #}
|
||||
{% if GLOBALS.role == 'so-fleet' %}
|
||||
{% set container_config = 'so-nginx-fleet-node' %}
|
||||
{% else %}
|
||||
{% set container_config = 'so-nginx' %}
|
||||
{% endif %}
|
||||
|
||||
so-nginx:
|
||||
docker_container.running:
|
||||
- image: {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-nginx:{{ GLOBALS.so_version }}
|
||||
- hostname: so-nginx
|
||||
- networks:
|
||||
- sobridge:
|
||||
- ipv4_address: {{ DOCKER.containers['so-nginx'].ip }}
|
||||
- ipv4_address: {{ DOCKER.containers[container_config].ip }}
|
||||
- extra_hosts:
|
||||
- {{ GLOBALS.manager }}:{{ GLOBALS.manager_ip }}
|
||||
{% if DOCKER.containers['so-nginx'].extra_hosts %}
|
||||
{% for XTRAHOST in DOCKER.containers['so-nginx'].extra_hosts %}
|
||||
{% if DOCKER.containers[container_config].extra_hosts %}
|
||||
{% for XTRAHOST in DOCKER.containers[container_config].extra_hosts %}
|
||||
- {{ XTRAHOST }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
@@ -119,20 +131,20 @@ so-nginx:
|
||||
- /nsm/repo:/opt/socore/html/repo:ro
|
||||
- /nsm/rules:/nsm/rules:ro
|
||||
{% endif %}
|
||||
{% if DOCKER.containers['so-nginx'].custom_bind_mounts %}
|
||||
{% for BIND in DOCKER.containers['so-nginx'].custom_bind_mounts %}
|
||||
{% if DOCKER.containers[container_config].custom_bind_mounts %}
|
||||
{% for BIND in DOCKER.containers[container_config].custom_bind_mounts %}
|
||||
- {{ BIND }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% if DOCKER.containers['so-nginx'].extra_env %}
|
||||
{% if DOCKER.containers[container_config].extra_env %}
|
||||
- environment:
|
||||
{% for XTRAENV in DOCKER.containers['so-nginx'].extra_env %}
|
||||
{% for XTRAENV in DOCKER.containers[container_config].extra_env %}
|
||||
- {{ XTRAENV }}
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
- cap_add: NET_BIND_SERVICE
|
||||
- port_bindings:
|
||||
{% for BINDING in DOCKER.containers['so-nginx'].port_bindings %}
|
||||
{% for BINDING in DOCKER.containers[container_config].port_bindings %}
|
||||
- {{ BINDING }}
|
||||
{% endfor %}
|
||||
- watch:
|
||||
|
||||
@@ -39,6 +39,26 @@ http {

include /etc/nginx/conf.d/*.conf;

{%- if role in ['fleet'] %}

server {
listen 8443;
server_name {{ GLOBALS.hostname }};
root /opt/socore/html;
location /artifacts/ {
try_files $uri =206;
proxy_read_timeout 90;
proxy_connect_timeout 90;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}
}

{%- endif %}

{%- if role in ['eval', 'managersearch', 'manager', 'standalone', 'import'] %}

server {
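The fleet-role server block above exposes /opt/socore/html on port 8443 for the /artifacts/ location. A hedged spot-check from a host that can reach the fleet node might look like the following; the hostname and artifact filename are placeholders, and whether the listener is reachable (and over which scheme) depends on the so-nginx-fleet-node port bindings defined elsewhere in this changeset.
```
# Request an example artifact through the new 8443 listener and show only the response code.
curl -s -o /dev/null -w '%{http_code}\n' http://fleet-node.example.com:8443/artifacts/example-artifact.tar.gz
```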
@@ -7,6 +7,7 @@ logfile=/var/log/yum.log
exactarch=1
obsoletes=1
gpgcheck=1
localpkg_gpgcheck=1
plugins=1
installonly_limit={{ salt['pillar.get']('yum:config:installonly_limit', 2) }}
bugtracker_url=http://bugs.centos.org/set_project.php?project_id=23&ref=http://bugs.centos.org/bug_report_page.php?category=yum

@@ -1,4 +1,4 @@
# version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
salt:
master:
version: 3006.5
version: 3006.6

@@ -1,6 +1,6 @@
# version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
salt:
minion:
version: 3006.5
version: 3006.6
check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default
service_start_delay: 30 # in seconds.

@@ -9,7 +9,7 @@ soc:
|
||||
icon: fa-crosshairs
|
||||
target:
|
||||
links:
|
||||
- '/#/hunt?q="{value|escape}" | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q="{value|escape}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- name: actionAddToCase
|
||||
description: actionAddToCaseHelp
|
||||
icon: fa-briefcase
|
||||
@@ -23,13 +23,13 @@ soc:
|
||||
icon: fab fa-searchengin
|
||||
target: ''
|
||||
links:
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}") | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:network.community_id}") | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q=("{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q="{:log.id.fuid}" | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q="{:log.id.uid}" | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q="{:network.community_id}" | groupby event.module* event.dataset'
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q=("{:log.id.fuid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q=("{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q="{:log.id.fuid}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q="{:log.id.uid}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- '/#/hunt?q="{:network.community_id}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- name: actionPcap
|
||||
description: actionPcapHelp
|
||||
icon: fa-stream
|
||||
@@ -59,12 +59,18 @@ soc:
|
||||
target: _blank
|
||||
links:
|
||||
- 'https://www.virustotal.com/gui/search/{value}'
|
||||
- name: Sublime Platform Email Review
|
||||
description: Review email in Sublime Platform
|
||||
- name: actionSublime
|
||||
description: actionSublimeHelp
|
||||
icon: fa-external-link-alt
|
||||
target: _blank
|
||||
links:
|
||||
- 'https://{:sublime.url}/messages/{:sublime.message_group_id}'
|
||||
- name: actionProcessAncestors
|
||||
description: actionProcessAncestorsHelp
|
||||
icon: fa-people-roof
|
||||
target: ''
|
||||
links:
|
||||
- '/#/hunt?q=(process.entity_id:"{:process.entity_id}" OR process.entity_id:"{:process.Ext.ancestry|processAncestors}") | groupby process.parent.name | groupby -sankey process.parent.name process.name | groupby process.name | groupby event.module event.dataset | table soc_timestamp event.dataset host.name user.name process.parent.name process.name process.working_directory'
|
||||
eventFields:
|
||||
default:
|
||||
- soc_timestamp
|
||||
@@ -1411,7 +1417,7 @@ soc:
|
||||
query: 'event.category: network AND _exists_:process.executable AND (_exists_:dns.question.name OR _exists_:dns.answers.data) | groupby -sankey host.name dns.question.name | groupby event.dataset event.type | groupby host.name | groupby process.executable | groupby dns.question.name | groupby dns.answers.data'
|
||||
- name: Host Process Activity
|
||||
description: Process activity captured on an endpoint
|
||||
query: 'event.category:process | groupby -sankey host.name user.name* | groupby event.dataset event.action | groupby host.name | groupby user.name | groupby process.working_directory | groupby process.executable | groupby process.command_line | groupby process.parent.executable | groupby process.parent.command_line | groupby -sankey process.parent.executable process.executable'
|
||||
query: 'event.category:process | groupby -sankey host.name user.name* | groupby event.dataset event.action | groupby host.name | groupby user.name | groupby process.working_directory | groupby process.executable | groupby process.command_line | groupby process.parent.executable | groupby process.parent.command_line | groupby -sankey process.parent.executable process.executable | table soc_timestamp event.dataset host.name user.name process.parent.name process.name process.working_directory'
|
||||
- name: Host File Activity
|
||||
description: File activity captured on an endpoint
|
||||
query: 'event.category: file AND _exists_:process.executable | groupby -sankey host.name process.executable | groupby host.name | groupby event.dataset event.action event.type | groupby file.name | groupby process.executable'
|
||||
@@ -1424,8 +1430,11 @@ soc:
|
||||
- name: Zeek Notice
|
||||
description: Zeek notice logs
|
||||
query: 'event.dataset:zeek.notice | groupby -sankey notice.note destination.ip | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'
|
||||
- name: Connections
|
||||
description: Network connection metadata
|
||||
- name: Connections and Metadata with community_id
|
||||
description: Network connections that include community_id
|
||||
query: '_exists_:network.community_id | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
|
||||
- name: Connections seen by Zeek or Suricata
|
||||
description: Network connections logged by Zeek or Suricata
|
||||
query: 'tags:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby -sankey destination.port network.protocol | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name | groupby client.ip_bytes | groupby server.ip_bytes | groupby client.oui'
|
||||
- name: DCE_RPC
|
||||
description: DCE_RPC (Distributed Computing Environment / Remote Procedure Calls) network metadata
|
||||
@@ -1562,6 +1571,9 @@ soc:
|
||||
- name: Firewall
|
||||
description: Firewall logs
|
||||
query: 'observer.type:firewall | groupby -sankey event.action observer.ingress.interface.name | groupby event.action | groupby observer.ingress.interface.name | groupby network.type | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port'
|
||||
- name: Firewall Auth
|
||||
description: Firewall authentication logs
|
||||
query: 'observer.type:firewall AND event.category:authentication | groupby user.name | groupby -sankey user.name source.ip | groupby source.ip | table soc_timestamp user.name source.ip message'
|
||||
- name: VLAN
|
||||
description: VLAN (Virtual Local Area Network) tagged logs
|
||||
query: '* AND _exists_:network.vlan.id | groupby network.vlan.id | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby event.dataset | groupby event.module | groupby observer.name | groupby source.geo.country_name | groupby destination.geo.country_name'
|
||||
|
||||
@@ -2,6 +2,10 @@ trusttheca:
file.absent:
- name: /etc/pki/tls/certs/intca.crt

symlinkca:
file.absent:
- name: /etc/ssl/certs/intca.crt

influxdb_key:
file.absent:
- name: /etc/pki/influxdb.key

3
salt/stig/defaults.yaml
Normal file
@@ -0,0 +1,3 @@
stig:
enabled: False
run_interval: 12
15
salt/stig/disabled.sls
Normal file
@@ -0,0 +1,15 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states %}

stig_remediate_schedule:
schedule.absent

remove_stig_script:
file.absent:
- name: /usr/sbin/so-stig
{% endif %}
104
salt/stig/enabled.sls
Normal file
@@ -0,0 +1,104 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
#
# Note: Per the Elastic License 2.0, the second limitation states:
#
# "You may not move, change, disable, or circumvent the license key functionality
# in the software, and you may not remove or obscure any functionality in the
# software that is protected by the license key."

{% from 'vars/globals.map.jinja' import GLOBALS %}
{% from 'allowed_states.map.jinja' import allowed_states %}
{% if sls.split('.')[0] in allowed_states and GLOBALS.os == 'OEL' %}
{% if 'stg' in salt['pillar.get']('features', []) %}
{% set OSCAP_PROFILE_NAME = 'xccdf_org.ssgproject.content_profile_stig' %}
{% set OSCAP_PROFILE_LOCATION = '/opt/so/conf/stig/sos-oscap.xml' %}
{% set OSCAP_OUTPUT_DIR = '/opt/so/log/stig' %}
oscap_packages:
pkg.installed:
- skip_suggestions: True
- pkgs:
- openscap
- openscap-scanner
- scap-security-guide

make_some_dirs:
file.directory:
- name: /opt/so/log/stig
- user: socore
- group: socore
- makedirs: True

make_more_dir:
file.directory:
- name: /opt/so/conf/stig
- user: socore
- group: socore
- makedirs: True

update_stig_profile:
file.managed:
- name: /opt/so/conf/stig/sos-oscap.xml
- source: salt://stig/files/sos-oscap.xml
- user: socore
- group: socore
- mode: 0644

{% if not salt['file.file_exists'](OSCAP_OUTPUT_DIR ~ '/pre-oscap-report.html') %}
run_initial_scan:
module.run:
- name: openscap.xccdf
- params: 'eval --profile {{ OSCAP_PROFILE_NAME }} --results {{ OSCAP_OUTPUT_DIR }}/pre-oscap-results.xml --report {{ OSCAP_OUTPUT_DIR }}/pre-oscap-report.html {{ OSCAP_PROFILE_LOCATION }}'
{% endif %}

run_remediate:
module.run:
- name: openscap.xccdf
- params: 'eval --remediate --profile {{ OSCAP_PROFILE_NAME }} --results {{ OSCAP_OUTPUT_DIR }}/post-oscap-results.xml --report {{ OSCAP_PROFILE_LOCATION }}'

{# OSCAP rule id: xccdf_org.ssgproject.content_rule_disable_ctrlaltdel_burstaction #}
disable_ctrl_alt_del_action:
file.replace:
- name: /etc/systemd/system.conf
- pattern: '^#CtrlAltDelBurstAction=none'
- repl: 'CtrlAltDelBurstAction=none'
- backup: '.bak'

{# OSCAP rule id: xccdf_org.ssgproject.content_rule_no_empty_passwords #}
remove_nullok_from_password_auth:
file.replace:
- name: /etc/pam.d/password-auth
- pattern: ' nullok'
- repl: ''
- backup: '.bak'

remove_nullok_from_system_auth_auth:
file.replace:
- name: /etc/pam.d/system-auth
- pattern: ' nullok'
- repl: ''
- backup: '.bak'

run_post_scan:
module.run:
- name: openscap.xccdf
- params: 'eval --profile {{ OSCAP_PROFILE_NAME }} --results {{ OSCAP_OUTPUT_DIR }}/post-oscap-results.xml --report {{ OSCAP_OUTPUT_DIR }}/post-oscap-report.html {{ OSCAP_PROFILE_LOCATION }}'

{% else %}
{{sls}}_no_license_detected:
test.fail_without_changes:
- name: {{sls}}_no_license_detected
- comment:
- "The application of STIGs is a feature supported only for customers with a valid license.
Contact Security Onion Solutions, LLC via our website at https://securityonionsolutions.com
for more information about purchasing a license to enable this feature."
{% endif %}

{% else %}
{{sls}}_state_not_allowed:
test.fail_without_changes:
- name: {{sls}}_state_not_allowed

{% endif %}
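The module.run states above drive OpenSCAP through Salt's openscap module. For troubleshooting, roughly equivalent manual commands would look like the sketch below, with the profile name and file paths copied from the state; running oscap by hand this way is an assumption for illustration, not a documented workflow.
```
# Pre-remediation scan, mirroring run_initial_scan.
oscap xccdf eval --profile xccdf_org.ssgproject.content_profile_stig \
  --results /opt/so/log/stig/pre-oscap-results.xml \
  --report /opt/so/log/stig/pre-oscap-report.html \
  /opt/so/conf/stig/sos-oscap.xml

# Remediation pass, mirroring run_remediate.
oscap xccdf eval --remediate --profile xccdf_org.ssgproject.content_profile_stig \
  --results /opt/so/log/stig/post-oscap-results.xml \
  /opt/so/conf/stig/sos-oscap.xml
```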
244945
salt/stig/files/sos-oscap.xml
Normal file
File diff suppressed because one or more lines are too long
16
salt/stig/init.sls
Normal file
@@ -0,0 +1,16 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

{% from 'stig/map.jinja' import STIGMERGED %}

include:
{% if STIGMERGED.enabled %}
- stig.schedule
{% if not salt['schedule.is_enabled'](name="stig_remediate_schedule") %}
- stig.enabled
{% endif %}
{% else %}
- stig.disabled
{% endif %}
7
salt/stig/map.jinja
Normal file
@@ -0,0 +1,7 @@
{# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
https://securityonion.net/license; you may not use this file except in compliance with the
Elastic License 2.0. #}

{% import_yaml 'stig/defaults.yaml' as STIGDEFAULTS with context %}
{% set STIGMERGED = salt['pillar.get']('stig', STIGDEFAULTS.stig, merge=True) %}
24
salt/stig/schedule.sls
Normal file
@@ -0,0 +1,24 @@
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

{% from 'stig/map.jinja' import STIGMERGED %}
{% if 'stg' in salt['pillar.get']('features', []) %}
stig_remediate_schedule:
schedule.present:
- function: state.apply
- job_args:
- stig.enabled
- hours: {{ STIGMERGED.run_interval }}
- maxrunning: 1
- enabled: true
{% else %}
{{sls}}_no_license_detected:
test.fail_without_changes:
- name: {{sls}}_no_license_detected
- comment:
- "The application of STIGs is a feature supported only for customers with a valid license.
Contact Security Onion Solutions, LLC via our website at https://securityonionsolutions.com
for more information about purchasing a license to enable this feature."
{% endif %}
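To confirm the schedule entry was created with the expected interval, something like the following could be run on the minion; treat it as a sketch, since the output format and the availability of schedule.is_enabled on the CLI vary by Salt release.
```
# List all scheduled jobs and check the STIG remediation entry specifically.
salt-call schedule.list
salt-call schedule.is_enabled name=stig_remediate_schedule
```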
11
salt/stig/soc_stig.yaml
Normal file
@@ -0,0 +1,11 @@
stig:
enabled:
description: You can enable or disable the application of STIGS using oscap. Note that the actions performed by OSCAP are not automatically reversible.
forcedType: bool
advanced: True
run_interval:
description: The interval in hours between OSCAP remediate executions.
forcedType: int
regex: ^([1-9][0-9]{0,2})$
regexFailureMessage: The value must be an integer between 1 and 999.
advanced: True
@@ -17,9 +17,10 @@ strelka:
|
||||
mime_db: '/usr/lib/file/magic.mgc'
|
||||
yara_rules: '/etc/strelka/taste/'
|
||||
scanners:
|
||||
'ScanBase64':
|
||||
'ScanBase64PE':
|
||||
- positive:
|
||||
filename: '^base64_'
|
||||
flavors:
|
||||
- 'base64_pe'
|
||||
priority: 5
|
||||
'ScanBatch':
|
||||
- positive:
|
||||
@@ -27,12 +28,27 @@ strelka:
|
||||
- 'text/x-msdos-batch'
|
||||
- 'batch_file'
|
||||
priority: 5
|
||||
'ScanBmpEof':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'image/x-ms-bmp'
|
||||
- 'bmp_file'
|
||||
negative:
|
||||
source:
|
||||
- 'ScanTranscode'
|
||||
priority: 5
|
||||
'ScanBzip2':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'application/x-bzip2'
|
||||
- 'bzip2_file'
|
||||
priority: 5
|
||||
'ScanDmg':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'dmg_disk_image'
|
||||
- 'hfsplus_disk_image'
|
||||
priority: 5
|
||||
'ScanDocx':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -40,6 +56,11 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
extract_text: False
|
||||
'ScanDonut':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'hacktool_win_shellcode_donut'
|
||||
priority: 5
|
||||
'ScanElf':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -56,6 +77,26 @@ strelka:
|
||||
- 'message/rfc822'
|
||||
- 'email_file'
|
||||
priority: 5
|
||||
'ScanEncryptedDoc':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'encrypted_word_document'
|
||||
priority: 5
|
||||
options:
|
||||
max_length: 5
|
||||
scanner_timeout: 150
|
||||
log_pws: True
|
||||
password_file: "/etc/strelka/passwords.dat"
|
||||
'ScanEncryptedZip':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'encrypted_zip'
|
||||
priority: 5
|
||||
options:
|
||||
max_length: 5
|
||||
scanner_timeout: 150
|
||||
log_pws: True
|
||||
password_file: '/etc/strelka/passwords.dat'
|
||||
'ScanEntropy':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -111,6 +152,16 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
tmp_directory: '/dev/shm/'
|
||||
'ScanFooter':
|
||||
- positive:
|
||||
flavors:
|
||||
- '*'
|
||||
priority: 5
|
||||
options:
|
||||
length: 50
|
||||
encodings:
|
||||
- classic
|
||||
- backslash
|
||||
'ScanGif':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -144,13 +195,25 @@ strelka:
|
||||
- 'html_file'
|
||||
priority: 5
|
||||
options:
|
||||
parser: "html5lib"
|
||||
max_hyperlinks: 50
|
||||
'ScanIqy':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'iqy_file'
|
||||
priority: 5
|
||||
'ScanIni':
|
||||
- positive:
|
||||
filename: '(\.([Cc][Ff][Gg]|[Ii][Nn][Ii])|PROJECT)$'
|
||||
flavors:
|
||||
- 'ini_file'
|
||||
priority: 5
|
||||
'ScanIso':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'application/x-iso9660-image'
|
||||
priority: 5
|
||||
options:
|
||||
limit: 50
|
||||
'ScanJarManifest':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -198,6 +261,25 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
limit: 1000
|
||||
'ScanLNK':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'lnk_file'
|
||||
priority: 5
|
||||
'ScanLsb':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'image/png'
|
||||
- 'png_file'
|
||||
- 'image/jpeg'
|
||||
- 'jpeg_file'
|
||||
- 'image/x-ms-bmp'
|
||||
- 'bmp_file'
|
||||
- 'image/webp'
|
||||
negative:
|
||||
source:
|
||||
- 'ScanTranscode'
|
||||
priority: 5
|
||||
'ScanLzma':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -214,6 +296,36 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
tmp_directory: '/dev/shm/'
|
||||
'ScanManifest':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'browser_manifest'
|
||||
priority: 5
|
||||
'ScanMsi':
|
||||
- positive:
|
||||
flavors:
|
||||
- "image/vnd.fpx"
|
||||
- "application/vnd.ms-msi"
|
||||
- "application/x-msi"
|
||||
priority: 5
|
||||
options:
|
||||
tmp_directory: '/dev/shm/'
|
||||
keys:
|
||||
- 'Author'
|
||||
- 'Characters'
|
||||
- 'Company'
|
||||
- 'CreateDate'
|
||||
- 'LastModifiedBy'
|
||||
- 'Lines'
|
||||
- 'ModifyDate'
|
||||
- 'Pages'
|
||||
- 'Paragraphs'
|
||||
- 'RevisionNumber'
|
||||
- 'Software'
|
||||
- 'Template'
|
||||
- 'Title'
|
||||
- 'TotalEditTime'
|
||||
- 'Words'
|
||||
'ScanOcr':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -236,6 +348,13 @@ strelka:
|
||||
- 'application/msword'
|
||||
- 'olecf_file'
|
||||
priority: 5
|
||||
'ScanOnenote':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'application/onenote'
|
||||
- 'application/msonenote'
|
||||
- 'onenote_file'
|
||||
priority: 5
|
||||
'ScanPdf':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -285,6 +404,30 @@ strelka:
|
||||
- 'ProgramArguments'
|
||||
- 'RunAtLoad'
|
||||
- 'StartInterval'
|
||||
'ScanPngEof':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'image/png'
|
||||
- 'png_file'
|
||||
negative:
|
||||
source:
|
||||
- 'ScanTranscode'
|
||||
priority: 5
|
||||
'ScanQr':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'image/jpeg'
|
||||
- 'jpeg_file'
|
||||
- 'image/png'
|
||||
- 'png_file'
|
||||
- 'image/tiff'
|
||||
- 'type_is_tiff'
|
||||
- 'image/x-ms-bmp'
|
||||
- 'bmp_file'
|
||||
- 'image/webp'
|
||||
priority: 5
|
||||
options:
|
||||
support_inverted: True
|
||||
'ScanRar':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -309,6 +452,19 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
limit: 1000
|
||||
'ScanSevenZip':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'application/x-7z-compressed'
|
||||
- '_7zip_file'
|
||||
- "image/vnd.fpx"
|
||||
- "application/vnd.ms-msi"
|
||||
- "application/x-msi"
|
||||
priority: 5
|
||||
options:
|
||||
scanner_timeout: 150
|
||||
crack_pws: True
|
||||
log_pws: True
|
||||
'ScanSwf':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -351,6 +507,7 @@ strelka:
|
||||
flavors:
|
||||
- 'vb_file'
|
||||
- 'vbscript'
|
||||
- 'hta_file'
|
||||
priority: 5
|
||||
'ScanVba':
|
||||
- positive:
|
||||
@@ -362,6 +519,20 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
analyze_macros: True
|
||||
'ScanVhd':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'application/x-vhd'
|
||||
- 'vhd_file'
|
||||
- 'vhdx_file'
|
||||
priority: 5
|
||||
options:
|
||||
limit: 100
|
||||
'ScanVsto':
|
||||
- positive:
|
||||
flavors:
|
||||
- 'vsto_file'
|
||||
priority: 5
|
||||
'ScanX509':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -391,6 +562,12 @@ strelka:
|
||||
priority: 5
|
||||
options:
|
||||
location: '/etc/yara/'
|
||||
compiled:
|
||||
enabled: False
|
||||
filename: "rules.compiled"
|
||||
store_offset: True
|
||||
offset_meta_key: "StrelkaHexDump"
|
||||
offset_padding: 32
|
||||
'ScanZip':
|
||||
- positive:
|
||||
flavors:
|
||||
@@ -530,6 +707,20 @@ strelka:
|
||||
ttl: 1h
|
||||
response:
|
||||
log: "/var/log/strelka/strelka.log"
|
||||
broker:
|
||||
bootstrap: "PLACEHOLDER"
|
||||
protocol: "PLACEHOLDER"
|
||||
certlocation: "PLACEHOLDER"
|
||||
keylocation: "PLACEHOLDER"
|
||||
calocation: "PLACEHOLDER"
|
||||
topic: "PLACEHOLDER"
|
||||
s3redundancy: "PLACEHOLDER - This should be a boolean value"
|
||||
s3:
|
||||
accesskey: "PLACEHOLDER"
|
||||
secretkey: "PLACEHOLDER"
|
||||
bucketName: "PLACEHOLDER"
|
||||
region: "PLACEHOLDER"
|
||||
endpoint: "PLACEHOLDER"
|
||||
manager:
|
||||
enabled: False
|
||||
config:
|
||||
|
||||
@@ -84,10 +84,12 @@ suridatadir:
- mode: 770
- makedirs: True

# salt:// would resolve to /opt/so/rules/nids because of the defined file_roots and
# not existing under /opt/so/saltstack/local/salt or /opt/so/saltstack/default/salt
surirulesync:
file.recurse:
- name: /opt/so/conf/suricata/rules/
- source: salt://suricata/rules/
- source: salt://suri/
- user: 940
- group: 940
- show_changes: False

@@ -13,7 +13,7 @@ ruleslink:
- name: /opt/so/saltstack/local/salt/suricata/rules
- user: socore
- group: socore
- target: /opt/so/rules/nids
- target: /opt/so/rules/nids/suri

refresh_salt_master_fileserver_suricata_ruleslink:
salt.runner:

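With /opt/so/rules/nids added to file_roots, the comment above notes that salt://suri/ now resolves outside the saltstack directories. One hedged way to sanity-check what the master's fileserver is actually exposing (the grep pattern is only illustrative):
```
# List the files served by the salt fileserver and look for the relocated Suricata rules.
salt-run fileserver.file_list | grep suri
```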
@@ -36,6 +36,7 @@ telegraf:
|
||||
- suriloss.sh
|
||||
- zeekcaptureloss.sh
|
||||
- zeekloss.sh
|
||||
- features.sh
|
||||
manager:
|
||||
- influxdbsize.sh
|
||||
- lasthighstate.sh
|
||||
@@ -43,6 +44,7 @@ telegraf:
|
||||
- raid.sh
|
||||
- redis.sh
|
||||
- sostatus.sh
|
||||
- features.sh
|
||||
managersearch:
|
||||
- eps.sh
|
||||
- influxdbsize.sh
|
||||
@@ -51,6 +53,7 @@ telegraf:
|
||||
- raid.sh
|
||||
- redis.sh
|
||||
- sostatus.sh
|
||||
- features.sh
|
||||
import:
|
||||
- influxdbsize.sh
|
||||
- lasthighstate.sh
|
||||
@@ -67,6 +70,7 @@ telegraf:
|
||||
- suriloss.sh
|
||||
- zeekcaptureloss.sh
|
||||
- zeekloss.sh
|
||||
- features.sh
|
||||
heavynode:
|
||||
- checkfiles.sh
|
||||
- eps.sh
|
||||
@@ -90,6 +94,7 @@ telegraf:
|
||||
- os.sh
|
||||
- raid.sh
|
||||
- sostatus.sh
|
||||
- features.sh
|
||||
receiver:
|
||||
- eps.sh
|
||||
- lasthighstate.sh
|
||||
|
||||
17
salt/telegraf/scripts/features.sh
Normal file
@@ -0,0 +1,17 @@
#!/bin/bash
#
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.

if [[ ! "`pidof -x $(basename $0) -o %PPID`" ]]; then

FPS_ENABLED=$(cat /var/log/sostatus/fps_enabled)
LKS_ENABLED=$(cat /var/log/sostatus/lks_enabled)

echo "features fps=$FPS_ENABLED"
echo "features lks=$LKS_ENABLED"
fi

exit 0
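features.sh is added to the telegraf script lists earlier in this changeset and emits Influx-style lines for the exec input. Running it by hand from a checkout of this repo (the path is an assumption) should produce output along these lines, with the actual values coming from the files under /var/log/sostatus:
```
# Run the collector directly; it prints one measurement line per feature flag.
bash salt/telegraf/scripts/features.sh
# Illustrative output:
#   features fps=0
#   features lks=1
```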
@@ -46,6 +46,7 @@ base:
|
||||
- zeek
|
||||
- strelka
|
||||
- elasticfleet.install_agent_grid
|
||||
- stig
|
||||
|
||||
'*_eval and G@saltversion:{{saltversion}}':
|
||||
- match: compound
|
||||
@@ -110,6 +111,7 @@ base:
|
||||
- soctopus
|
||||
- playbook
|
||||
- elasticfleet
|
||||
- stig
|
||||
|
||||
'*_standalone and G@saltversion:{{saltversion}}':
|
||||
- match: compound
|
||||
@@ -146,6 +148,7 @@ base:
|
||||
- soctopus
|
||||
- playbook
|
||||
- elasticfleet
|
||||
- stig
|
||||
|
||||
'*_searchnode and G@saltversion:{{saltversion}}':
|
||||
- match: compound
|
||||
@@ -157,6 +160,7 @@ base:
|
||||
- elasticsearch
|
||||
- logstash
|
||||
- elasticfleet.install_agent_grid
|
||||
- stig
|
||||
|
||||
'*_managersearch and G@saltversion:{{saltversion}}':
|
||||
- match: compound
|
||||
@@ -187,6 +191,7 @@ base:
|
||||
- soctopus
|
||||
- playbook
|
||||
- elasticfleet
|
||||
- stig
|
||||
|
||||
'*_heavynode and G@saltversion:{{saltversion}}':
|
||||
- match: compound
|
||||
@@ -259,6 +264,7 @@ base:
|
||||
- telegraf
|
||||
- firewall
|
||||
- logstash
|
||||
- nginx
|
||||
- elasticfleet
|
||||
- elasticfleet.install_agent_grid
|
||||
- schedule
|
||||
|
||||
@@ -1413,7 +1413,7 @@ make_some_dirs() {
|
||||
mkdir -p $local_salt_dir/salt/firewall/portgroups
|
||||
mkdir -p $local_salt_dir/salt/firewall/ports
|
||||
|
||||
for THEDIR in bpf pcap elasticsearch ntp firewall redis backup influxdb strelka sensoroni soc soctopus docker zeek suricata nginx telegraf logstash soc manager kratos idstools idh elastalert global;do
|
||||
for THEDIR in bpf pcap elasticsearch ntp firewall redis backup influxdb strelka sensoroni soc soctopus docker zeek suricata nginx telegraf logstash soc manager kratos idstools idh elastalert stig global;do
|
||||
mkdir -p $local_salt_dir/pillar/$THEDIR
|
||||
touch $local_salt_dir/pillar/$THEDIR/adv_$THEDIR.sls
|
||||
touch $local_salt_dir/pillar/$THEDIR/soc_$THEDIR.sls
|
||||
@@ -1600,6 +1600,9 @@ reinstall_init() {
|
||||
salt-call -l info saltutil.kill_all_jobs --local
|
||||
fi
|
||||
|
||||
logCmd "salt-call state.apply ca.remove -linfo --local --file-root=../salt"
|
||||
logCmd "salt-call state.apply ssl.remove -linfo --local --file-root=../salt"
|
||||
|
||||
# Kill any salt processes (safely)
|
||||
for service in "${salt_services[@]}"; do
|
||||
# Stop the service in the background so we can exit after a certain amount of time
|
||||
@@ -1621,9 +1624,6 @@ reinstall_init() {
|
||||
done
|
||||
done
|
||||
|
||||
logCmd "salt-call state.apply ca.remove -linfo --local --file-root=../salt"
|
||||
logCmd "salt-call state.apply ssl.remove -linfo --local --file-root=../salt"
|
||||
|
||||
# Remove all salt configs
|
||||
rm -rf /etc/salt/engines/* /etc/salt/grains /etc/salt/master /etc/salt/master.d/* /etc/salt/minion /etc/salt/minion.d/* /etc/salt/pki/* /etc/salt/proxy /etc/salt/proxy.d/* /var/cache/salt/
|
||||
|
||||
@@ -1933,9 +1933,13 @@ saltify() {
|
||||
logCmd "dnf -y install salt-$SALTVERSION salt-master-$SALTVERSION salt-minion-$SALTVERSION"
|
||||
else
|
||||
# We just need the minion
|
||||
if [[ $is_airgap ]]; then
|
||||
logCmd "dnf -y install salt salt-minion"
|
||||
else
|
||||
logCmd "dnf -y install salt-$SALTVERSION salt-minion-$SALTVERSION"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
logCmd "mkdir -p /etc/salt/minion.d"
|
||||
salt_install_module_deps
|
||||
@@ -2148,11 +2152,12 @@ set_default_log_size() {
|
||||
esac
|
||||
|
||||
local disk_dir="/"
|
||||
if [ -d /nsm ]; then
|
||||
if mountpoint -q /nsm; then
|
||||
disk_dir="/nsm"
|
||||
fi
|
||||
if [ -d /nsm/elasticsearch ]; then
|
||||
if mountpoint -q /nsm/elasticsearch; then
|
||||
disk_dir="/nsm/elasticsearch"
|
||||
percentage=80
|
||||
fi
|
||||
|
||||
local disk_size_1k
|
||||
|
||||
BIN
sigs/securityonion-2.4.50-20240220.iso.sig
Normal file
Binary file not shown.