Merge remote-tracking branch 'origin/2.4/dev' into feature/fleet-artifacts

This commit is contained in:
Josh Brower
2024-02-08 09:45:21 -05:00
16 changed files with 361 additions and 31 deletions

View File

@@ -41,6 +41,7 @@ file_roots:
base: base:
- /opt/so/saltstack/local/salt - /opt/so/saltstack/local/salt
- /opt/so/saltstack/default/salt - /opt/so/saltstack/default/salt
- /opt/so/rules
elasticartifacts: elasticartifacts:
- /nsm/elastic-fleet/artifacts - /nsm/elastic-fleet/artifacts

View File

@@ -118,6 +118,19 @@ esingestconf:
- user: 930 - user: 930
- group: 939 - group: 939
# Auto-generate Elasticsearch ingest node pipelines from pillar
{% for pipeline, config in ELASTICSEARCHMERGED.pipelines.items() %}
es_ingest_conf_{{pipeline}}:
file.managed:
- name: /opt/so/conf/elasticsearch/ingest/{{ pipeline }}
- source: salt://elasticsearch/base-template.json.jinja
- defaults:
TEMPLATE_CONFIG: {{ config }}
- template: jinja
- onchanges_in:
- file: so-pipelines-reload
{% endfor %}
eslog4jfile: eslog4jfile:
file.managed: file.managed:
- name: /opt/so/conf/elasticsearch/log4j2.properties - name: /opt/so/conf/elasticsearch/log4j2.properties

View File

@@ -55,6 +55,87 @@ elasticsearch:
key: /usr/share/elasticsearch/config/elasticsearch.key key: /usr/share/elasticsearch/config/elasticsearch.key
verification_mode: none verification_mode: none
enabled: false enabled: false
pipelines:
custom001:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom001
- pipeline:
name: common
custom002:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom002
- pipeline:
name: common
custom003:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom003
- pipeline:
name: common
custom004:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom004
- pipeline:
name: common
custom005:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom005
- pipeline:
name: common
custom006:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom006
- pipeline:
name: common
custom007:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom007
- pipeline:
name: common
custom008:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom008
- pipeline:
name: common
custom009:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom009
- pipeline:
name: common
custom010:
description: Custom Pipeline
processors:
- set:
field: tags
value: custom010
- pipeline:
name: common
index_settings: index_settings:
global_overrides: global_overrides:
index_template: index_template:

View File

@@ -45,6 +45,28 @@ elasticsearch:
description: Max number of boolean clauses per query. description: Max number of boolean clauses per query.
global: True global: True
helpLink: elasticsearch.html helpLink: elasticsearch.html
pipelines:
custom001: &pipelines
description:
description: Description of the ingest node pipeline
global: True
advanced: True
helpLink: elasticsearch.html
processors:
description: Processors for the ingest node pipeline
global: True
advanced: True
multiline: True
helpLink: elasticsearch.html
custom002: *pipelines
custom003: *pipelines
custom004: *pipelines
custom005: *pipelines
custom006: *pipelines
custom007: *pipelines
custom008: *pipelines
custom009: *pipelines
custom010: *pipelines
index_settings: index_settings:
global_overrides: global_overrides:
index_template: index_template:

View File

@@ -63,6 +63,20 @@ lspipelinedir:
- user: 931 - user: 931
- group: 939 - group: 939
# Auto-generate Logstash pipeline config
{% for pipeline, config in LOGSTASH_MERGED.pipeline_config.items() %}
{% for assigned_pipeline in ASSIGNED_PIPELINES %}
{% set custom_pipeline = 'custom/' + pipeline + '.conf' %}
{% if custom_pipeline in LOGSTASH_MERGED.defined_pipelines[assigned_pipeline] %}
ls_custom_pipeline_conf_{{assigned_pipeline}}_{{pipeline}}:
file.managed:
- name: /opt/so/conf/logstash/pipelines/{{assigned_pipeline}}/{{ pipeline }}.conf
- contents: LOGSTASH_MERGED.pipeline_config.{{pipeline}}
{% endif %}
{% endfor %}
{% endfor %}
{% for assigned_pipeline in ASSIGNED_PIPELINES %} {% for assigned_pipeline in ASSIGNED_PIPELINES %}
{% for CONFIGFILE in LOGSTASH_MERGED.defined_pipelines[assigned_pipeline] %} {% for CONFIGFILE in LOGSTASH_MERGED.defined_pipelines[assigned_pipeline] %}
ls_pipeline_{{assigned_pipeline}}_{{CONFIGFILE.split('.')[0] | replace("/","_") }}: ls_pipeline_{{assigned_pipeline}}_{{CONFIGFILE.split('.')[0] | replace("/","_") }}:

View File

@@ -42,6 +42,24 @@ logstash:
custom2: [] custom2: []
custom3: [] custom3: []
custom4: [] custom4: []
pipeline_config:
custom001: |-
filter {
if [event][module] =~ "zeek" {
mutate {
add_tag => ["network_stuff"]
}
}
}
custom002: PLACEHOLDER
custom003: PLACEHOLDER
custom004: PLACEHOLDER
custom005: PLACEHOLDER
custom006: PLACEHOLDER
custom007: PLACEHOLDER
custom008: PLACEHOLDER
custom009: PLACEHOLDER
custom010: PLACEHOLDER
settings: settings:
lsheap: 500m lsheap: 500m
config: config:

View File

@@ -31,6 +31,22 @@ logstash:
custom2: *defined_pipelines custom2: *defined_pipelines
custom3: *defined_pipelines custom3: *defined_pipelines
custom4: *defined_pipelines custom4: *defined_pipelines
pipeline_config:
custom001: &pipeline_config
description: Pipeline configuration for Logstash
advanced: True
multiline: True
forcedType: string
helpLink: logstash.html
custom002: *pipeline_config
custom003: *pipeline_config
custom004: *pipeline_config
custom005: *pipeline_config
custom006: *pipeline_config
custom007: *pipeline_config
custom008: *pipeline_config
custom009: *pipeline_config
custom010: *pipeline_config
settings: settings:
lsheap: lsheap:
description: Heap size to use for logstash description: Heap size to use for logstash

View File

@@ -16,12 +16,14 @@ lockFile = "/tmp/so-yaml.lock"
def showUsage(args): def showUsage(args):
print('Usage: {} <COMMAND> <YAML_FILE> [ARGS...]'.format(sys.argv[0])) print('Usage: {} <COMMAND> <YAML_FILE> [ARGS...]'.format(sys.argv[0]))
print(' General commands:') print(' General commands:')
print(' append - Append a list item to a yaml key, if it exists and is a list. Requires KEY and LISTITEM args.')
print(' remove - Removes a yaml key, if it exists. Requires KEY arg.') print(' remove - Removes a yaml key, if it exists. Requires KEY arg.')
print(' help - Prints this usage information.') print(' help - Prints this usage information.')
print('') print('')
print(' Where:') print(' Where:')
print(' YAML_FILE - Path to the file that will be modified. Ex: /opt/so/conf/service/conf.yaml') print(' YAML_FILE - Path to the file that will be modified. Ex: /opt/so/conf/service/conf.yaml')
print(' KEY - YAML key, does not support \' or " characters at this time. Ex: level1.level2') print(' KEY - YAML key, does not support \' or " characters at this time. Ex: level1.level2')
print(' LISTITEM - Item to add to the list.')
sys.exit(1) sys.exit(1)
@@ -35,6 +37,35 @@ def writeYaml(filename, content):
file = open(filename, "w") file = open(filename, "w")
return yaml.dump(content, file) return yaml.dump(content, file)
def appendItem(content, key, listItem):
pieces = key.split(".", 1)
if len(pieces) > 1:
appendItem(content[pieces[0]], pieces[1], listItem)
else:
try:
content[key].append(listItem)
except AttributeError:
print("The existing value for the given key is not a list. No action was taken on the file.")
return 1
except KeyError:
print("The key provided does not exist. No action was taken on the file.")
return 1
def append(args):
if len(args) != 3:
print('Missing filename, key arg, or list item to append', file=sys.stderr)
showUsage(None)
return
filename = args[0]
key = args[1]
listItem = args[2]
content = loadYaml(filename)
appendItem(content, key, listItem)
writeYaml(filename, content)
return 0
def removeKey(content, key): def removeKey(content, key):
pieces = key.split(".", 1) pieces = key.split(".", 1)
@@ -69,6 +100,7 @@ def main():
commands = { commands = {
"help": showUsage, "help": showUsage,
"append": append,
"remove": remove, "remove": remove,
} }

View File

@@ -105,3 +105,99 @@ class TestRemove(unittest.TestCase):
self.assertEqual(actual, expected) self.assertEqual(actual, expected)
sysmock.assert_called_once_with(1) sysmock.assert_called_once_with(1)
self.assertIn(mock_stdout.getvalue(), "Missing filename or key arg\n") self.assertIn(mock_stdout.getvalue(), "Missing filename or key arg\n")
def test_append(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: abc }, key2: false, key3: [a,b,c]}")
file.close()
soyaml.append([filename, "key3", "d"])
file = open(filename, "r")
actual = file.read()
file.close()
expected = "key1:\n child1: 123\n child2: abc\nkey2: false\nkey3:\n- a\n- b\n- c\n- d\n"
self.assertEqual(actual, expected)
def test_append_nested(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: [a,b,c] }, key2: false, key3: [e,f,g]}")
file.close()
soyaml.append([filename, "key1.child2", "d"])
file = open(filename, "r")
actual = file.read()
file.close()
expected = "key1:\n child1: 123\n child2:\n - a\n - b\n - c\n - d\nkey2: false\nkey3:\n- e\n- f\n- g\n"
self.assertEqual(actual, expected)
def test_append_nested_deep(self):
filename = "/tmp/so-yaml_test-remove.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()
soyaml.append([filename, "key1.child2.deep2", "d"])
file = open(filename, "r")
actual = file.read()
file.close()
expected = "key1:\n child1: 123\n child2:\n deep1: 45\n deep2:\n - a\n - b\n - c\n - d\nkey2: false\nkey3:\n- e\n- f\n- g\n"
self.assertEqual(actual, expected)
def test_append_key_noexist(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key4", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The key provided does not exist. No action was taken on the file.\n")
def test_append_key_noexist_deep(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1.child2.deep3", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The key provided does not exist. No action was taken on the file.\n")
def test_append_key_nonlist(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The existing value for the given key is not a list. No action was taken on the file.\n")
def test_append_key_nonlist_deep(self):
filename = "/tmp/so-yaml_test-append.yaml"
file = open(filename, "w")
file.write("{key1: { child1: 123, child2: { deep1: 45, deep2: [a,b,c] } }, key2: false, key3: [e,f,g]}")
file.close()
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stdout', new=StringIO()) as mock_stdout:
sys.argv = ["cmd", "append", filename, "key1.child2.deep1", "h"]
soyaml.main()
sysmock.assert_called()
self.assertEqual(mock_stdout.getvalue(), "The existing value for the given key is not a list. No action was taken on the file.\n")

View File

@@ -372,6 +372,17 @@ enable_highstate() {
echo "" echo ""
} }
get_soup_script_hashes() {
CURRENTSOUP=$(md5sum /usr/sbin/soup | awk '{print $1}')
GITSOUP=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/soup | awk '{print $1}')
CURRENTCMN=$(md5sum /usr/sbin/so-common | awk '{print $1}')
GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
CURRENTIMGCMN=$(md5sum /usr/sbin/so-image-common | awk '{print $1}')
GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
CURRENTSOFIREWALL=$(md5sum /usr/sbin/so-firewall | awk '{print $1}')
GITSOFIREWALL=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/so-firewall | awk '{print $1}')
}
highstate() { highstate() {
# Run a highstate. # Run a highstate.
salt-call state.highstate -l info queue=True salt-call state.highstate -l info queue=True
@@ -583,6 +594,19 @@ up_to_2.4.50() {
touch /opt/so/saltstack/local/pillar/stig/adv_stig.sls touch /opt/so/saltstack/local/pillar/stig/adv_stig.sls
touch /opt/so/saltstack/local/pillar/stig/soc_stig.sls touch /opt/so/saltstack/local/pillar/stig/soc_stig.sls
# the file_roots need to be updated due to the salt 3006.6 upgrade not allowing symlinks outside the file_roots
# put new so-yaml in place
echo "Updating so-yaml"
\cp -v "$UPDATE_DIR/salt/manager/tools/sbin/so-yaml.py" "$DEFAULT_SALT_DIR/salt/manager/tools/sbin/"
\cp -v "$UPDATE_DIR/salt/manager/tools/sbin/so-yaml.py" /usr/sbin/
echo "Creating a backup of the salt-master config."
# INSTALLEDVERSION is 2.4.40 at this point, but we want the backup to have the version
# so was at prior to starting the upgrade. Use POSTVERSION here since it doesn't change until
# the post-upgrade changes. POSTVERSION is set to INSTALLEDVERSION at the start of soup
cp -v /etc/salt/master "/etc/salt/master.so-$POSTVERSION.bak"
echo "Adding /opt/so/rules to file_roots in /etc/salt/master using so-yaml"
so-yaml.py append /etc/salt/master file_roots.base /opt/so/rules
INSTALLEDVERSION=2.4.50 INSTALLEDVERSION=2.4.50
} }
@@ -758,31 +782,32 @@ upgrade_salt() {
} }
verify_latest_update_script() { verify_latest_update_script() {
# Check to see if the update scripts match. If not run the new one. get_soup_script_hashes
CURRENTSOUP=$(md5sum /usr/sbin/soup | awk '{print $1}')
GITSOUP=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/soup | awk '{print $1}')
CURRENTCMN=$(md5sum /usr/sbin/so-common | awk '{print $1}')
GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
CURRENTIMGCMN=$(md5sum /usr/sbin/so-image-common | awk '{print $1}')
GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')
CURRENTSOFIREWALL=$(md5sum /usr/sbin/so-firewall | awk '{print $1}')
GITSOFIREWALL=$(md5sum $UPDATE_DIR/salt/manager/tools/sbin/so-firewall | awk '{print $1}')
if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" && "$CURRENTSOFIREWALL" == "$GITSOFIREWALL" ]]; then if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" && "$CURRENTSOFIREWALL" == "$GITSOFIREWALL" ]]; then
echo "This version of the soup script is up to date. Proceeding." echo "This version of the soup script is up to date. Proceeding."
else else
echo "You are not running the latest soup version. Updating soup and its components. This might take multiple runs to complete." echo "You are not running the latest soup version. Updating soup and its components. This might take multiple runs to complete."
cp $UPDATE_DIR/salt/manager/tools/sbin/soup $DEFAULT_SALT_DIR/salt/common/tools/sbin/ cp $UPDATE_DIR/salt/manager/tools/sbin/soup $DEFAULT_SALT_DIR/salt/manager/tools/sbin/
cp $UPDATE_DIR/salt/common/tools/sbin/so-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/ cp $UPDATE_DIR/salt/common/tools/sbin/so-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
cp $UPDATE_DIR/salt/common/tools/sbin/so-image-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/ cp $UPDATE_DIR/salt/common/tools/sbin/so-image-common $DEFAULT_SALT_DIR/salt/common/tools/sbin/
cp $UPDATE_DIR/salt/manager/tools/sbin/so-firewall $DEFAULT_SALT_DIR/salt/common/tools/sbin/ cp $UPDATE_DIR/salt/manager/tools/sbin/so-firewall $DEFAULT_SALT_DIR/salt/manager/tools/sbin/
salt-call state.apply common.soup_scripts queue=True -linfo --file-root=$UPDATE_DIR/salt --local salt-call state.apply common.soup_scripts queue=True -linfo --file-root=$UPDATE_DIR/salt --local
# Verify that soup scripts updated as expected
get_soup_script_hashes
if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" && "$CURRENTSOFIREWALL" == "$GITSOFIREWALL" ]]; then
echo "Successfully updated soup scripts."
else
# When STIGs are enabled soup scripts will fail to update using --file-root --local.
# After checking that the expected hashes are not present, retry updating soup scripts using salt master.
echo "There was a problem updating soup scripts... Trying to rerun script update"
salt-call state.apply common.soup_scripts queue=True -linfo
fi
echo "" echo ""
echo "The soup script has been modified. Please run soup again to continue the upgrade." echo "The soup script has been modified. Please run soup again to continue the upgrade."
exit 0 exit 0
fi fi
}
}
# Keeping this block in case we need to do a hotfix that requires salt update # Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() { apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -925,9 +950,6 @@ main() {
systemctl_func "stop" "$cron_service_name" systemctl_func "stop" "$cron_service_name"
# update mine items prior to stopping salt-minion and salt-master
update_salt_mine
echo "Updating dockers to $NEWVERSION." echo "Updating dockers to $NEWVERSION."
if [[ $is_airgap -eq 0 ]]; then if [[ $is_airgap -eq 0 ]]; then
airgap_update_dockers airgap_update_dockers
@@ -1003,6 +1025,9 @@ main() {
salt-call state.apply salt.minion -l info queue=True salt-call state.apply salt.minion -l info queue=True
echo "" echo ""
# ensure the mine is updated and populated before highstates run, following the salt-master restart
update_salt_mine
enable_highstate enable_highstate
echo "" echo ""

View File

@@ -1,4 +1,4 @@
# version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
salt: salt:
master: master:
version: 3006.5 version: 3006.6

View File

@@ -1,6 +1,6 @@
# version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched # version cannot be used elsewhere in this pillar as soup is grepping for it to determine if Salt needs to be patched
salt: salt:
minion: minion:
version: 3006.5 version: 3006.6
check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default check_threshold: 3600 # in seconds, threshold used for so-salt-minion-check. any value less than 600 seconds may cause a lot of salt-minion restarts since the job to touch the file occurs every 5-8 minutes by default
service_start_delay: 30 # in seconds. service_start_delay: 30 # in seconds.

View File

@@ -9,7 +9,7 @@ soc:
icon: fa-crosshairs icon: fa-crosshairs
target: target:
links: links:
- '/#/hunt?q="{value|escape}" | groupby event.module* event.dataset' - '/#/hunt?q="{value|escape}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- name: actionAddToCase - name: actionAddToCase
description: actionAddToCaseHelp description: actionAddToCaseHelp
icon: fa-briefcase icon: fa-briefcase
@@ -23,13 +23,13 @@ soc:
icon: fab fa-searchengin icon: fab fa-searchengin
target: '' target: ''
links: links:
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* event.dataset' - '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}") | groupby event.module* event.dataset' - '/#/hunt?q=("{:log.id.fuid}" OR "{:log.id.uid}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q=("{:log.id.fuid}" OR "{:network.community_id}") | groupby event.module* event.dataset' - '/#/hunt?q=("{:log.id.fuid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q=("{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* event.dataset' - '/#/hunt?q=("{:log.id.uid}" OR "{:network.community_id}") | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q="{:log.id.fuid}" | groupby event.module* event.dataset' - '/#/hunt?q="{:log.id.fuid}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q="{:log.id.uid}" | groupby event.module* event.dataset' - '/#/hunt?q="{:log.id.uid}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- '/#/hunt?q="{:network.community_id}" | groupby event.module* event.dataset' - '/#/hunt?q="{:network.community_id}" | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby file.mime_type | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby ssl.server_name | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- name: actionPcap - name: actionPcap
description: actionPcapHelp description: actionPcapHelp
icon: fa-stream icon: fa-stream
@@ -1424,8 +1424,11 @@ soc:
- name: Zeek Notice - name: Zeek Notice
description: Zeek notice logs description: Zeek notice logs
query: 'event.dataset:zeek.notice | groupby -sankey notice.note destination.ip | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name' query: 'event.dataset:zeek.notice | groupby -sankey notice.note destination.ip | groupby notice.note | groupby notice.message | groupby notice.sub_message | groupby source.ip | groupby destination.ip | groupby destination.port | groupby destination_geo.organization_name'
- name: Connections - name: Connections and Metadata with community_id
description: Network connection metadata description: Network connections that include community_id
query: '_exists_:network.community_id | groupby event.module* | groupby -sankey event.module* event.dataset | groupby event.dataset | groupby source.ip source.port destination.ip destination.port | groupby network.protocol | groupby source_geo.organization_name source.geo.country_name | groupby destination_geo.organization_name destination.geo.country_name | groupby rule.name rule.category event.severity_label | groupby dns.query.name | groupby http.virtual_host http.uri | groupby notice.note notice.message notice.sub_message | groupby source.ip host.hostname user.name event.action event.type process.executable process.pid'
- name: Connections seen by Zeek or Suricata
description: Network connections logged by Zeek or Suricata
query: 'tags:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby -sankey destination.port network.protocol | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name | groupby client.ip_bytes | groupby server.ip_bytes | groupby client.oui' query: 'tags:conn | groupby source.ip | groupby destination.ip | groupby destination.port | groupby -sankey destination.port network.protocol | groupby network.protocol | groupby network.transport | groupby connection.history | groupby connection.state | groupby connection.state_description | groupby source.geo.country_name | groupby destination.geo.country_name | groupby client.ip_bytes | groupby server.ip_bytes | groupby client.oui'
- name: DCE_RPC - name: DCE_RPC
description: DCE_RPC (Distributed Computing Environment / Remote Procedure Calls) network metadata description: DCE_RPC (Distributed Computing Environment / Remote Procedure Calls) network metadata
@@ -1562,6 +1565,9 @@ soc:
- name: Firewall - name: Firewall
description: Firewall logs description: Firewall logs
query: 'observer.type:firewall | groupby -sankey event.action observer.ingress.interface.name | groupby event.action | groupby observer.ingress.interface.name | groupby network.type | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port' query: 'observer.type:firewall | groupby -sankey event.action observer.ingress.interface.name | groupby event.action | groupby observer.ingress.interface.name | groupby network.type | groupby network.transport | groupby source.ip | groupby destination.ip | groupby destination.port'
- name: Firewall Auth
description: Firewall authentication logs
query: 'observer.type:firewall AND event.category:authentication | groupby user.name | groupby -sankey user.name source.ip | groupby source.ip | table soc_timestamp user.name source.ip message'
- name: VLAN - name: VLAN
description: VLAN (Virtual Local Area Network) tagged logs description: VLAN (Virtual Local Area Network) tagged logs
query: '* AND _exists_:network.vlan.id | groupby network.vlan.id | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby event.dataset | groupby event.module | groupby observer.name | groupby source.geo.country_name | groupby destination.geo.country_name' query: '* AND _exists_:network.vlan.id | groupby network.vlan.id | groupby source.ip | groupby -sankey source.ip destination.ip | groupby destination.ip | groupby destination.port | groupby event.dataset | groupby event.module | groupby observer.name | groupby source.geo.country_name | groupby destination.geo.country_name'

View File

@@ -50,7 +50,7 @@ update_stig_profile:
run_initial_scan: run_initial_scan:
module.run: module.run:
- name: openscap.xccdf - name: openscap.xccdf
- params: 'eval --remediate --profile {{ OSCAP_PROFILE_NAME }} --results {{ OSCAP_OUTPUT_DIR }}/pre-oscap-results.xml --report {{ OSCAP_OUTPUT_DIR }}/pre-oscap-report.html {{ OSCAP_PROFILE_LOCATION }}' - params: 'eval --profile {{ OSCAP_PROFILE_NAME }} --results {{ OSCAP_OUTPUT_DIR }}/pre-oscap-results.xml --report {{ OSCAP_OUTPUT_DIR }}/pre-oscap-report.html {{ OSCAP_PROFILE_LOCATION }}'
{% endif %} {% endif %}
run_remediate: run_remediate:

View File

@@ -84,10 +84,12 @@ suridatadir:
- mode: 770 - mode: 770
- makedirs: True - makedirs: True
# salt:// would resolve to /opt/so/rules because of the defined file_roots and
# nids not existing under /opt/so/saltstack/local/salt or /opt/so/saltstack/default/salt
surirulesync: surirulesync:
file.recurse: file.recurse:
- name: /opt/so/conf/suricata/rules/ - name: /opt/so/conf/suricata/rules/
- source: salt://suricata/rules/ - source: salt://nids/
- user: 940 - user: 940
- group: 940 - group: 940
- show_changes: False - show_changes: False

View File

@@ -1933,7 +1933,11 @@ saltify() {
logCmd "dnf -y install salt-$SALTVERSION salt-master-$SALTVERSION salt-minion-$SALTVERSION" logCmd "dnf -y install salt-$SALTVERSION salt-master-$SALTVERSION salt-minion-$SALTVERSION"
else else
# We just need the minion # We just need the minion
logCmd "dnf -y install salt-$SALTVERSION salt-minion-$SALTVERSION" if [[ $is_airgap ]]; then
logCmd "dnf -y install salt salt-minion"
else
logCmd "dnf -y install salt-$SALTVERSION salt-minion-$SALTVERSION"
fi
fi fi
fi fi