This commit is contained in:
m0duspwnens
2020-05-13 15:02:55 -04:00
20 changed files with 5110 additions and 259 deletions

View File

@@ -0,0 +1 @@
mastersearchtab:

View File

@@ -13,6 +13,20 @@ socore:
- createhome: True
- shell: /bin/bash
# Create a state directory
statedir:
file.directory:
- name: /opt/so/state
- user: 939
- group: 939
- makedirs: True
salttmp:
file.directory:
- name: /opt/so/tmp
- user: 939
- group: 939
- makedirs: True
# Install packages needed for the sensor
sensorpkgs:

View File

@@ -1,107 +0,0 @@
# -*- coding: utf-8 -*-
# HiveAlerter modified from original at: https://raw.githubusercontent.com/Nclose-ZA/elastalert_hive_alerter/master/elastalert_hive_alerter/hive_alerter.py
import uuid
from elastalert.alerts import Alerter
from thehive4py.api import TheHiveApi
from thehive4py.models import Alert, AlertArtifact, CustomFieldHelper
class TheHiveAlerter(Alerter):
    """Create alerts (with observables) in TheHive from ElastAlert match data.

    Configuration is read from the rule: ``hive_connection`` supplies the API
    endpoint/credentials, ``hive_alert_config`` supplies the alert fields, and
    ``hive_observable_data_mapping`` maps observable types to format strings.
    """

    # Rule options that must be present for ElastAlert to load this alerter.
    required_options = set(['hive_connection', 'hive_alert_config'])

    def get_aggregation_summary_text(self, matches):
        """Return the inherited aggregation summary wrapped in a code fence."""
        summary = super(TheHiveAlerter, self).get_aggregation_summary_text(matches)
        if not summary:
            return summary
        return '```\n{0}```\n'.format(summary)

    def create_artifacts(self, match):
        """Build AlertArtifact observables from the rule's data mapping.

        Each mapping entry pairs an observable type with a format string that
        is rendered against ``{'rule': ..., 'match': ...}``. Entries whose
        format keys are absent from the context are skipped, with a message
        printed so the miss is visible in the ElastAlert log.
        """
        ctx = {'rule': self.rule, 'match': match}
        observables = []
        for mapping in self.rule.get('hive_observable_data_mapping', []):
            for observable_type, match_data_key in mapping.items():
                try:
                    observables.append(
                        AlertArtifact(dataType=observable_type,
                                      data=match_data_key.format(**ctx)))
                except KeyError as e:
                    print(('format string {} fail cause no key {} in {}'.format(e, match_data_key, ctx)))
        return observables

    def create_alert_config(self, match):
        """Render the rule's hive_alert_config into kwargs for thehive4py Alert.

        Starts from computed defaults (artifacts, a short random sourceRef and
        the rule name as title), overlays the rule's ``hive_alert_config``,
        then format-renders every string / list / customFields value against
        the rule+match context.
        """
        ctx = {'rule': self.rule, 'match': match}
        alert_config = {
            'artifacts': self.create_artifacts(match),
            # sourceRef only needs to be unique-ish; first 6 chars of a UUID.
            'sourceRef': str(uuid.uuid4())[0:6],
            'title': '{rule[name]}'.format(**ctx),
        }
        alert_config.update(self.rule.get('hive_alert_config', {}))
        # Mutating values of existing keys while iterating is safe here
        # because no keys are added or removed.
        for field_name in list(alert_config):
            value = alert_config[field_name]
            if field_name == 'customFields':
                alert_config[field_name] = self._build_custom_fields(value, ctx)
            elif isinstance(value, str):
                alert_config[field_name] = value.format(**ctx)
            elif isinstance(value, (list, tuple)):
                alert_config[field_name] = self._format_sequence(value, ctx)
        return alert_config

    def _build_custom_fields(self, field_specs, ctx):
        """Translate a customFields spec dict into TheHive's wire format.

        Each spec is ``{'type': <hive type>, 'value': <format string>}``;
        the type selects an ``add_<type>`` method on CustomFieldHelper and an
        unknown type raises.
        """
        helper = CustomFieldHelper()
        for cf_key, cf_value in field_specs.items():
            try:
                add_field = getattr(helper, 'add_{}'.format(cf_value['type']))
            except AttributeError:
                raise Exception('unsupported custom field type {}'.format(cf_value['type']))
            add_field(cf_key, cf_value['value'].format(**ctx))
        return helper.build()

    def _format_sequence(self, values, ctx):
        """Format each element against ctx; keep non-formattable items as-is."""
        rendered = []
        for element in values:
            try:
                rendered.append(element.format(**ctx))
            except (AttributeError, KeyError, IndexError):
                rendered.append(element)
        return rendered

    def send_to_thehive(self, alert_config):
        """Submit one alert to TheHive; raise unless the API returns 201."""
        connection_details = self.rule['hive_connection']
        api = TheHiveApi(
            connection_details.get('hive_host', ''),
            connection_details.get('hive_apikey', ''),
            proxies=connection_details.get('hive_proxies', {'http': '', 'https': ''}),
            # NOTE(review): 'hive_verify' is passed as thehive4py's 'cert'
            # argument, which that library forwards as the TLS verification
            # setting — confirm against the installed thehive4py version.
            cert=connection_details.get('hive_verify', False))
        response = api.create_alert(Alert(**alert_config))
        if response.status_code != 201:
            raise Exception('alert not successfully created in TheHive\n{}'.format(response.text))

    def alert(self, matches):
        """Entry point invoked by ElastAlert with the list of matches.

        Any ``hive_alert_config_type`` other than 'classic' (default
        'custom') sends one alert per match; 'classic' aggregates every
        match (and its related_events) into a single alert.
        """
        if self.rule.get('hive_alert_config_type', 'custom') != 'classic':
            for match in matches:
                self.send_to_thehive(self.create_alert_config(match))
            return
        # Classic mode: seed the config from the first match, then collect
        # observables from every match before sending a single alert.
        alert_config = self.create_alert_config(matches[0])
        observables = []
        for match in matches:
            observables += self.create_artifacts(match)
            if 'related_events' in match:
                for related_event in match['related_events']:
                    observables += self.create_artifacts(related_event)
        alert_config['artifacts'] = observables
        alert_config['title'] = self.create_title(matches)
        alert_config['description'] = self.create_alert_body(matches)
        self.send_to_thehive(alert_config)

    def get_info(self):
        """Metadata ElastAlert records about this alerter instance."""
        return {
            'type': 'hivealerter',
            'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '')
        }

View File

@@ -26,7 +26,8 @@ filter:
alert: modules.so.thehive.TheHiveAlerter
hive_connection:
hive_host: https://{{hivehost}}/thehive/
hive_host: http://{{hivehost}}
hive_port: 9000
hive_apikey: {{hivekey}}
hive_proxies:

View File

@@ -55,35 +55,35 @@ elastalogdir:
file.directory:
- name: /opt/so/log/elastalert
- user: 933
- group: 939
- group: 933
- makedirs: True
elastarules:
file.directory:
- name: /opt/so/rules/elastalert
- user: 933
- group: 939
- group: 933
- makedirs: True
elastaconfdir:
file.directory:
- name: /opt/so/conf/elastalert
- user: 933
- group: 939
- group: 933
- makedirs: True
elastasomodulesdir:
file.directory:
- name: /opt/so/conf/elastalert/modules/so
- user: 933
- group: 939
- group: 933
- makedirs: True
elastacustmodulesdir:
file.directory:
- name: /opt/so/conf/elastalert/modules/custom
- user: 933
- group: 939
- group: 933
- makedirs: True
elastasomodulesync:
@@ -91,7 +91,7 @@ elastasomodulesync:
- name: /opt/so/conf/elastalert/modules/so
- source: salt://elastalert/files/modules/so
- user: 933
- group: 939
- group: 933
- makedirs: True
elastarulesync:
@@ -99,7 +99,7 @@ elastarulesync:
- name: /opt/so/rules/elastalert
- source: salt://elastalert/files/rules/so
- user: 933
- group: 939
- group: 933
- template: jinja
elastaconf:
@@ -107,7 +107,7 @@ elastaconf:
- name: /opt/so/conf/elastalert/elastalert_config.yaml
- source: salt://elastalert/files/elastalert_config.yaml
- user: 933
- group: 939
- group: 933
- template: jinja
so-elastalert:
@@ -118,16 +118,9 @@ so-elastalert:
- user: elastalert
- detach: True
- binds:
- /opt/so/rules/elastalert:/etc/elastalert/rules/:ro
- /opt/so/rules/elastalert:/opt/rules/:ro
- /opt/so/log/elastalert:/var/log/elastalert:rw
- /opt/so/conf/elastalert/modules/:/opt/elastalert/modules/:ro
- /opt/so/conf/elastalert/elastalert_config.yaml:/etc/elastalert/conf/elastalert_config.yaml:ro
- environment:
- ELASTICSEARCH_HOST: {{ esip }}
- ELASTICSEARCH_PORT: {{ esport }}
- ELASTALERT_CONFIG: /etc/elastalert/conf/elastalert_config.yaml
- ELASTALERT_SUPERVISOR_CONF: /etc/elastalert/conf/elastalert_supervisord.conf
- RULES_DIRECTORY: /etc/elastalert/rules/
- LOG_DIR: /var/log/elastalert
- /opt/so/conf/elastalert/elastalert_config.yaml:/opt/config/elastalert_config.yaml:ro
{% endif %}

View File

@@ -1226,7 +1226,7 @@
},
{
"params": [
" / 5"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1365,7 +1365,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1504,7 +1504,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1643,7 +1643,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}

View File

@@ -290,7 +290,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -430,7 +430,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1046,7 +1046,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1186,7 +1186,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1326,7 +1326,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}

File diff suppressed because it is too large Load Diff

View File

@@ -298,7 +298,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -438,7 +438,7 @@
},
{
"params": [
" / 16"
" / {{ CPUS }}"
],
"type": "math"
}

View File

@@ -1326,7 +1326,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1465,7 +1465,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}
@@ -1604,7 +1604,7 @@
},
{
"params": [
" / 8"
" / {{ CPUS }}"
],
"type": "math"
}

View File

@@ -10,6 +10,13 @@ providers:
editable: true
options:
path: /etc/grafana/grafana_dashboards/master
- name: 'Master Search'
folder: 'Master Search'
type: file
disableDeletion: false
editable: true
options:
path: /etc/grafana/grafana_dashboards/mastersearch
- name: 'Sensor Nodes'
folder: 'Sensor Nodes'
type: file

View File

@@ -33,6 +33,13 @@ grafanadashmdir:
- group: 939
- makedirs: True
grafanadashmsdir:
file.directory:
- name: /opt/so/conf/grafana/grafana_dashboards/mastersearch
- user: 939
- group: 939
- makedirs: True
grafanadashevaldir:
file.directory:
- name: /opt/so/conf/grafana/grafana_dashboards/eval
@@ -85,6 +92,29 @@ dashboard-master:
{% endfor %}
{% endif %}
{% if salt['pillar.get']('mastersearchtab', False) %}
{% for SN, SNDATA in salt['pillar.get']('mastersearchtab', {}).items() %}
{% set NODETYPE = SN.split('_')|last %}
{% set SN = SN | regex_replace('_' ~ NODETYPE, '') %}
dashboard-master:
file.managed:
- name: /opt/so/conf/grafana/grafana_dashboards/mastersearch/{{ SN }}-MasterSearch.json
- user: 939
- group: 939
- template: jinja
- source: salt://grafana/dashboards/mastersearch/mastersearch.json
- defaults:
SERVERNAME: {{ SN }}
MANINT: {{ SNDATA.manint }}
MONINT: {{ SNDATA.manint }}
CPUS: {{ SNDATA.totalcpus }}
UID: {{ SNDATA.guid }}
ROOTFS: {{ SNDATA.rootfs }}
NSMFS: {{ SNDATA.nsmfs }}
{% endfor %}
{% endif %}
{% if salt['pillar.get']('sensorstab', False) %}
{% for SN, SNDATA in salt['pillar.get']('sensorstab', {}).items() %}
{% set NODETYPE = SN.split('_')|last %}

File diff suppressed because one or more lines are too long

View File

@@ -1,18 +1,4 @@
# Create a state directory
statedir:
file.directory:
- name: /opt/so/state
- user: 939
- group: 939
- makedirs: True
salttmp:
file.directory:
- name: /opt/so/tmp
- user: 939
- group: 939
- makedirs: True
{% if grains['os'] != 'CentOS' %}
saltpymodules:

View File

@@ -82,65 +82,65 @@
"wineventlog": ["soc_timestamp", "source.ip", "source.port", "destination.ip", "destination.port", "computer_name", "event_id", "log_name", "source_name", "task" ]
},
"queries": [
{ "name": "Default Query", "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name.keyword"},
{ "name": "Elastalerts", "description": "", "query": "_type:elastalert | groupby rule.name.keyword"},
{ "name": "Alerts", "description": "Show all alerts grouped by alert source", "query": "event.dataset.keyword: alert | groupby event.module.keyword"},
{ "name": "NIDS Alerts", "description": "Show all NIDS alerts grouped by alert name", "query": "event.category: network AND event.dataset: alert | groupby rule.name.keyword"},
{ "name": "OSSEC Alerts", "description": "", "query": "event_type:ossec AND alert | groupby rule.category.keyword"},
{ "name": "OSSEC Commands", "description": "", "query": "event_type:ossec AND alert | groupby process.command_line.keyword"},
{ "name": "OSSEC Processes", "description": "", "query": "event_type:ossec AND alert | groupby process.name.keyword"},
{ "name": "OSSEC Users", "description": "", "query": "event_type:ossec AND alert | groupby user.name.keyword"},
{ "name": "Default Query", "description": "Show all events grouped by the origin host", "query": "* | groupby observer.name"},
{ "name": "Elastalerts", "description": "", "query": "_type:elastalert | groupby rule.name"},
{ "name": "Alerts", "description": "Show all alerts grouped by alert source", "query": "event.dataset: alert | groupby event.module"},
{ "name": "NIDS Alerts", "description": "Show all NIDS alerts grouped by alert name", "query": "event.category: network AND event.dataset: alert | groupby rule.name"},
{ "name": "OSSEC Alerts", "description": "", "query": "event_type:ossec AND alert | groupby rule.category"},
{ "name": "OSSEC Commands", "description": "", "query": "event_type:ossec AND alert | groupby process.command_line"},
{ "name": "OSSEC Processes", "description": "", "query": "event_type:ossec AND alert | groupby process.name"},
{ "name": "OSSEC Users", "description": "", "query": "event_type:ossec AND alert | groupby user.name"},
{ "name": "SYSMON", "description": "", "query": "event_type:sysmon | groupby event_id"},
{ "name": "SYSMON", "description": "", "query": "event_type:sysmon | groupby username"},
{ "name": "Zeek Notice", "description": "Show notices from Zeek", "query": "event.module.keyword:zeek AND event.dataset:notice | groupby notice.note.keyword,notice.message.keyword"},
{ "name": "Connections", "description": "Connections grouped by IP and Port", "query": "event.module.keyword:zeek AND event.dataset:conn | groupby source.ip.keyword,destination.ip.keyword,network.protocol.keyword,destination.port"},
{ "name": "Connections", "description": "Connections grouped by Service", "query": "event.module.keyword:zeek AND event.dataset:conn | groupby network.protocol.keyword,destination.port"},
{ "name": "Connections", "description": "Connections grouped by destination Geo", "query": "event.module.keyword:zeek AND event.dataset:conn | groupby destination_geo.country_name"},
{ "name": "Connections", "description": "Connections grouped by source Geo", "query": "event.module.keyword:zeek AND event.dataset:conn | groupby source.geo.country_name.keyword"},
{ "name": "DCE_RPC", "description": "DCE_RPC grouped by operation", "query": "event.module.keyword:zeek AND event.dataset:dce_rpc | groupby operation.keyword"},
{ "name": "DHCP", "description": "DHCP leases", "query": "event.module.keyword:zeek AND event.dataset:dhcp | groupby host.hostname.keyword,host.domain.keyword,destination.ip.keyword"},
{ "name": "DHCP", "description": "DHCP grouped by message type", "query": "event.module.keyword:zeek AND event.dataset:dhcp | groupby message_types.keyword"},
{ "name": "DNP3", "description": "DNP3 grouped by reply", "query": "event.module.keyword:zeek AND event.dataset:dnp3 | groupby dnp3.fc_reply.keyword"},
{ "name": "DNS", "description": "DNS queries grouped by port ", "query": "event.module.keyword:zeek AND event.dataset:dns | groupby dns.query.name.keyword,destination.port"},
{ "name": "DNS", "description": "DNS queries grouped by type", "query": "event.module.keyword:zeek AND event.dataset:dns | groupby dns.query.type_name.keyword,destination.port"},
{ "name": "DNS", "description": "DNS highest registered domain", "query": "event.module.keyword:zeek AND event.dataset:dns | groupby highest_registered_domain"},
{ "name": "DNS", "description": "DNS grouped by parent domain", "query": "event.module.keyword:zeek AND event.dataset:dns | groupby parent_domain"},
{ "name": "Files", "description": "Files grouped by mimetype", "query": "event.module.keyword:zeek AND event.dataset:files | groupby file.mime_type.keyword source.ip.keyword"},
{ "name": "FTP", "description": "FTP grouped by argument", "query": "event.module.keyword:zeek AND event.dataset:ftp | groupby ftp_argument"},
{ "name": "FTP", "description": "FTP grouped by command", "query": "event.module.keyword:zeek AND event.dataset:ftp | groupby ftp.command.keyword"},
{ "name": "FTP", "description": "FTP grouped by username", "query": "event.module.keyword:zeek AND event.dataset:ftp | groupby ftp.user.keyword"},
{ "name": "HTTP", "description": "HTTP grouped by destination port", "query": "event.module.keyword:zeek AND event.dataset:http | groupby destination.port"},
{ "name": "HTTP", "description": "HTTP grouped by method", "query": "event.module.keyword:zeek AND event.dataset:http | groupby http.method.keyword"},
{ "name": "HTTP", "description": "HTTP grouped by status code", "query": "event.module.keyword:zeek AND event.dataset:http | groupby http.status_code"},
{ "name": "HTTP", "description": "HTTP grouped by status message", "query": "event.module.keyword:zeek AND event.dataset:http | groupby http.status_message.keyword"},
{ "name": "HTTP", "description": "HTTP grouped by user agent", "query": "event.module.keyword:zeek AND event.dataset:http | groupby http.useragent.keyword"},
{ "name": "HTTP", "description": "HTTP grouped by virtual host", "query": "event.module.keyword:zeek AND event.dataset:http | groupby http.virtual_host.keyword"},
{ "name": "HTTP", "description": "HTTP with exe downloads", "query": "event.module.keyword:zeek AND event.dataset:http AND resp_mime_types:dosexec | groupby http.virtual_host.keyword"},
{ "name": "Intel", "description": "Intel framework hits grouped by indicator", "query": "event.module.keyword:zeek AND event.dataset:intel | groupby intel.indicator.keyword"},
{ "name": "IRC", "description": "IRC grouped by command", "query": "event.module.keyword:zeek AND event.dataset:irc | groupby irc.command.type.keyword"},
{ "name": "KERBEROS", "description": "KERBEROS grouped by service", "query": "event.module.keyword:zeek AND event.dataset:kerberos | groupby kerberos.service.keyword"},
{ "name": "MODBUS", "description": "MODBUS grouped by function", "query": "event.module.keyword:zeek AND event.dataset:modbus | groupby modbus.function.keyword"},
{ "name": "MYSQL", "description": "MYSQL grouped by command", "query": "event.module.keyword:zeek AND event.dataset:mysql | groupby mysql.command.keyword"},
{ "name": "NOTICE", "description": "Zeek notice logs grouped by note", "query": "event.module.keyword:zeek AND event.dataset:notice | groupby notice.note.keyword"},
{ "name": "NOTICE", "description": "Zeek notice logs grouped by message", "query": "event.module.keyword:zeek AND event.dataset:notice | groupby notice.message.keyword"},
{ "name": "NTLM", "description": "NTLM grouped by computer name", "query": "event.module.keyword:zeek AND event.dataset:ntlm | groupby ntlm.server.dns.name.keyword"},
{ "name": "PE", "description": "PE files list", "query": "event.module.keyword:zeek AND event.dataset:pe | groupby file.machine.keyword,file.os.keyword,file.subsystem.keyword"},
{ "name": "RADIUS", "description": "RADIUS grouped by username", "query": "event.module.keyword:zeek AND event.dataset:radius | groupby user.name.keyword"},
{ "name": "RDP", "description": "RDP grouped by client name", "query": "event.module.keyword:zeek AND event.dataset:rdp | groupby client.name.keyword"},
{ "name": "RFB", "description": "RFB grouped by desktop name", "query": "event.module.keyword:zeek AND event.dataset:rfb | groupby rfp.desktop.name.keyword"},
{ "name": "Signatures", "description": "Zeek signatures grouped by signature id", "query": "event.module.keyword:zeek AND event.dataset:signatures | groupby signature_id"},
{ "name": "SIP", "description": "SIP grouped by user agent", "query": "event.module.keyword:zeek AND event.dataset:sip | groupby client.user_agent.keyword"},
{ "name": "SMB_Files", "description": "SMB files grouped by action", "query": "event.module.keyword:zeek AND event.dataset:smb_files | groupby file.action.keyword"},
{ "name": "SMB_Mapping", "description": "SMB mapping grouped by path", "query": "event.module.keyword:zeek AND event.dataset:smb_mapping | groupby file.path.keyword"},
{ "name": "SMTP", "description": "SMTP grouped by subject", "query": "event.module.keyword:zeek AND event.dataset:smtp | groupby smtp.subject.keyword"},
{ "name": "SNMP", "description": "SNMP grouped by version and string", "query": "event.module.keyword:zeek AND event.dataset:snmp | groupby snmp.community.keyword,snmp.version.keyword"},
{ "name": "Software", "description": "List of software seen on the network", "query": "event.module.keyword:zeek AND event.dataset:software | groupby software.type.keyword,software.name.keyword"},
{ "name": "SSH", "description": "SSH grouped by version", "query": "event.module.keyword:zeek AND event.dataset:ssh | groupby ssh.version.keyword"},
{ "name": "SSL", "description": "SSL grouped by version and server name", "query": "event.module.keyword:zeek AND event.dataset:ssl | groupby ssl.version.keyword,ssl.server_name.keyword"},
{ "name": "SYSLOG", "description": "SYSLOG grouped by severity and facility ", "query": "event.module.keyword:zeek AND event.dataset:syslog | groupby syslog.severity.keyword,syslog.facility.keyword"},
{ "name": "Tunnels", "description": "Tunnels grouped by action", "query": "event.module.keyword:zeek AND event.dataset:tunnels | groupby event.action.keyword"},
{ "name": "Weird", "description": "Zeek weird log grouped by name", "query": "event.module.keyword:zeek AND event.dataset:weird | groupby weird.name.keyword"},
{ "name": "x509", "description": "x.509 grouped by key length", "query": "event.module.keyword:zeek AND event.dataset:x509 | groupby x509.certificate.key.length"},
{ "name": "Zeek Notice", "description": "Show notices from Zeek", "query": "event.module:zeek AND event.dataset:notice | groupby notice.note,notice.message"},
{ "name": "Connections", "description": "Connections grouped by IP and Port", "query": "event.module:zeek AND event.dataset:conn | groupby source.ip,destination.ip,network.protocol,destination.port"},
{ "name": "Connections", "description": "Connections grouped by Service", "query": "event.module:zeek AND event.dataset:conn | groupby network.protocol,destination.port"},
{ "name": "Connections", "description": "Connections grouped by destination Geo", "query": "event.module:zeek AND event.dataset:conn | groupby destination_geo.country_name"},
{ "name": "Connections", "description": "Connections grouped by source Geo", "query": "event.module:zeek AND event.dataset:conn | groupby source.geo.country_name"},
{ "name": "DCE_RPC", "description": "DCE_RPC grouped by operation", "query": "event.module:zeek AND event.dataset:dce_rpc | groupby operation"},
{ "name": "DHCP", "description": "DHCP leases", "query": "event.module:zeek AND event.dataset:dhcp | groupby host.hostname,host.domain,destination.ip"},
{ "name": "DHCP", "description": "DHCP grouped by message type", "query": "event.module:zeek AND event.dataset:dhcp | groupby message_types"},
{ "name": "DNP3", "description": "DNP3 grouped by reply", "query": "event.module:zeek AND event.dataset:dnp3 | groupby dnp3.fc_reply"},
{ "name": "DNS", "description": "DNS queries grouped by port ", "query": "event.module:zeek AND event.dataset:dns | groupby dns.query.name,destination.port"},
{ "name": "DNS", "description": "DNS queries grouped by type", "query": "event.module:zeek AND event.dataset:dns | groupby dns.query.type_name,destination.port"},
{ "name": "DNS", "description": "DNS highest registered domain", "query": "event.module:zeek AND event.dataset:dns | groupby highest_registered_domain"},
{ "name": "DNS", "description": "DNS grouped by parent domain", "query": "event.module:zeek AND event.dataset:dns | groupby parent_domain"},
{ "name": "Files", "description": "Files grouped by mimetype", "query": "event.module:zeek AND event.dataset:files | groupby file.mime_type source.ip"},
{ "name": "FTP", "description": "FTP grouped by argument", "query": "event.module:zeek AND event.dataset:ftp | groupby ftp_argument"},
{ "name": "FTP", "description": "FTP grouped by command", "query": "event.module:zeek AND event.dataset:ftp | groupby ftp.command"},
{ "name": "FTP", "description": "FTP grouped by username", "query": "event.module:zeek AND event.dataset:ftp | groupby ftp.user"},
{ "name": "HTTP", "description": "HTTP grouped by destination port", "query": "event.module:zeek AND event.dataset:http | groupby destination.port"},
{ "name": "HTTP", "description": "HTTP grouped by method", "query": "event.module:zeek AND event.dataset:http | groupby http.method"},
{ "name": "HTTP", "description": "HTTP grouped by status code", "query": "event.module:zeek AND event.dataset:http | groupby http.status_code"},
{ "name": "HTTP", "description": "HTTP grouped by status message", "query": "event.module:zeek AND event.dataset:http | groupby http.status_message"},
{ "name": "HTTP", "description": "HTTP grouped by user agent", "query": "event.module:zeek AND event.dataset:http | groupby http.useragent"},
{ "name": "HTTP", "description": "HTTP grouped by virtual host", "query": "event.module:zeek AND event.dataset:http | groupby http.virtual_host"},
{ "name": "HTTP", "description": "HTTP with exe downloads", "query": "event.module:zeek AND event.dataset:http AND resp_mime_types:dosexec | groupby http.virtual_host"},
{ "name": "Intel", "description": "Intel framework hits grouped by indicator", "query": "event.module:zeek AND event.dataset:intel | groupby intel.indicator"},
{ "name": "IRC", "description": "IRC grouped by command", "query": "event.module:zeek AND event.dataset:irc | groupby irc.command.type"},
{ "name": "KERBEROS", "description": "KERBEROS grouped by service", "query": "event.module:zeek AND event.dataset:kerberos | groupby kerberos.service"},
{ "name": "MODBUS", "description": "MODBUS grouped by function", "query": "event.module:zeek AND event.dataset:modbus | groupby modbus.function"},
{ "name": "MYSQL", "description": "MYSQL grouped by command", "query": "event.module:zeek AND event.dataset:mysql | groupby mysql.command"},
{ "name": "NOTICE", "description": "Zeek notice logs grouped by note", "query": "event.module:zeek AND event.dataset:notice | groupby notice.note"},
{ "name": "NOTICE", "description": "Zeek notice logs grouped by message", "query": "event.module:zeek AND event.dataset:notice | groupby notice.message"},
{ "name": "NTLM", "description": "NTLM grouped by computer name", "query": "event.module:zeek AND event.dataset:ntlm | groupby ntlm.server.dns.name"},
{ "name": "PE", "description": "PE files list", "query": "event.module:zeek AND event.dataset:pe | groupby file.machine,file.os,file.subsystem"},
{ "name": "RADIUS", "description": "RADIUS grouped by username", "query": "event.module:zeek AND event.dataset:radius | groupby user.name"},
{ "name": "RDP", "description": "RDP grouped by client name", "query": "event.module:zeek AND event.dataset:rdp | groupby client.name"},
{ "name": "RFB", "description": "RFB grouped by desktop name", "query": "event.module:zeek AND event.dataset:rfb | groupby rfp.desktop.name"},
{ "name": "Signatures", "description": "Zeek signatures grouped by signature id", "query": "event.module:zeek AND event.dataset:signatures | groupby signature_id"},
{ "name": "SIP", "description": "SIP grouped by user agent", "query": "event.module:zeek AND event.dataset:sip | groupby client.user_agent"},
{ "name": "SMB_Files", "description": "SMB files grouped by action", "query": "event.module:zeek AND event.dataset:smb_files | groupby file.action"},
{ "name": "SMB_Mapping", "description": "SMB mapping grouped by path", "query": "event.module:zeek AND event.dataset:smb_mapping | groupby file.path"},
{ "name": "SMTP", "description": "SMTP grouped by subject", "query": "event.module:zeek AND event.dataset:smtp | groupby smtp.subject"},
{ "name": "SNMP", "description": "SNMP grouped by version and string", "query": "event.module:zeek AND event.dataset:snmp | groupby snmp.community,snmp.version"},
{ "name": "Software", "description": "List of software seen on the network", "query": "event.module:zeek AND event.dataset:software | groupby software.type,software.name"},
{ "name": "SSH", "description": "SSH grouped by version", "query": "event.module:zeek AND event.dataset:ssh | groupby ssh.version"},
{ "name": "SSL", "description": "SSL grouped by version and server name", "query": "event.module:zeek AND event.dataset:ssl | groupby ssl.version,ssl.server_name"},
{ "name": "SYSLOG", "description": "SYSLOG grouped by severity and facility ", "query": "event.module:zeek AND event.dataset:syslog | groupby syslog.severity,syslog.facility"},
{ "name": "Tunnels", "description": "Tunnels grouped by action", "query": "event.module:zeek AND event.dataset:tunnels | groupby event.action"},
{ "name": "Weird", "description": "Zeek weird log grouped by name", "query": "event.module:zeek AND event.dataset:weird | groupby weird.name"},
{ "name": "x509", "description": "x.509 grouped by key length", "query": "event.module:zeek AND event.dataset:x509 | groupby x509.certificate.key.length"},
{ "name": "Firewall", "description": "Firewall events grouped by action", "query": "event_type:firewall | groupby action"}
]
}

View File

@@ -63,7 +63,7 @@ so-soctopus:
- hostname: soctopus
- name: so-soctopus
- binds:
- /opt/so/conf/soctopus/SOCtopus.conf:/SOCtopus.conf:ro
- /opt/so/conf/soctopus/SOCtopus.conf:/SOCtopus/SOCtopus.conf:ro
- /opt/so/log/soctopus/:/var/log/SOCtopus/:rw
- /opt/so/rules/elastalert/playbook:/etc/playbook-rules:rw
- /opt/so/conf/navigator/nav_layer_playbook.json:/etc/playbook/nav_layer_playbook.json:rw

View File

@@ -616,15 +616,14 @@
# # Read stats from one or more Elasticsearch servers or clusters
{% if grains['role'] == 'so-master' or grains['role'] == 'so-eval' %}
{% if grains['role'] in ['so-master', 'so-eval', 'so-mastersearch'] %}
[[inputs.elasticsearch]]
# ## specify a list of one or more Elasticsearch servers
# # you can add username and password to your url to use basic authentication:
# # servers = ["http://user:pass@localhost:9200"]
servers = ["http://{{ MASTER }}:9200"]
{% endif %}
{% if grains['role'] == 'so-node' %}
{% elif grains['role'] in ['so-searchnode', 'so-hotnode', 'so-warmnode', 'so-heavynode'] %}
[[inputs.elasticsearch]]
servers = ["http://{{ NODEIP }}:9200"]
{% endif %}
@@ -667,14 +666,14 @@
# # Read metrics from one or more commands that can output to stdout
# ## Commands array
{% if grains['role'] == 'so-master' %}
{% if grains['role'] in ['so-master', 'so-mastersearch'] %}
[[inputs.exec]]
commands = [
"/scripts/redis.sh",
"/scripts/influxdbsize.sh"
]
data_format = "influx"
{% elif grains['role'] == 'so-sensor' %}
{% elif grains['role'] in ['so-sensor', 'so-heavynode'] %}
[[inputs.exec]]
commands = [
"/scripts/stenoloss.sh",

View File

@@ -38,3 +38,9 @@ echo "Applying cross cluster search config..."
curl -XPUT http://{{ ES }}:9200/_cluster/settings -H'Content-Type: application/json' -d '{"persistent": {"search": {"remote": {"{{ SN }}": {"skip_unavailable": "true", "seeds": ["{{ SNDATA.ip }}:9300"]}}}}}'
{%- endfor %}
{%- endif %}
{%- if salt['pillar.get']('mastersearchtab', {}) %}
{%- for SN, SNDATA in salt['pillar.get']('mastersearchtab', {}).items() %}
curl -XPUT http://{{ ES }}:9200/_cluster/settings -H'Content-Type: application/json' -d '{"persistent": {"search": {"remote": {"{{ SN }}": {"skip_unavailable": "true", "seeds": ["{{ SNDATA.ip }}:9300"]}}}}}'
{%- endfor %}
{%- endif %}

View File

@@ -91,13 +91,13 @@ wait_for_file() {
local date
date=$(date)
while [[ $cur_attempts < $max_attempts ]]; do
while [[ $cur_attempts -lt $max_attempts ]]; do
if [ -f "$filename" ]; then
echo "File $filename already exists at $date" >> "$setup_log" 2>&1
echo "File $filename found at $date" >> "$setup_log" 2>&1
return 0
else
echo "File $filename does not exist; waiting ${wait_interval}s then checking again ($cur_attempts/$max_attempts)..." >> "$setup_log" 2>&1
((cur_attempts++))
echo "File $filename does not exist; waiting ${wait_interval}s then checking again ($cur_attempts/$max_attempts)..." >> "$setup_log" 2>&1
sleep "$wait_interval"
fi
done
@@ -195,11 +195,10 @@ check_admin_pass() {
check_hive_init_then_reboot() {
local return_val
return_val="$(wait_for_file /opt/so/state/thehive.txt 20 5)"
if [[ "$return_val" != 0 ]]; then
return "$return_val"
wait_for_file /opt/so/state/thehive.txt 20 5
local return_val=$?
if [[ $return_val -ne 0 ]]; then
return $return_val
fi
docker stop so-thehive
@@ -1025,7 +1024,8 @@ saltify() {
case "$install_type" in
'MASTER' | 'EVAL' | 'MASTERSEARCH' | 'FLEET' | 'HELIXSENSOR')
reserve_group_ids >> "$setup_log" 2>&1
yum -y install epel-release sqlite3 argon2 curl mariadb-devel >> "$setup_log" 2>&1
yum -y install epel-release >> "$setup_log" 2>&1
yum -y install sqlite argon2 curl mariadb-devel >> "$setup_log" 2>&1
# Download Ubuntu Keys in case master updates = 1
mkdir -p /opt/so/gpg >> "$setup_log" 2>&1
wget --inet4-only -O /opt/so/gpg/SALTSTACK-GPG-KEY.pub https://repo.saltstack.com/apt/ubuntu/16.04/amd64/latest/SALTSTACK-GPG-KEY.pub >> "$setup_log" 2>&1
@@ -1053,8 +1053,8 @@ saltify() {
yum clean expire-cache >> "$setup_log" 2>&1
set_progress_str 8 'Installing salt-minion & python modules'
{
yum -y install epel-release\
salt-minion-2019.2.4\
yum -y install epel-release
yum -y install salt-minion-2019.2.4\
python3\
python36-docker\
python36-dateutil\
@@ -1098,7 +1098,7 @@ saltify() {
# Add saltstack repo(s)
wget --inet4-only -O - https://repo.saltstack.com"$py_ver_url_path"/ubuntu/"$ubuntu_version"/amd64/2019.2/SALTSTACK-GPG-KEY.pub | apt-key add - >> "$setup_log" 2>&1
echo "deb http://repo.saltstack.com/py3/ubuntu/$ubuntu_version/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack2019.list >> "$setup_log" 2>&1
echo "deb http://repo.saltstack.com$py_ver_url_path/ubuntu/$ubuntu_version/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack2019.list
# Add Docker repo
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add - >> "$setup_log" 2>&1
@@ -1130,7 +1130,7 @@ saltify() {
echo "Using apt-key add to add SALTSTACK-GPG-KEY.pub and GPG-KEY-WAZUH" >> "$setup_log" 2>&1
apt-key add "$temp_install_dir"/gpg/SALTSTACK-GPG-KEY.pub >> "$setup_log" 2>&1
apt-key add "$temp_install_dir"/gpg/GPG-KEY-WAZUH >> "$setup_log" 2>&1
echo "deb http://repo.saltstack.com/apt/ubuntu/$ubuntu_version/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack.list >> "$setup_log" 2>&1
echo "deb http://repo.saltstack.com$py_ver_url_path/ubuntu/$ubuntu_version/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack.list >> "$setup_log" 2>&1
echo "deb https://packages.wazuh.com/3.x/apt/ stable main" | tee /etc/apt/sources.list.d/wazuh.list >> "$setup_log" 2>&1
;;
esac
@@ -1325,7 +1325,7 @@ set_initial_firewall_policy() {
/opt/so/saltstack/pillar/data/addtotab.sh evaltab "$MINION_ID" "$MAINIP" "$num_cpu_cores" "$random_uid" "$MNIC" "$filesystem_root" "$filesystem_nsm" bond0
;;
'MASTERSEARCH')
/opt/so/saltstack/pillar/data/addtotab.sh nodestab "$MINION_ID" "$MAINIP" "$num_cpu_cores" "$random_uid" "$MNIC" "$filesystem_root" "$filesystem_nsm"
/opt/so/saltstack/pillar/data/addtotab.sh mastersearchtab "$MINION_ID" "$MAINIP" "$num_cpu_cores" "$random_uid" "$MNIC" "$filesystem_root" "$filesystem_nsm"
;;
esac
;;
@@ -1336,7 +1336,7 @@ set_initial_firewall_policy() {
;;
'SENSOR' | 'SEARCHNODE' | 'HEAVYNODE' | 'FLEET')
ssh -i /root/.ssh/so.key soremote@"$MSRV" sudo /opt/so/saltstack/pillar/firewall/addfirewall.sh minions "$MAINIP"
case "$INSTALLERTYPE" in
case "$install_type" in
'SENSOR')
ssh -i /root/.ssh/so.key soremote@"$MSRV" sudo /opt/so/saltstack/pillar/firewall/addfirewall.sh forward_nodes "$MAINIP"
ssh -i /root/.ssh/so.key soremote@"$MSRV" sudo /opt/so/saltstack/pillar/data/addtotab.sh sensorstab "$MINION_ID" "$MAINIP" "$num_cpu_cores" "$random_uid" "$MNIC" "$filesystem_root" "$filesystem_nsm" bond0

View File

@@ -282,7 +282,12 @@ export percentage=0
sensor_pillar >> "$setup_log" 2>&1
fi
set_progress_str 3 'Installing Salt and dependencies'
if [[ $is_minion ]]; then
set_progress_str 3 'Configuring firewall'
set_initial_firewall_policy 2>> "$setup_log"
fi
set_progress_str 4 'Installing Salt and dependencies'
saltify 2>> "$setup_log"
set_progress_str 7 'Installing Docker and dependencies'
@@ -311,10 +316,7 @@ export percentage=0
master_pillar 2>> "$setup_log"
fi
if [[ $is_minion ]]; then
set_progress_str 14 'Configuring firewall'
set_initial_firewall_policy 2>> "$setup_log"
fi
set_progress_str 16 'Running first Salt checkin'
salt_firstcheckin 2>> "$setup_log"
@@ -356,6 +358,13 @@ export percentage=0
set_progress_str 26 'Downloading containers from the internet'
salt-call state.apply -l info registry >> "$setup_log" 2>&1
docker_seed_registry 2>> "$setup_log" # ~ 60% when finished
set_progress_str 61 "$(print_salt_state_apply 'master')"
salt-call state.apply -l info master >> $setup_log 2>&1
set_progress_str 62 "$(print_salt_state_apply 'idstools')"
salt-call state.apply -l info idstools >> $setup_log 2>&1
fi
set_progress_str 62 "$(print_salt_state_apply 'firewall')"
@@ -432,13 +441,7 @@ export percentage=0
salt-call state.apply -l info navigator >> $setup_log 2>&1
fi
if [[ $is_master || $is_helix ]]; then
set_progress_str 79 "$(print_salt_state_apply 'master')"
salt-call state.apply -l info master >> $setup_log 2>&1
set_progress_str 80 "$(print_salt_state_apply 'idstools')"
salt-call state.apply -l info idstools >> $setup_log 2>&1
if [[ $is_master || $is_helix ]]; then
set_progress_str 81 "$(print_salt_state_apply 'utility')"
salt-call state.apply -l info utility >> $setup_log 2>&1
fi
@@ -458,6 +461,9 @@ export percentage=0
set_progress_str 86 'Updating packages'
update_packages 2>> "$setup_log"
set_progress_str 87 'Adding user to SOC'
add_web_user 2>> "$setup_log"
set_progress_str 90 'Enabling checkin at boot'
checkin_at_boot 2>> "$setup_log"
@@ -477,4 +483,4 @@ if [[ "$success" = 0 ]]; then
else
whiptail_setup_failed
shutdown -r now
fi
fi