Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)

Commit: Merge branch 'dev' of https://github.com/Security-Onion-Solutions/securityonion-saltstack into dev
@@ -78,18 +78,19 @@ def zeek():
     retcode = __salt__['zeekctl.status'](verbose=False)
     logging.debug('healthcheck_module: zeekctl.status retcode: %i' % retcode)
     if retcode:
-        zeek_restart = True
+        zeek_restart = 1
         if calling_func != 'beacon':
             docker_stop('so-zeek')
             states_to_apply.append('zeek')
     else:
-        zeek_restart = False
+        zeek_restart = 0
 
+    __salt__['telegraf.send']('healthcheck zeek_restart=%i' % zeek_restart)
+
     if calling_func == 'execute' and zeek_restart:
         apply_states()
 
     retval.append({'zeek_restart': zeek_restart})
 
     send_event('so/healthcheck/zeek', retval)
-    __salt__['telegraf.send']('healthcheck zeek_restart=%s' % str(zeek_restart))
     return retval

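Why True/False became 1/0 above: the value is interpolated into a telegraf metric in InfluxDB line protocol, where zeek_restart=True would land as a boolean-typed (or string) field, while zeek_restart=1 stays numeric and can be graphed and alerted on consistently. A minimal sketch of the formatting; the helper function is hypothetical, not part of the Salt module:

    # Hypothetical helper mirroring the '%i' formatting used in the new code.
    # InfluxDB line protocol types fields on first write, so sending an
    # integer keeps the zeek_restart series numeric.
    def healthcheck_metric(restarted):
        zeek_restart = 1 if restarted else 0
        return 'healthcheck zeek_restart=%i' % zeek_restart

    assert healthcheck_metric(True) == 'healthcheck zeek_restart=1'
    assert healthcheck_metric(False) == 'healthcheck zeek_restart=0'
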
@@ -10,13 +10,13 @@ providers:
   editable: true
   options:
     path: /etc/grafana/grafana_dashboards/master
-- name: 'Forward Nodes'
-  folder: 'Forward Nodes'
+- name: 'Sensor Nodes'
+  folder: 'Sensor Nodes'
   type: file
   disableDeletion: false
   editable: true
   options:
-    path: /etc/grafana/grafana_dashboards/forward_nodes
+    path: /etc/grafana/grafana_dashboards/sensor_nodes
 - name: 'Search Nodes'
   folder: 'Search Nodes'
   type: file

File diff suppressed because it is too large (4 files)
@@ -282,7 +282,7 @@ grafanadashevaldir:
 
 grafanadashfndir:
   file.directory:
-    - name: /opt/so/conf/grafana/grafana_dashboards/forward_nodes
+    - name: /opt/so/conf/grafana/grafana_dashboards/sensor_nodes
     - user: 939
     - group: 939
     - makedirs: True

@@ -331,11 +331,11 @@ dashboard-master:
 {% set SN = SN | regex_replace('_' ~ NODETYPE, '') %}
 dashboard-{{ SN }}:
   file.managed:
-    - name: /opt/so/conf/grafana/grafana_dashboards/forward_nodes/{{ SN }}-Sensor.json
+    - name: /opt/so/conf/grafana/grafana_dashboards/sensor_nodes/{{ SN }}-Sensor.json
     - user: 939
     - group: 939
     - template: jinja
-    - source: salt://common/grafana/grafana_dashboards/forward_nodes/sensor.json
+    - source: salt://common/grafana/grafana_dashboards/sensor_nodes/sensor.json
     - defaults:
         SERVERNAME: {{ SN }}
         MONINT: {{ SNDATA.monint }}

@@ -295,7 +295,7 @@ http {
         error_page 401 = @error401;
 
         location @error401 {
-            add_header Set-Cookie "AUTH_REDIRECT=$request_uri;Max-Age=14400";
+            add_header Set-Cookie "AUTH_REDIRECT=$request_uri;Path=/;Max-Age=14400";
             return 302 /auth/self-service/browser/flows/login;
         }
 

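The only change here is the added Path=/ attribute. Without an explicit path, browsers scope a cookie to the directory of the request that triggered the 401, so the login flow served under /auth/ could never read AUTH_REDIRECT back. A standard-library illustration of the attribute being added (illustrative only, not Security Onion code):

    # Build roughly the same Set-Cookie header nginx now emits.
    from http.cookies import SimpleCookie

    cookie = SimpleCookie()
    cookie['AUTH_REDIRECT'] = '/some/protected/page'
    cookie['AUTH_REDIRECT']['path'] = '/'          # matches the new Path=/
    cookie['AUTH_REDIRECT']['max-age'] = 14400     # matches Max-Age=14400
    print(cookie.output(header='Set-Cookie:'))
    # roughly: Set-Cookie: AUTH_REDIRECT=/some/protected/page; Path=/; Max-Age=14400
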
@@ -8,21 +8,20 @@ es_host: {{es}}
 es_port: 9200
 name: NIDS-Alert
 type: frequency
-index: "*:logstash-ids*"
+index: "so-ids-*"
 num_events: 1
 timeframe:
   minutes: 10
 buffer_time:
   minutes: 10
 allow_buffer_time_overlap: true
-query_key: ["alert", "ips"]
+query_key: ["rule.signature_id"]
 realert:
   days: 1
 
 filter:
 - query:
     query_string:
-      query: "event_type: ids AND NOT tags: _jsonparsefailure"
+      query: "event.module: suricata"
 
 alert: modules.so.thehive.TheHiveAlerter
 
@@ -35,16 +34,16 @@ hive_proxies:
   https: ''
 
 hive_alert_config:
-  title: '{match[alert]}'
+  title: '{match[rule][name]}'
   type: 'NIDS'
   source: 'SecurityOnion'
-  description: "`NIDS Dashboard:` \n\n <https://{{es}}/kibana/app/kibana#/dashboard/ed6f7e20-e060-11e9-8f0c-2ddbf5ed9290?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(_source),index:'*:logstash-*',interval:auto,query:(query_string:(analyze_wildcard:!t,query:'sid:{match[sid]}')),sort:!('@timestamp',desc))> \n\n `IPs: `{match[source_ip]}:{match[source_port]} --> {match[destination_ip]}:{match[destination_port]} \n\n `Signature:` {match[rule_signature]}"
+  description: "`NIDS Dashboard:` \n\n <https://{{es}}/kibana/app/kibana#/dashboard/ed6f7e20-e060-11e9-8f0c-2ddbf5ed9290?_g=(refreshInterval:(display:Off,pause:!f,value:0),time:(from:now-24h,mode:quick,to:now))&_a=(columns:!(_source),index:'*:logstash-*',interval:auto,query:(query_string:(analyze_wildcard:!t,query:'sid:')),sort:!('@timestamp',desc))> \n\n `IPs: `{match[source][ip]}:{match[source][port]} --> {match[destination][ip]}:{match[destination][port]} \n\n `Signature:`{match[rule][rule]}"
   severity: 2
-  tags: ['{match[sid]}','{match[source_ip]}','{match[destination_ip]}']
+  tags: ['{match[rule][signature_id]}','{match[source][ip]}','{match[destination][ip]}']
   tlp: 3
   status: 'New'
   follow: True
 
 hive_observable_data_mapping:
-  - ip: '{match[source_ip]}'
-  - ip: '{match[destination_ip]}'
+  - ip: '{match[source][ip]}'
+  - ip: '{match[destination][ip]}'

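This rule is migrated from the Logstash-era field names to the ECS names used by the new so-ids-* indices, and the same renames flow through the title, tags, and observable mappings. An editorial summary of the migration (a reference table, not part of the rule file):

    # Logstash-era field -> ECS field, as used throughout the rewritten rule.
    FIELD_MIGRATION = {
        'alert':            'rule.name',
        'sid':              'rule.signature_id',
        'source_ip':        'source.ip',
        'source_port':      'source.port',
        'destination_ip':   'destination.ip',
        'destination_port': 'destination.port',
        'rule_signature':   'rule.rule',
    }

    for old, new in FIELD_MIGRATION.items():
        print(f'{old:18} -> {new}')
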
@@ -1,26 +1,55 @@
 {
   "description" : "osquery",
   "processors" : [
     { "json": { "field": "message", "target_field": "message2", "ignore_failure": true } },
+    { "gsub": { "field": "message2.columns.data", "pattern": "\\\\xC2\\\\xAE", "replacement": "" } },
+    { "json": { "field": "message2.columns.data", "target_field": "message2.columns.winlog", "ignore_failure": true } },
     {
       "script": {
         "lang": "painless",
         "source": "def dict = ['result': new HashMap()]; for (entry in ctx['message2'].entrySet()) { dict['result'][entry.getKey()] = entry.getValue(); } ctx['osquery'] = dict; "
       }
     },
     { "rename": { "field": "osquery.result.hostIdentifier", "target_field": "osquery.result.host_identifier", "ignore_missing": true } },
     { "rename": { "field": "osquery.result.calendarTime", "target_field": "osquery.result.calendar_time", "ignore_missing": true } },
     { "rename": { "field": "osquery.result.unixTime", "target_field": "osquery.result.unix_time", "ignore_missing": true } },
     { "json": { "field": "message", "target_field": "message3", "ignore_failure": true } },
+    { "gsub": { "field": "message3.columns.data", "pattern": "\\\\xC2\\\\xAE", "replacement": "" } },
+    { "json": { "field": "message3.columns.data", "target_field": "message3.columns.winlog", "ignore_failure": true } },
     { "rename": { "field": "message3.columns.username", "target_field": "user.name", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.uid", "target_field": "user.uid", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.gid", "target_field": "user.gid", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.shell", "target_field": "user.shell", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.cmdline", "target_field": "process.command_line", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.pid", "target_field": "process.pid", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.parent", "target_field": "process.ppid", "ignore_missing": true } },
     { "rename": { "field": "message3.columns.cwd", "target_field": "process.working_directory", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.eventid", "target_field": "event.code", "ignore_missing": true } },
+    { "set": { "if": "ctx.message3.columns.data != null", "field": "dataset", "value": "wel-{{message3.columns.source}}", "override": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.SubjectUserName", "target_field": "user.name", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.destinationHostname", "target_field": "destination.hostname", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.destinationIp", "target_field": "destination.ip", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.destinationPort", "target_field": "destination.port", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.Image", "target_field": "process.executable", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ProcessID", "target_field": "process.pid", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ProcessGuid", "target_field": "process.entity_id", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.CommandLine", "target_field": "process.command_line", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.CurrentDirectory", "target_field": "process.working_directory", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.Description", "target_field": "process.pe.description", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.Product", "target_field": "process.pe.product", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.OriginalFileName", "target_field": "process.pe.original_file_name", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.FileVersion", "target_field": "process.pe.file_version", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ParentCommandLine", "target_field": "process.parent.command_line", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ParentImage", "target_field": "process.parent.executable", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ParentProcessGuid", "target_field": "process.parent.entity_id", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.ParentProcessId", "target_field": "process.ppid", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.User", "target_field": "user.name", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.parentImage", "target_field": "parent_image_path", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.sourceHostname", "target_field": "source.hostname", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.sourceIp", "target_field": "source_ip", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.sourcePort", "target_field": "source.port", "ignore_missing": true } },
+    { "rename": { "field": "message3.columns.winlog.EventData.targetFilename", "target_field": "file.target", "ignore_missing": true } },
     { "remove": { "field": [ "message3"], "ignore_failure": false } },
     { "pipeline": { "name": "common" } }
   ]
 }

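The new gsub processors strip literal "\xC2\xAE" escape sequences (the UTF-8 bytes of the registered-trademark sign, as osquery sometimes emits them in the Windows event "data" column) before the json processor parses that column, because \x is not a valid JSON escape and would make the parse fail. A rough illustration of the failure and the fix:

    # Demonstrate why the gsub must run before the json processor.
    import json, re

    raw = '{"Description": "Windows\\xC2\\xAE Operating System"}'

    try:
        json.loads(raw)
    except json.JSONDecodeError as e:
        print('unpatched parse fails:', e)

    # Same pattern as the gsub processor above.
    cleaned = re.sub(r'\\xC2\\xAE', '', raw)
    print(json.loads(cleaned))   # {'Description': 'Windows Operating System'}
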
@@ -4,17 +4,17 @@
     { "rename": { "field": "@timestamp", "target_field": "ingest.timestamp", "ignore_missing": true } },
     { "rename": { "field": "message2.uid", "target_field": "log.id.uid", "ignore_missing": true } },
     { "dot_expander": { "field": "id.orig_h", "path": "message2", "ignore_failure": true } },
-    { "rename": { "field": "message2.id.orig_h", "target_field": "source.ip", "ignore_missing": true } },
-    { "set": { "field": "client.ip", "value": "{{source.ip}}" } },
     { "dot_expander": { "field": "id.orig_p", "path": "message2", "ignore_failure": true } },
-    { "rename": { "field": "message2.id.orig_p", "target_field": "source.port", "ignore_missing": true } },
-    { "set": { "field": "client.port", "value": "{{source.port}}" } },
     { "dot_expander": { "field": "id.resp_h", "path": "message2", "ignore_failure": true } },
-    { "rename": { "field": "message2.id.resp_h", "target_field": "destination.ip", "ignore_missing": true } },
-    { "set": { "field": "server.ip", "value": "{{destination.ip}}" } },
     { "dot_expander": { "field": "id.resp_p", "path": "message2", "ignore_failure": true } },
+    { "rename": { "field": "message2.id.orig_h", "target_field": "source.ip", "ignore_missing": true } },
+    { "rename": { "field": "message2.id.orig_p", "target_field": "source.port", "ignore_missing": true } },
+    { "rename": { "field": "message2.id.resp_h", "target_field": "destination.ip", "ignore_missing": true } },
     { "rename": { "field": "message2.id.resp_p", "target_field": "destination.port", "ignore_missing": true } },
-    { "set": { "field": "server.port", "value": "{{destination.port}}" } },
+    { "set": { "field": "client.ip", "value": "{{source.ip}}" } },
+    { "set": { "if": "ctx.source.port != null", "field": "client.port", "value": "{{source.port}}" } },
+    { "set": { "field": "server.ip", "value": "{{destination.ip}}" } },
+    { "set": { "if": "ctx.destination.port != null", "field": "server.port", "value": "{{destination.port}}" } },
     { "set": { "field": "observer.name", "value": "{{agent.name}}" } },
     { "date": { "field": "message2.ts", "target_field": "@timestamp", "formats": ["ISO8601", "UNIX"], "ignore_failure": true } },
     { "remove": { "field": ["agent"], "ignore_failure": true } },

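The added "if" guards matter because a templated set like {{source.port}} on a document with no source.port would write an empty value into every document; the conditional only copies the client/server fields when the source field actually exists. A simplified Python model of the guarded processor (illustrative only):

    # Mirrors: { "set": { "if": "ctx.source.port != null",
    #                     "field": "client.port", "value": "{{source.port}}" } }
    def set_client_port(doc):
        port = doc.get('source', {}).get('port')
        if port is not None:
            doc.setdefault('client', {})['port'] = port
        return doc

    print(set_client_port({'source': {'ip': '10.0.0.5', 'port': 40122}}))
    print(set_client_port({'source': {'ip': '10.0.0.5'}}))  # no port: left alone
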
@@ -4,12 +4,13 @@
     { "remove": { "field": ["host"], "ignore_failure": true } },
     { "json": { "field": "message", "target_field": "message2", "ignore_failure": true } },
     { "rename": { "field": "message2.fuid", "target_field": "log.id.fuid", "ignore_missing": true } },
-    { "rename": { "field": "message2.rx_hosts", "target_field": "destination.ip", "ignore_missing": true } },
-    { "rename": { "field": "message2.tx_hosts", "target_field": "source.ip", "ignore_missing": true } },
-    { "set": { "field": "server.ip", "value": "{{source.ip}}", "ignore_failure": true } },
-    { "set": { "field": "client.ip", "value": "{{destination.ip}}", "ignore_failure": true } },
+    { "rename": { "field": "message2.conn_uids", "target_field": "log.id.uids", "ignore_missing": true } },
+    { "remove": { "field": "source", "ignore_missing": true } },
+    { "rename": { "field": "message2.rx_hosts.0", "target_field": "destination.ip", "ignore_missing": true } },
+    { "rename": { "field": "message2.tx_hosts.0", "target_field": "source.ip", "ignore_missing": true } },
+    { "remove": { "field": "message2.rx_hosts", "ignore_missing": true } },
+    { "remove": { "field": "message2.tx_hosts", "ignore_missing": true } },
     { "rename": { "field": "message2.conn_uids", "target_field": "log.id.uids", "ignore_missing": true } },
-    { "remove": { "field": "source", "ignore_missing": true } },
     { "rename": { "field": "message2.source", "target_field": "file.source", "ignore_missing": true } },
     { "rename": { "field": "message2.depth", "target_field": "file.depth", "ignore_missing": true } },
     { "rename": { "field": "message2.analyzers", "target_field": "file.analyzer", "ignore_missing": true } },

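Zeek's files.log reports tx_hosts and rx_hosts as arrays, which is why the pipeline now renames element 0 into the scalar ECS source.ip/destination.ip fields and then removes the leftover arrays rather than renaming the arrays wholesale. A rough Python model of that reshaping (illustrative only):

    # files.log carries host arrays; ECS source.ip/destination.ip are scalars.
    event = {'tx_hosts': ['10.0.0.5'], 'rx_hosts': ['192.0.2.7', '192.0.2.8']}
    source_ip = event.pop('tx_hosts')[0]       # -> source.ip
    destination_ip = event.pop('rx_hosts')[0]  # -> destination.ip
    print(source_ip, destination_ip, event)
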
@@ -7,8 +7,8 @@
 
 {%- set HOSTNAME = salt['grains.get']('host', '') %}
 {%- set BROVER = salt['pillar.get']('static:broversion', 'COMMUNITY') %}
-{%- set WAZUHENABLED = salt['pillar.get']('static:wazuh_enabled', '0') %}
-{%- set STRELKAENABLED = salt['pillar.get']('static:strelka_enabled', '0') %}
+{%- set WAZUHENABLED = salt['pillar.get']('static:wazuh', '0') %}
+{%- set STRELKAENABLED = salt['pillar.get']('static:strelka', '0') %}
 {%- set FLEETMASTER = salt['pillar.get']('static:fleet_master', False) -%}
 {%- set FLEETNODE = salt['pillar.get']('static:fleet_node', False) -%}
 

@@ -1,4 +1,6 @@
+{% set MASTER = salt['grains.get']('master') %}
 {% set ENROLLSECRET = salt['pillar.get']('secrets:fleet_enroll-secret') %}
+{% set CURRENTPACKAGEVERSION = salt['pillar.get']('static:fleet_packages-version') %}
 
 so/fleet:
   event.send:
@@ -7,4 +9,7 @@ so/fleet:
       hostname: {{ grains.host }}
       role: {{ grains.role }}
       mainip: {{ grains.host }}
       enroll-secret: {{ ENROLLSECRET }}
+      current-package-version: {{ CURRENTPACKAGEVERSION }}
+      master: {{ MASTER }}

@@ -41,6 +41,8 @@ sleep 120
 echo "Installing launcher via salt..."
 salt-call state.apply fleet.install_package queue=True >> /root/fleet-setup.log
 salt-call state.apply filebeat queue=True >> /root/fleet-setup.log
+docker stop so-core
+salt-call state.apply common queue=True >> /root/fleet-setup.log
 
 echo "Fleet Setup Complete - Login here: https://{{ MAIN_HOSTNAME }}"
 echo "Your username is $2 and your password is $initpw"

@@ -1,38 +1,27 @@
-{%- set MASTER = salt['pillar.get']('static:masterip', '') %}
 #!/bin/bash
 
+{%- set MASTER = salt['pillar.get']('static:masterip', '') %}
 KIBANA_VERSION="7.6.1"
 MAX_WAIT=120
 
 # Check to see if Kibana is available
 until curl "{{ MASTER }}:5601/nonexistenturl" 2>&1 |grep -q "Not Found" ; do
   wait_step=$(( ${wait_step} + 1 ))
   echo "Waiting on Kibana ({{ MASTER }}:5601)...Attempt #$wait_step"
   if [ ${wait_step} -gt ${MAX_WAIT} ]; then
     echo "ERROR: Kibana not available for more than ${MAX_WAIT} seconds."
     exit 5
   fi
   sleep 1s;
 done
 
 # Sleep additional JIC server is not ready
 sleep 30s
 
-# Load config
-#curl -X PUT "localhost:5601/api/saved_objects/config/$KIBANA_VERSION" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d'
-#{ "attributes":
-#  {
-#    "defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29",
-#    "defaultRoute":"/app/kibana#/dashboard/a8411b30-6d03-11ea-b301-3d6c35840645",
-#    "discover:sampleSize":"100",
-#    "dashboard:defaultDarkTheme":true,
-#    "theme:darkMode":true,
-#    "timepicker:timeDefaults":"{\n \"from\": \"now-24h\",\n \"to\": \"now\",\n \"mode\": \"quick\"\n}"
-#  }
-#}'
-
-# Sub our IP for placholders
+# Sub our IP for placeholders
+cp /opt/so/conf/kibana/saved_objects.ndjson.template /opt/so/conf/kibana/saved_objects.ndjson
+
 for i in FLEETPLACEHOLDER PCAPPLACEHOLDER SOCTOPUSPLACEHOLDER PLACEHOLDER; do
-  sed "s/$i/{{ MASTER }}/g" /opt/so/conf/kibana/saved_objects.ndjson.template > /opt/so/conf/kibana/saved_objects.ndjson
+  sed -i "s/$i/{{ MASTER }}/g" /opt/so/conf/kibana/saved_objects.ndjson
 done
 
 # Load saved objects

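The sed change above fixes a real substitution bug: the old loop re-rendered the output from the pristine template on every pass, so each iteration discarded the previous one and only the last placeholder was ever substituted. The fix copies the template once and then edits the copy in place. A toy Python model of the two behaviors:

    # Old behaviour vs. new behaviour of the placeholder loop.
    template = 'FLEETPLACEHOLDER PCAPPLACEHOLDER SOCTOPUSPLACEHOLDER'
    master = '10.0.0.1'
    placeholders = ['FLEETPLACEHOLDER', 'PCAPPLACEHOLDER', 'SOCTOPUSPLACEHOLDER']

    # old: sed template > output inside the loop (overwrites each pass)
    for p in placeholders:
        output = template.replace(p, master)
    print(output)   # first two placeholders survive untouched

    # new: cp once, then sed -i inside the loop (substitutions accumulate)
    output = template
    for p in placeholders:
        output = output.replace(p, master)
    print(output)   # all three replaced
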
File diff suppressed because one or more lines are too long
@@ -69,9 +69,9 @@ sensoroniagentconf:
   file.managed:
     - name: /opt/so/conf/steno/sensoroni.json
     - source: salt://pcap/files/sensoroni.json
-    - user: root
-    - group: root
-    - mode: 644
+    - user: stenographer
+    - group: stenographer
+    - mode: 600
     - template: jinja
 
 stenoca:

@@ -17,6 +17,20 @@ playbookwebhook:
       - db: /opt/so/conf/playbook/redmine.db
       - sql: "update webhooks set url = 'http://{{MASTERIP}}:7000/playbook/webhook' where project_id = 1"
 
+playbookapiendpoints:
+  module.run:
+    - sqlite3.modify:
+      - db: /opt/so/conf/playbook/redmine.db
+      - sql: |-
+          update settings set value =
+          "--- !ruby/hash:ActiveSupport::HashWithIndifferentAccess
+          project: '1'
+          import_trackers:
+          - '6'
+          convert_url: http://{{MASTERIP}}:7000/playbook/sigmac
+          create_url: http://{{MASTERIP}}:7000/playbook/play"
+          where id = 46;
+
 navigatorconfig:
   file.managed:
     - name: /opt/so/conf/playbook/navigator_config.json

@@ -20,7 +20,7 @@ def run():
     if MINIONID.split('_')[-1] in ['master','eval','fleet','mastersearch']:
         if ACTION == 'enablefleet':
             logging.info('so/fleet enablefleet reactor')
 
             # Enable Fleet
             for line in fileinput.input(STATICFILE, inplace=True):
                 if ROLE == 'so-fleet':
@@ -47,14 +47,28 @@ def run():
         if ACTION == 'genpackages':
             logging.info('so/fleet genpackages reactor')
 
+            PACKAGEVERSION = data['data']['current-package-version']
+            MASTER = data['data']['master']
+
+            # Increment the package version by 1
+            PACKAGEVERSION += 1
+
             # Run Docker container that will build the packages
             gen_packages = subprocess.run(["docker", "run","--rm", "--mount", "type=bind,source=/opt/so/saltstack/salt/fleet/packages,target=/output", \
-            "--mount", "type=bind,source=/etc/ssl/certs/intca.crt,target=/var/launcher/launcher.crt", "docker.io/soshybridhunter/so-fleet-launcher:HH1.1.0", \
-            f"{ESECRET}", f"{HOSTNAME}:8090"], stdout=subprocess.PIPE, encoding='ascii')
+            "--mount", "type=bind,source=/etc/ssl/certs/intca.crt,target=/var/launcher/launcher.crt", f"{ MASTER }:5000/soshybridhunter/so-fleet-launcher:HH1.2.1", \
+            f"{ESECRET}", f"{HOSTNAME}:8090", f"{PACKAGEVERSION}.1.1"], stdout=subprocess.PIPE, encoding='ascii')
 
             # Update the 'packages-built' timestamp on the webpage (stored in the static pillar)
             for line in fileinput.input(STATICFILE, inplace=True):
                 line = re.sub(r'fleet_packages-timestamp: \S*', f"fleet_packages-timestamp: {strftime('%Y-%m-%d-%H:%M', gmtime())}", line.rstrip())
                 print(line)
 
+            # Update the Fleet Osquery package version in the static pillar
+            for line in fileinput.input(STATICFILE, inplace=True):
+                line = re.sub(r'fleet_packages-version: \S*', f"fleet_packages-version: {PACKAGEVERSION}", line.rstrip())
+                print(line)
+
+            # Copy over newly-built packages
+            copy_packages = subprocess.run(["salt-call", "state.apply","fleet"], stdout=subprocess.PIPE, encoding='ascii')
+
     return {}

@@ -28,6 +28,7 @@ socsync:
     - source: salt://soc/files/soc
     - user: 939
     - group: 939
+    - file_mode: 600
     - template: jinja
 
 so-soc:
@@ -78,6 +79,7 @@ kratossync:
     - source: salt://soc/files/kratos
     - user: 928
     - group: 928
+    - file_mode: 600
     - template: jinja
 
 so-kratos:

@@ -720,15 +720,25 @@ get_filesystem_nsm(){
 
 get_log_size_limit() {
 
+  case $INSTALLTYPE in
+    EVAL | HEAVYNODE)
+      PERCENTAGE=50
+      ;;
+    *)
+      PERCENTAGE=80
+      ;;
+  esac
+
   DISK_DIR="/"
+
   if [ -d /nsm ]; then
     DISK_DIR="/nsm"
   fi
+
   DISK_SIZE_K=`df $DISK_DIR |grep -v "^Filesystem" | awk '{print $2}'`
-  PERCENTAGE=85
   DISK_SIZE=DISK_SIZE_K*1000
   PERCENTAGE_DISK_SPACE=`echo $(($DISK_SIZE*$PERCENTAGE/100))`
   LOG_SIZE_LIMIT=$(($PERCENTAGE_DISK_SPACE/1000000000))
 
 }
 

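The sizing logic now depends on install type: EVAL and HEAVYNODE installs allocate 50% of the /nsm filesystem to Elasticsearch, everything else 80% (previously a flat 85%). df reports kilobytes, hence the *1000 and the final division by 1e9 to get gigabytes. A back-of-the-envelope check of the arithmetic (a sketch, not the setup script itself):

    # Model of the new get_log_size_limit() calculation.
    def log_size_limit_gb(disk_size_kb, installtype):
        percentage = 50 if installtype in ('EVAL', 'HEAVYNODE') else 80
        disk_size_bytes = disk_size_kb * 1000
        return disk_size_bytes * percentage // 100 // 1_000_000_000

    print(log_size_limit_gb(1_000_000_000, 'EVAL'))        # 1 TB disk -> 500 GB
    print(log_size_limit_gb(1_000_000_000, 'SEARCHNODE'))  # 1 TB disk -> 800 GB
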
@@ -909,6 +919,7 @@ master_static() {
   echo "  fleet_master: False" >> /opt/so/saltstack/pillar/static.sls
   echo "  fleet_node: False" >> /opt/so/saltstack/pillar/static.sls
   echo "  fleet_packages-timestamp: N/A" >> /opt/so/saltstack/pillar/static.sls
+  echo "  fleet_packages-version: 1" >> /opt/so/saltstack/pillar/static.sls
   echo "  fleet_hostname: N/A" >> /opt/so/saltstack/pillar/static.sls
   echo "  fleet_ip: N/A" >> /opt/so/saltstack/pillar/static.sls
   echo "  sensoronikey: $SENSORONIKEY" >> /opt/so/saltstack/pillar/static.sls

@@ -1298,8 +1309,17 @@ EOF
     echo "Using apt-key add to add SALTSTACK-GPG-KEY.pub and GPG-KEY-WAZUH"
     apt-key add $TMP/gpg/SALTSTACK-GPG-KEY.pub
     apt-key add $TMP/gpg/GPG-KEY-WAZUH
-    echo "deb http://repo.saltstack.com/apt/ubuntu/$UVER/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack.list
+
+    if [ $OSVER != "xenial" ]; then
+      # Install the repo for salt py3 edition
+      wget --inet4-only -O - https://repo.saltstack.com/py3/ubuntu/$UVER/amd64/3000/SALTSTACK-GPG-KEY.pub | apt-key add -
+      wget --inet4-only -O - https://repo.saltstack.com/py3/ubuntu/$UVER/amd64/2019.2/SALTSTACK-GPG-KEY.pub | apt-key add -
+      echo "deb http://repo.saltstack.com/py3/ubuntu/$UVER/amd64/latest $OSVER main" > /etc/apt/sources.list.d/saltstack.list
+      echo "deb http://repo.saltstack.com/py3/ubuntu/$UVER/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack2019.list
+    else
+      echo "deb http://repo.saltstack.com/apt/ubuntu/$UVER/amd64/2019.2 $OSVER main" > /etc/apt/sources.list.d/saltstack.list
+    fi
     echo "deb https://packages.wazuh.com/3.x/apt/ stable main" | tee /etc/apt/sources.list.d/wazuh.list
     # Initialize the new repos
     apt-get update >> $SETUPLOG 2>&1
     if [ $OSVER != "xenial" ]; then

@@ -30,7 +30,7 @@ else
 fi
 
 # Allow execution of SO tools during setup
-export PATH=$PATH:../salt/common/tools/sbin
+export PATH=$PATH:"$SCRIPTDIR/../salt/common/tools/sbin"
 
 # Global Variables
 HOSTNAME=$(cat /etc/hostname)

@@ -471,7 +471,6 @@ if (whiptail_you_sure) ; then
     [[ $SKIP_BOND != 'yes' ]] && whiptail_bond_nics
     whiptail_management_server
     whiptail_master_updates
-    set_updates
     whiptail_homenet_sensor
     whiptail_sensor_config
     # Calculate lbprocs so we can call it in the prompts

@@ -485,6 +484,7 @@ if (whiptail_you_sure) ; then
       whiptail_basic_suri
     fi
     whiptail_make_changes
+    set_updates
     set_hostname
     clear_master
     mkdir -p /nsm

@@ -759,7 +759,6 @@ if (whiptail_you_sure) ; then
   if [ $INSTALLTYPE == 'SEARCHNODE' ] || [ $INSTALLTYPE == 'PARSINGNODE' ] || [ $INSTALLTYPE == 'HOTNODE' ] || [ $INSTALLTYPE == 'WARMNODE' ]; then
     whiptail_management_server
     whiptail_master_updates
-    set_updates
     get_log_size_limit
     CURCLOSEDAYS=30
     es_heapsize

@@ -783,6 +782,7 @@ if (whiptail_you_sure) ; then
       LSINPUTBATCHCOUNT=125
     fi
     whiptail_make_changes
+    set_updates
     set_hostname
     clear_master
     mkdir -p /nsm

@@ -852,7 +852,6 @@ if (whiptail_you_sure) ; then
     [[ $SKIP_BOND != 'yes' ]] && whiptail_bond_nics
     whiptail_management_server
     whiptail_master_updates
-    set_updates
     whiptail_homenet_sensor
     whiptail_sensor_config
     # Calculate lbprocs so we can call it in the prompts

@@ -889,6 +888,7 @@ if (whiptail_you_sure) ; then
       LSINPUTBATCHCOUNT=125
     fi
     whiptail_make_changes
+    set_updates
     set_hostname
     clear_master
     mkdir -p /nsm

@@ -309,7 +309,7 @@ whiptail_log_size_limit() {
 
   LOG_SIZE_LIMIT=$(whiptail --title "Security Onion Setup" --inputbox \
   "Please specify the amount of disk space (in GB) you would like to allocate for Elasticsearch data storage. \
-By default, this is set to 85% of the disk space allotted for /nsm." 10 75 $LOG_SIZE_LIMIT 3>&1 1>&2 2>&3)
+By default, this is set to 80% of the disk space allotted for /nsm." 10 75 $LOG_SIZE_LIMIT 3>&1 1>&2 2>&3)
 
   local exitstatus=$?
   whiptail_check_exitstatus $exitstatus