Mirror of https://github.com/Security-Onion-Solutions/securityonion.git, synced 2025-12-07 17:52:46 +01:00
add files
salt/elastalert/files/elastalert_config.yaml (new normal file, 79 lines added)
@@ -0,0 +1,79 @@
{% set esip = salt['pillar.get']('master:mainip', '') %}
{% set esport = salt['pillar.get']('master:es_port', '') %}
# This is the folder that contains the rule yaml files
# Any .yaml file will be loaded as a rule
rules_folder: /etc/elastalert/rules/

# Sets whether or not ElastAlert should recursively descend
# the rules directory - true or false
scan_subdirectories: true

# How often ElastAlert will query Elasticsearch
# The unit can be anything from weeks to seconds
run_every:
  minutes: 1

# ElastAlert will buffer results from the most recent
# period of time, in case some log sources are not in real time
buffer_time:
  minutes: 1

# The maximum age of the most recently run query that ElastAlert will resume from.
# When ElastAlert starts, for each rule, it searches elastalert_metadata for the most
# recently run query and starts from that time, unless it is older than
# old_query_limit, in which case it starts from the present time. The default is one week.
old_query_limit:
  minutes: 5

# The Elasticsearch hostname for metadata writeback
# Note that every rule can have its own Elasticsearch host
es_host: {{ esip }}

# The Elasticsearch port
es_port: {{ esport }}

# Sets timeout for connecting to and reading from es_host
es_conn_timeout: 60

# The maximum number of documents that will be downloaded from Elasticsearch in
# a single query. The default is 10,000, and if you expect to get near this number,
# consider using use_count_query for the rule. If this limit is reached, ElastAlert
# will scroll through pages the size of max_query_size until all results are processed.
max_query_size: 5000

# The AWS region to use. Set this when using AWS-managed Elasticsearch
#aws_region: us-east-1

# The AWS profile to use. Use this if you are using an aws-cli profile.
# See http://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html
# for details
#profile: test

# Optional URL prefix for Elasticsearch
#es_url_prefix: elasticsearch

# Connect with TLS to Elasticsearch
#use_ssl: True

# Verify TLS certificates
#verify_certs: True

# GET request with body is the default option for Elasticsearch.
# If it fails for some reason, you can pass 'GET', 'POST' or 'source'.
# See http://elasticsearch-py.readthedocs.io/en/master/connection.html?highlight=send_get_body_as#transport
# for details
#es_send_get_body_as: GET

# Optional basic-auth username and password for Elasticsearch
#es_username: someusername
#es_password: somepassword

# The index on es_host which is used for metadata storage
# This can be an unmapped index, but it is recommended that you run
# elastalert-create-index to set a mapping
writeback_index: elastalert_status

# If an alert fails for some reason, ElastAlert will retry
# sending the alert until this time period has elapsed
alert_time_limit:
  days: 2
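Since this file is a Salt template, the two pillar lookups at the top are resolved before ElastAlert ever reads it. As a quick illustration (the pillar values below are hypothetical, not from this commit), the rendered file would contain plain values in place of the Jinja expressions:

# Rendered sketch, assuming master:mainip = 192.168.1.10 and master:es_port = 9200 (hypothetical values)
es_host: 192.168.1.10
es_port: 9200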
salt/elastalert/files/modules/so/hivealerter.py (new normal file, 108 lines added)
@@ -0,0 +1,108 @@
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import uuid

from elastalert.alerts import Alerter
from thehive4py.api import TheHiveApi
from thehive4py.models import Alert, AlertArtifact, CustomFieldHelper


class HiveAlerter(Alerter):
    """
    Use matched data to create alerts containing observables in an instance of TheHive
    """

    required_options = set(['hive_connection', 'hive_alert_config'])

    def get_aggregation_summary_text(self, matches):
        text = super(HiveAlerter, self).get_aggregation_summary_text(matches)
        if text:
            text = u'```\n{0}```\n'.format(text)
        return text

    def create_artifacts(self, match):
        # Build TheHive observables from the rule's hive_observable_data_mapping,
        # rendering each format string against the rule and the matched document
        artifacts = []
        context = {'rule': self.rule, 'match': match}
        for mapping in self.rule.get('hive_observable_data_mapping', []):
            for observable_type, match_data_key in mapping.iteritems():
                try:
                    artifacts.append(AlertArtifact(dataType=observable_type, data=match_data_key.format(**context)))
                except KeyError as e:
                    print('format string {} failed because key {} was not found in {}'.format(match_data_key, e, context))
        return artifacts

    def create_alert_config(self, match):
        context = {'rule': self.rule, 'match': match}
        alert_config = {
            'artifacts': self.create_artifacts(match),
            'sourceRef': str(uuid.uuid4())[0:6],
            'title': '{rule[name]}'.format(**context)
        }

        alert_config.update(self.rule.get('hive_alert_config', {}))

        # Render every value from hive_alert_config against the rule/match context
        for alert_config_field, alert_config_value in alert_config.iteritems():
            if alert_config_field == 'customFields':
                custom_fields = CustomFieldHelper()
                for cf_key, cf_value in alert_config_value.iteritems():
                    try:
                        func = getattr(custom_fields, 'add_{}'.format(cf_value['type']))
                    except AttributeError:
                        raise Exception('unsupported custom field type {}'.format(cf_value['type']))
                    value = cf_value['value'].format(**context)
                    func(cf_key, value)
                alert_config[alert_config_field] = custom_fields.build()
            elif isinstance(alert_config_value, basestring):
                alert_config[alert_config_field] = alert_config_value.format(**context)
            elif isinstance(alert_config_value, (list, tuple)):
                formatted_list = []
                for element in alert_config_value:
                    try:
                        formatted_list.append(element.format(**context))
                    except (AttributeError, KeyError, IndexError):
                        # Leave non-string or non-formattable elements untouched
                        formatted_list.append(element)
                alert_config[alert_config_field] = formatted_list

        return alert_config

    def send_to_thehive(self, alert_config):
        connection_details = self.rule['hive_connection']
        api = TheHiveApi(
            connection_details.get('hive_host', ''),
            connection_details.get('hive_apikey', ''),
            proxies=connection_details.get('hive_proxies', {'http': '', 'https': ''}),
            cert=connection_details.get('hive_verify', False))

        alert = Alert(**alert_config)
        response = api.create_alert(alert)

        if response.status_code != 201:
            raise Exception('alert not successfully created in TheHive\n{}'.format(response.text))

    def alert(self, matches):
        # Default ('custom') mode sends one TheHive alert per match;
        # 'classic' mode aggregates all matches into a single alert
        if self.rule.get('hive_alert_config_type', 'custom') != 'classic':
            for match in matches:
                alert_config = self.create_alert_config(match)
                self.send_to_thehive(alert_config)
        else:
            alert_config = self.create_alert_config(matches[0])
            artifacts = []
            for match in matches:
                artifacts += self.create_artifacts(match)
                if 'related_events' in match:
                    for related_event in match['related_events']:
                        artifacts += self.create_artifacts(related_event)

            alert_config['artifacts'] = artifacts
            alert_config['title'] = self.create_title(matches)
            alert_config['description'] = self.create_alert_body(matches)
            self.send_to_thehive(alert_config)

    def get_info(self):

        return {
            'type': 'hivealerter',
            'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '')
        }
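For context on how a rule would wire these two files together, here is a hedged sketch of a rule that could live in rules_folder and use this alerter. Only hive_connection, hive_alert_config, hive_alert_config_type and hive_observable_data_mapping come from the class above; the module path on the alert line, the index, the filter, and the match field names are illustrative assumptions rather than anything defined in this commit.

# Illustrative rule sketch; module path, index, filter and field names are assumptions
name: Example TheHive alert rule
type: frequency
num_events: 1
timeframe:
  minutes: 10
index: "*:logstash-*"                        # hypothetical index pattern
filter:
- term:
    event_type: bro_notice                   # hypothetical field/value

alert: modules.so.hivealerter.HiveAlerter    # assumed import path for this module

hive_connection:
  hive_host: https://thehive.example.com
  hive_apikey: someapikey
  hive_proxies:
    http: ''
    https: ''

# 'classic' aggregates all matches into one alert; the default ('custom') sends one alert per match
hive_alert_config_type: classic

hive_alert_config:
  type: external
  source: elastalert
  severity: 2
  tlp: 3
  tags: ['elastalert', '{rule[name]}']
  customFields:
    detection_rule:                          # hypothetical custom field name
      type: string
      value: '{rule[name]}'

# Each entry maps a TheHive observable type to a format string rendered per match
hive_observable_data_mapping:
- ip: '{match[source_ip]}'                   # hypothetical match fields
- ip: '{match[destination_ip]}'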