mirror of
https://github.com/Security-Onion-Solutions/securityonion.git
synced 2025-12-07 17:52:46 +01:00
pushing everything at once
This commit is contained in:
58
salt/sensoroni/files/analyzers/elasticsearch/README.md
Normal file
58
salt/sensoroni/files/analyzers/elasticsearch/README.md
Normal file
@@ -0,0 +1,58 @@
|
||||
# Elasticsearch
|
||||
Elasticsearch returns an informational breakdown of the queried observable.
|
||||
|
||||
## Overview
|
||||
Elasticsearch facilitates queries within the user's database. Users can use these observable types: hash, domain, file, filename, fqdn, gimphash, IP, mail, mail_subject, regexp, registry, telfhash, tlsh, uri_path, URL, and user-agent values.
|
||||
|
||||
## Description
|
||||
Configure and submit the field you want to search for in your database. Ex: domain, hash, IP, or URL
|
||||
|
||||
## Requirement
|
||||
An API key or user credentials are necessary for utilizing Elasticsearch.
|
||||
|
||||
## Configuration Requirements
|
||||
|
||||
In SOC, navigate to `Administration`, toggle `Show all configurable settings, including advanced settings.`, and navigate to `sensoroni` -> `analyzers` -> `elasticsearch`.
|
||||
|
||||

|
||||
|
||||
|
||||
The following configuration options are available:
|
||||
|
||||
``api_key`` - API key used for communication with the Elasticsearch API (Optional if auth_user and auth_pwd are used)
|
||||
|
||||
``auth_user`` - Username used for communication with Elasticsearch
|
||||
|
||||
``auth_pwd`` - Password used for communication with Elasticsearch
|
||||
|
||||
``base_url`` - URL that connects to the Elasticsearch VM on port 9200. Example format: "https://&lt;your IP address&gt;:9200"
|
||||
|
||||
``index`` - The index of the data in Elasticsearch database. Default value is _all.
|
||||
|
||||
``num_results`` - The maximum number of results that will be displayed. Default value is 10.
|
||||
|
||||
``time_delta_minutes`` - How far back in time to search for data, in minutes. Default value is 14400.
|
||||
|
||||
``timestamp_field_name`` - The name of your timestamp field name. Default value is @timestamp.
|
||||
|
||||
``map`` - This is the dictionary of the field name in the user's Elasticsearch database. Example value {"hash":"userhashfieldname"}. This value will map the Security Onion hash field name to user hash field name.
|
||||
|
||||
``cert_path`` - This is the path to the certificate in the host for authentication purpose (Required)
|
||||
|
||||
This value should be set in the ``sensoroni`` pillar, like so:
|
||||
|
||||
```
sensoroni:
  analyzers:
    elasticsearch:
      base_url: $yourbase_url
      api_key: $yourapi_key
      num_results: $yournum_results
      auth_user: $yourauth_user
      auth_pwd: $yourauth_pwd
      index: $yourindex
      time_delta_minutes: $yourtime_delta_minutes
      timestamp_field_name: $yourtimestamp_field_name
      cert_path: $yourcert_path
      map: $yourmap
```
|
||||
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"name": "Elastic Search",
|
||||
"version": "0.1",
|
||||
"author": "Security Onion Solutions",
|
||||
"description": "Queries an ElasticSearch instance for specified field values.",
|
||||
"supportedTypes": ["hash", "ip", "domain"]
|
||||
}
|
||||
|
||||
|
||||
138
salt/sensoroni/files/analyzers/elasticsearch/elasticsearch.py
Normal file
138
salt/sensoroni/files/analyzers/elasticsearch/elasticsearch.py
Normal file
@@ -0,0 +1,138 @@
|
||||
from datetime import datetime, timedelta
|
||||
import argparse
|
||||
import requests
|
||||
import helpers
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
# As it stands, this analyzer does not support querying for mixed-case fields without disregarding case completely.
|
||||
# So the current version will only support querying for all-lowercase alphanumerical values.
|
||||
|
||||
# default usage is:
|
||||
# python3 elasticsearch.py '{"artifactType":"hash", "value":"*"}'
|
||||
|
||||
# To use outside of a Security Onion box, pass in '-c test.yaml' at the end
|
||||
# of the above command to give this analyzer some test values. You may edit the
|
||||
# values in the test.yaml file freely.
|
||||
|
||||
|
||||
def checkConfigRequirements(conf):
    """Validate that all required analyzer configurables are present.

    Exits the process with status 126 when any required value is missing
    or empty. Authentication requires either an API key or a username
    and password pair.

    :param conf: dict of analyzer configuration values
    :returns: True when every requirement is satisfied
    """
    # Each of these must be present and non-empty for a query to be possible.
    required = ['num_results', 'time_delta_minutes', 'index', 'base_url',
                'timestamp_field_name', 'cert_path']
    for key in required:
        if not conf[key]:
            sys.exit(126)
    # Need either a complete basic-auth credential pair or an API key.
    if (not conf['auth_user'] or not conf['auth_pwd']) and not conf['api_key']:
        sys.exit(126)
    return True
|
||||
|
||||
|
||||
def buildReq(conf, input):
    """Build the Elasticsearch query body for the requested observable.

    The query wildcard-matches the observable value against the field
    named after the artifact type (optionally remapped via conf['map'])
    and restricts hits to the configured time window.

    :param conf: dict of analyzer configuration values
    :param input: dict with 'artifactType' and 'value' keys
    :returns: JSON-encoded query string
    """
    num_results = conf['num_results']

    # Optional mapping from Security Onion field names to the field names
    # used in the user's own Elasticsearch indices.
    if conf['map'] is not None:
        mappings = conf['map']
    else:
        mappings = dict()

    # Query window: [now - time_delta_minutes, now].
    cur_time = datetime.now()
    start_time = cur_time - timedelta(minutes=int(conf['time_delta_minutes']))

    # 'type' previously shadowed the builtin; use a descriptive local name.
    if input['artifactType'] in mappings:
        field_name = mappings[input['artifactType']]
    else:
        field_name = input['artifactType']

    query = {
        "from": 0,
        "size": num_results,
        "query": {
            "bool": {
                "must": [{
                    "wildcard": {
                        field_name: input['value'],
                    },
                }
                ],
                "filter": {
                    "range": {
                        conf['timestamp_field_name']: {
                            "gte": start_time.strftime('%Y-%m-%dT%H:%M:%S'),
                            "lte": cur_time.strftime('%Y-%m-%dT%H:%M:%S')
                        }
                    }
                }
            }
        }
    }

    return json.dumps(query)
|
||||
|
||||
|
||||
def sendReq(conf, query):
    """POST the query to the configured Elasticsearch instance.

    Basic-auth credentials are preferred when both username and password
    are present; otherwise the API key is used. TLS verification uses
    the certificate at conf['cert_path'].

    :param conf: dict of analyzer configuration values
    :param query: JSON-encoded query string from buildReq()
    :returns: decoded JSON response body
    :raises ValueError: when no authentication method is configured
    """
    url = conf['base_url'] + conf['index'] + '/_search'
    uname = conf['auth_user']
    pwd = conf['auth_pwd']
    apikey = conf['api_key']
    cert_path = conf['cert_path']
    headers = {
        'Content-Type': 'application/json',
    }

    if pwd and uname:
        response = requests.post(str(url), auth=(
            uname, pwd), verify=cert_path, data=query, headers=headers)
    elif apikey:
        headers['Authorization'] = f"Apikey {apikey}"
        response = requests.post(
            str(url), verify=cert_path, data=query, headers=headers)
    else:
        # Previously this fell through with 'response' unbound and raised
        # UnboundLocalError; fail with an explicit, descriptive error.
        raise ValueError('no Elasticsearch credentials configured')

    return response.json()
|
||||
|
||||
|
||||
def prepareResults(raw):
    """Wrap the raw Elasticsearch response in the analyzer result format.

    :param raw: decoded JSON response from Elasticsearch
    :returns: dict with 'response' (raw API response), 'summary'
              (hit count text), and 'status' (always 'info') keys
    """
    hit_count = len(raw['hits']['hits'])
    return {
        'response': raw,
        'summary': f"Documents returned: {hit_count}",
        'status': 'info',
    }
|
||||
|
||||
|
||||
def analyze(conf, input):
    """Run the full analyzer pipeline for one artifact.

    Validates the configuration, parses the artifact JSON, builds and
    sends the query, and formats the result.

    :param conf: dict of analyzer configuration values
    :param input: JSON string with 'artifactType' and 'value' keys
    :returns: analyzer result dict from prepareResults()
    """
    checkConfigRequirements(conf)
    artifact = json.loads(input)
    raw = sendReq(conf, buildReq(conf, artifact))
    return prepareResults(raw)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments, run the analyzer, print JSON.

    Reads the analyzer config (default: elasticsearch.yaml next to this
    script, overridable with -c/--config) and analyzes the given artifact.
    """
    # 'dir' previously shadowed the builtin; use a descriptive local name.
    script_dir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser(
        description='Search Elastic Search for a given artifact?')
    parser.add_argument('artifact', help='required artifact')
    parser.add_argument('-c', '--config', metavar='CONFIG_FILE', default=script_dir + '/elasticsearch.yaml',
                        help='optional config file to use instead of the default config file')
    args = parser.parse_args()
    if args.artifact:
        results = analyze(helpers.loadConfig(args.config), args.artifact)
        print(json.dumps(results))
|
||||
|
||||
|
||||
# Allow running this analyzer directly from the command line.
if __name__ == '__main__':
    main()
|
||||
@@ -0,0 +1,10 @@
|
||||
base_url: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:base_url', '') }}"
|
||||
auth_user: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:auth_user', '') }}"
|
||||
auth_pwd: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:auth_pwd', '') }}"
|
||||
num_results: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:num_results', 10) }}"
|
||||
api_key: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:api_key', '') }}"
|
||||
index: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:index', '_all') }}"
|
||||
time_delta_minutes: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:time_delta_minutes', 14400) }}"
|
||||
timestamp_field_name: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:timestamp_field_name', '@timestamp') }}"
|
||||
map: {{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:map', '') }}
|
||||
cert_path: "{{ salt['pillar.get']('sensoroni:analyzers:elasticsearch:cert_path', '') }}"
|
||||
@@ -0,0 +1,194 @@
|
||||
from io import StringIO
|
||||
import sys
|
||||
from unittest.mock import patch, MagicMock
|
||||
import unittest
|
||||
import elasticsearch
|
||||
import helpers
|
||||
import json
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
class TestElasticSearchMethods(unittest.TestCase):
    """Unit tests for the Elasticsearch analyzer module."""

    @staticmethod
    def _expectedQuery(field_name, value, size, timestamp_field, start_time, cur_time):
        """Return the query dict buildReq() is expected to produce.

        Key order matches buildReq() exactly so json.dumps output compares
        equal. Shared by the buildReq tests to avoid three copies of the
        same literal.
        """
        return {
            "from": 0,
            "size": size,
            "query": {
                "bool": {
                    "must": [{
                        "wildcard": {
                            field_name: value,
                        },
                    }
                    ],
                    "filter": {
                        "range": {
                            timestamp_field: {
                                "gte": start_time.strftime('%Y-%m-%dT%H:%M:%S'),
                                "lte": cur_time.strftime('%Y-%m-%dT%H:%M:%S')
                            }
                        }
                    }
                }
            }
        }

    def test_main_missing_input(self):
        """main() prints usage and exits(2) when the artifact arg is missing."""
        with patch('sys.exit', new=MagicMock()) as sysmock:
            with patch('sys.stderr', new=StringIO()) as mock_stderr:
                sys.argv = ["cmd"]
                elasticsearch.main()
                self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
                sysmock.assert_called_once_with(2)

    def test_main_success(self):
        """main() prints the analyzer result as JSON when given an artifact."""
        conf = {"base_url": "test", "auth_user": "test", "auth_pwd": "test",
                "num_results": 10, "api_key": "test", "index": "test",
                "time_delta_minutes": 14400, "timestamp_field_name": "test",
                "map": {}, "cert_path": ""}
        with patch('elasticsearch.helpers.loadConfig', new=MagicMock(return_value=conf)) as mock_yaml:
            with patch('sys.stdout', new=StringIO()) as mock_cmd:
                with patch('elasticsearch.analyze', new=MagicMock(return_value={'foo': 'bar'})) as mock:
                    sys.argv = ["cmd", "conf"]
                    elasticsearch.main()
                    self.assertEqual(mock_cmd.getvalue(), '{"foo": "bar"}\n')
                    mock.assert_called_once()
                    mock_yaml.assert_called_once()

    def test_checkConfigRequirements(self):
        """Empty/None configurables must terminate with exit code 126."""
        conf = {"base_url": "", "auth_user": "", "auth_pwd": "", "num_results": None,
                "api_key": "", "index": "", "time_delta_minutes": None,
                "timestamp_field_name": "", "map": {}, "cert_path": ""}
        with self.assertRaises(SystemExit) as cm:
            elasticsearch.checkConfigRequirements(conf)
        self.assertEqual(cm.exception.code, 126)

    def test_buildReq(self):
        """buildReq() honors num_results and uses the raw artifact type field.

        The original version of this test patched buildReq itself and only
        exercised the mock; it now calls the real implementation.
        """
        conf = {'map': None, 'timestamp_field_name': '@timestamp',
                'time_delta_minutes': 14400, 'num_results': 1}
        artifact = {'artifactType': 'hash', 'value': 'abc'}
        cur_time = datetime.now()
        start_time = cur_time - timedelta(minutes=conf['time_delta_minutes'])
        query = elasticsearch.buildReq(conf, artifact)
        expected = self._expectedQuery('hash', 'abc', 1, '@timestamp', start_time, cur_time)
        self.assertEqual(query, json.dumps(expected))

    def test_wrongbuildReq(self):
        """Without a matching map entry, the artifact type is the field name."""
        result = {'map': '123', 'artifactType': 'hash', 'timestamp_field_name': 'abc',
                  'time_delta_minutes': 14400, 'num_results': 10, 'value': '0'}
        cur_time = datetime.now()
        start_time = cur_time - timedelta(minutes=result['time_delta_minutes'])
        query = elasticsearch.buildReq(result, result)
        expected = self._expectedQuery('hash', '0', 10, 'abc', start_time, cur_time)
        self.assertEqual(query, json.dumps(expected))

    def test_rightbuildReq(self):
        """A map entry translates the artifact type into the user's field name."""
        result = {'map': {'hash': 'testingHash'}, 'artifactType': 'hash',
                  'timestamp_field_name': 'abc', 'time_delta_minutes': 14400,
                  'num_results': 10, 'value': '0'}
        cur_time = datetime.now()
        start_time = cur_time - timedelta(minutes=result['time_delta_minutes'])
        query = elasticsearch.buildReq(result, result)
        expected = self._expectedQuery('testingHash', '0', 10, 'abc', start_time, cur_time)
        self.assertEqual(query, json.dumps(expected))

    def test_rightbuildReq100result(self):
        """num_results is passed straight through as the query size."""
        result = {'map': {'hash': 'testingHash'}, 'artifactType': 'hash',
                  'timestamp_field_name': 'abc', 'time_delta_minutes': 14400,
                  'num_results': 100, 'value': '0'}
        cur_time = datetime.now()
        start_time = cur_time - timedelta(minutes=result['time_delta_minutes'])
        query = elasticsearch.buildReq(result, result)
        expected = self._expectedQuery('testingHash', '0', 100, 'abc', start_time, cur_time)
        self.assertEqual(query, json.dumps(expected))

    def test_sendReq(self):
        """sendReq() returns the (mocked) decoded response from requests.post."""
        conf = {"base_url": "test", "auth_user": "test", "auth_pwd": "test",
                "api_key": "test", "index": "test", "cert_path": ""}
        with patch('requests.post', new=MagicMock(return_value=MagicMock())) as mock:
            response = elasticsearch.sendReq(conf, 'example_query')
            self.assertIsNotNone(response)
            mock.assert_called_once()

    def test_prepareResults(self):
        """prepareResults() summarizes the hit count and tags status 'info'.

        The original version of this test patched prepareResults itself and
        only exercised the mock; it now calls the real implementation with
        a realistic response shape.
        """
        raw = {'hits': {'hits': [{}, {}, {}, {}, {}]}}
        results = elasticsearch.prepareResults(raw)
        self.assertEqual(results['response'], raw)
        self.assertEqual(results['summary'], "Documents returned: 5")
        self.assertEqual(results['status'], 'info')

    def test_analyze(self):
        """analyze() wires config check, query build, send, and prep together."""
        sendReqOutput = {'_id': "0", "hash": "123"}
        input = '{"artifactType":"hash", "value":"123"}'
        prepareResultOutput = {'response': {'_id': "0", "hash": "123"},
                               'summary': "Documents returned: 5", 'status': 'info'}
        conf = {"base_url": "test", "auth_user": "test", "auth_pwd": "test",
                "num_results": 10, "api_key": "test", "index": "test",
                "time_delta_minutes": 14400, "timestamp_field_name": "test",
                "map": {}, "cert_path": "test"}
        with patch('elasticsearch.sendReq', new=MagicMock(return_value=sendReqOutput)) as mock:
            with patch('elasticsearch.prepareResults', new=MagicMock(return_value=prepareResultOutput)) as mock2:
                results = elasticsearch.analyze(conf, input)
                self.assertEqual(results["summary"], "Documents returned: 5")
                mock.assert_called_once()
                mock2.assert_called_once()
|
||||
@@ -0,0 +1,3 @@
|
||||
requests>=2.31.0
|
||||
pyyaml>=6.0
|
||||
urllib3>=2.1.0
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Reference in New Issue
Block a user