Merge remote-tracking branch 'remotes/origin/2.4/dev' into 2.4/elasticfleet-ag

This commit is contained in:
Josh Brower
2022-11-15 12:13:05 -05:00
13 changed files with 147 additions and 37 deletions

View File

@@ -33,15 +33,15 @@ socore:
soconfperms:
file.directory:
- name: /opt/so/conf
- uid: 939
- gid: 939
- user: 939
- group: 939
- dir_mode: 770
sostatusconf:
file.directory:
- name: /opt/so/conf/so-status
- uid: 939
- gid: 939
- user: 939
- group: 939
- dir_mode: 770
so-status.conf:
@@ -52,8 +52,8 @@ so-status.conf:
sosaltstackperms:
file.directory:
- name: /opt/so/saltstack
- uid: 939
- gid: 939
- user: 939
- group: 939
- dir_mode: 770
so_log_perms:

View File

@@ -18,6 +18,7 @@ commonpkgs:
- createrepo
- python3-lxml
- python3-packaging
- python3-watchdog
- yum-utils
- device-mapper-persistent-data
- lvm2
@@ -50,6 +51,7 @@ commonpkgs:
- python36-mysql
- python36-packaging
- python36-lxml
- securityonion-python36-watchdog
- yum-utils
- device-mapper-persistent-data
- lvm2

View File

@@ -53,7 +53,7 @@ def analyze(conf, input):
def main():
dir = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser(description='Search Greynoise for a given artifact')
parser = argparse.ArgumentParser(description='Search EmailRep for a given artifact')
parser.add_argument('artifact', help='the artifact represented in JSON format')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/emailrep.yaml", help='optional config file to use instead of the default config file')

View File

@@ -5,7 +5,7 @@ Search Pulsedive for a domain, hash, IP, URI, URL, or User Agent.
## Configuration Requirements
``api_key`` - API key used for communication with the Virustotal API
``api_key`` - API key used for communication with the Pulsedive API
This value should be set in the ``sensoroni`` pillar, like so:

View File

@@ -91,7 +91,7 @@ def analyze(conf, input):
def main():
dir = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser(description='Search VirusTotal for a given artifact')
parser = argparse.ArgumentParser(description='Search Pulsedive for a given artifact')
parser.add_argument('artifact', help='the artifact represented in JSON format')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/pulsedive.yaml", help='optional config file to use instead of the default config file')

View File

@@ -5,7 +5,7 @@ Submit a URL to Urlscan for analysis.
## Configuration Requirements
``api_key`` - API key used for communication with the Virustotal API
``api_key`` - API key used for communication with the urlscan API
``enabled`` - Determines whether or not the analyzer is enabled. Defaults to ``False``
``visibility`` - Determines whether or not scan results are visibile publicly. Defaults to ``public``
``timeout`` - Time to wait for scan results. Defaults to ``180``s

View File

@@ -77,7 +77,7 @@ def analyze(conf, input):
def main():
dir = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser(description='Search Alienvault OTX for a given artifact')
parser = argparse.ArgumentParser(description='Search urlscan for a given artifact')
parser.add_argument('artifact', help='the artifact represented in JSON format')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/urlscan.yaml", help='optional config file to use instead of the default config file')

View File

@@ -0,0 +1,75 @@
#!/usr/bin/env python3
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
# https://securityonion.net/license; you may not use this file except in compliance with the
# Elastic License 2.0.
import os
import time
import hashlib
import logging
import yaml
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
# Load filecheck settings (watched directory, dedup history, Strelka intake,
# log file) from the Salt-managed config rendered by strelka/filecheck/filecheck.yaml.
with open("/opt/so/conf/strelka/filecheck.yaml", "r") as ymlfile:
    # safe_load: yaml.load() without an explicit Loader is unsafe on untrusted
    # input and raises TypeError on PyYAML >= 6.0 (Loader became mandatory).
    cfg = yaml.safe_load(ymlfile)
extract_path = cfg["filecheck"]["extract_path"]  # directory watched for extracted files
historypath = cfg["filecheck"]["historypath"]    # per-hash marker files (dedup history)
strelkapath = cfg["filecheck"]["strelkapath"]    # Strelka's unprocessed intake directory
logfile = cfg["filecheck"]["logfile"]
logging.basicConfig(filename=logfile, filemode='w', format='%(asctime)s - %(message)s', datefmt='%d-%b-%y %H:%M:%S', level=logging.INFO)
def checkexisting():
    """Hash and process any files already waiting in the extraction directory.

    Run once at startup so files created before the watcher started are not missed.
    """
    for entry in os.listdir(extract_path):
        fullpath = os.path.join(extract_path, entry)
        logging.info("Processing existing file " + fullpath)
        checksum(fullpath)
def checksum(filename):
    """Compute the SHA-1 of *filename* in 8 KiB chunks and hand the result to process()."""
    digest = hashlib.sha1()
    with open(filename, 'rb') as fh:
        # iter() with a b'' sentinel reads until EOF without loading the whole file.
        for chunk in iter(lambda: fh.read(8192), b''):
            digest.update(chunk)
    process(filename, digest.hexdigest())
def process(filename, hizash):
    """Deduplicate *filename* by its SHA-1 *hizash*.

    If an empty marker file named after the hash already exists in the history
    directory the file is a duplicate and is deleted; otherwise the marker is
    created and the file is moved into Strelka's intake directory.
    """
    # Bug fix: the original used os.path.join(historypath + hizash) — a
    # single-argument join is a no-op and only worked because the configured
    # path happened to end in '/'. Join the components properly instead.
    marker = os.path.join(historypath, hizash)
    if os.path.exists(marker):
        logging.info(filename + " Already exists.. removing")
        os.remove(filename)
    else:
        # Record the hash so future copies of the same content are skipped.
        logging.info(filename + " is new. Creating a record and sending to Strelka")
        with open(marker, 'w') as fp:
            pass
        head, tail = os.path.split(filename)
        # rename (not copy) — assumes source and destination are on the same
        # filesystem under /nsm; TODO confirm, rename fails across mounts.
        os.rename(filename, os.path.join(strelkapath, tail))
class CreatedEventHandler(FileSystemEventHandler):
    """Watchdog handler that hashes every newly created file for dedup/queueing."""

    def on_created(self, event):
        newfile = event.src_path
        logging.info("Found new file")
        checksum(newfile)
if __name__ == "__main__":
    # Drain anything already sitting in the extraction directory, then watch
    # for new files until interrupted.
    checkexisting()
    handler = CreatedEventHandler()
    observer = Observer()
    observer.schedule(handler, extract_path, recursive=True)
    observer.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        observer.stop()
    observer.join()

View File

@@ -0,0 +1,10 @@
{#- Jinja-templated config consumed by the filecheck watcher script.
    The watched extraction directory depends on the meta-data engine
    selected in the global pillar (Suricata vs. Zeek). -#}
{%- set ENGINE = salt['pillar.get']('global:mdengine', '') %}
filecheck:
{%- if ENGINE == "SURICATA" %}
extract_path: '/nsm/suricata/extracted'
{%- else %}
extract_path: '/nsm/zeek/extracted/complete'
{%- endif %}
historypath: '/nsm/strelka/history/'
strelkapath: '/nsm/strelka/unprocessed/'
logfile: '/opt/so/log/strelka/filecheck.log'

View File

@@ -105,6 +105,49 @@ strelkaportavailable:
cmd.run:
- name: netstat -utanp | grep ":57314" | grep -qvE 'docker|TIME_WAIT' && PROCESS=$(netstat -utanp | grep ":57314" | uniq) && echo "Another process ($PROCESS) appears to be using port 57314. Please terminate this process, or reboot to ensure a clean state so that Strelka can start properly." && exit 1 || exit 0
# Filecheck Section
# States that deploy and run the filecheck watcher (dedup + Strelka queueing).
filecheck_logdir:
  file.directory:
    - name: /opt/so/log/strelka
    - user: 939
    - group: 939
    - makedirs: True

filecheck_history:
  file.directory:
    - name: /nsm/strelka/history
    - user: 939
    - group: 939
    - makedirs: True

filecheck_conf:
  file.managed:
    - name: /opt/so/conf/strelka/filecheck.yaml
    - source: salt://strelka/filecheck/filecheck.yaml
    - template: jinja

filecheck_script:
  file.managed:
    - name: /opt/so/conf/strelka/filecheck
    - source: salt://strelka/filecheck/filecheck
    - user: 939
    - group: 939
    - mode: 755

filecheck_run:
  cmd.run:
    - name: 'python3 /opt/so/conf/strelka/filecheck'
    - bg: True
    - runas: socore
    - unless: ps -ef | grep filecheck | grep -v grep

# NOTE(review): state ID is misspelled ("filcheck"); kept as-is in case it is
# referenced by requisites elsewhere. Bug fix: the cron command originally
# ended with a stray '>' after '2>&1', which is a shell syntax error, so the
# cleanup job would never run. An identifier is added for consistency with the
# other cron states in this file.
filcheck_history_clean:
  cron.present:
    - identifier: filecheck-history-clean
    - name: '/usr/bin/find /nsm/strelka/history/ -type f -mtime +2 -exec rm {} + > /dev/null 2>&1'
    - minute: '33'
# End Filecheck Section
strelka_coordinator:
docker_container.running:
- image: {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-redis:{{ GLOBALS.so_version }}
@@ -190,30 +233,6 @@ append_so-strelka-filestream_so-status.conf:
- name: /opt/so/conf/so-status/so-status.conf
- text: so-strelka-filestream
strelka_zeek_extracted_sync_old:
cron.absent:
- user: root
- name: '[ -d /nsm/zeek/extracted/complete/ ] && mv /nsm/zeek/extracted/complete/* /nsm/strelka/ > /dev/null 2>&1'
- minute: '*'
{% if GLOBALS.md_engine == "SURICATA" %}
strelka_suricata_extracted_sync:
cron.present:
- user: root
- identifier: zeek-extracted-strelka-sync
- name: '[ -d /nsm/suricata/extracted/ ] && find /nsm/suricata/extracted/* -not \( -path /nsm/suricata/extracted/tmp -prune \) -type f -print0 | xargs -0 -I {} mv {} /nsm/strelka/unprocessed/ > /dev/null 2>&1'
- minute: '*'
{% else %}
strelka_zeek_extracted_sync:
cron.present:
- user: root
- identifier: zeek-extracted-strelka-sync
- name: '[ -d /nsm/zeek/extracted/complete/ ] && mv /nsm/zeek/extracted/complete/* /nsm/strelka/unprocessed/ > /dev/null 2>&1'
- minute: '*'
{% endif %}
{% else %}
{{sls}}_state_not_allowed:

View File

@@ -57,6 +57,7 @@ suridatadir:
- name: /nsm/suricata/extracted
- user: 940
- group: 939
- mode: 770
- makedirs: True
surirulesync:

View File

@@ -56,12 +56,15 @@ zeekextractdir:
- name: /nsm/zeek/extracted
- user: 937
- group: 939
- mode: 770
- makedirs: True
zeekextractcompletedir:
file.directory:
- name: /nsm/zeek/extracted/complete
- user: 937
- group: 939
- mode: 770
- makedirs: True
# Sync the policies

View File

@@ -45,7 +45,7 @@ event file_state_remove(f: fa_file)
# Delete the file if it didn't pass our requirements check.
local nuke = fmt("rm %s/%s", FileExtract::prefix, f$info$extracted);
when ( local nukeit = Exec::run([$cmd=nuke]) )
when [nuke] ( local nukeit = Exec::run([$cmd=nuke]) )
{
}
return;
@@ -56,7 +56,7 @@ event file_state_remove(f: fa_file)
local dest = fmt("%scomplete/%s-%s-%s.%s", FileExtract::prefix, f$source, f$id, f$info$md5, extension);
# Copy it to the $prefix/complete folder then delete it. I got some weird results with moving when it came to watchdog in python.
local cmd = fmt("cp %s/%s %s && rm %s/%s", FileExtract::prefix, orig, dest, FileExtract::prefix, orig);
when ( local result = Exec::run([$cmd=cmd]) )
when [cmd] ( local result = Exec::run([$cmd=cmd]) )
{
}
f$info$extracted = dest;