This commit is contained in:
Mike Reeves
2021-06-30 11:38:15 -04:00
20 changed files with 14106 additions and 9063 deletions

View File

@@ -277,7 +277,7 @@ lookup_role() {
require_manager() {
if is_manager_node; then
echo "This is a manager, We can proceed."
echo "This is a manager, so we can proceed."
else
echo "Please run this command on the manager; the manager controls the grid."
exit 1

View File

@@ -1,6 +1,3 @@
{%- set mainint = salt['pillar.get']('host:mainint') %}
{%- set MYIP = salt['grains.get']('ip_interfaces:' ~ mainint)[0] %}
#!/bin/bash
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
#
@@ -17,6 +14,9 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{%- set mainint = salt['pillar.get']('host:mainint') %}
{%- set MYIP = salt['grains.get']('ip_interfaces:' ~ mainint)[0] %}
default_conf_dir=/opt/so/conf
ELASTICSEARCH_HOST="{{ MYIP }}"
ELASTICSEARCH_PORT=9200

View File

@@ -132,6 +132,8 @@ for PCAP in "$@"; do
PCAP_FIXED=`mktemp /tmp/so-import-pcap-XXXXXXXXXX.pcap`
echo "- attempting to recover corrupted PCAP file"
pcapfix "${PCAP}" "${PCAP_FIXED}"
# Make fixed file world readable since the Suricata docker container will run as a non-root user
chmod a+r "${PCAP_FIXED}"
PCAP="${PCAP_FIXED}"
TEMP_PCAPS+=(${PCAP_FIXED})
fi

View File

@@ -15,6 +15,8 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
. /usr/sbin/so-common
wdurregex="^[0-9]+w$"
ddurregex="^[0-9]+d$"

View File

@@ -15,19 +15,21 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
. /usr/sbin/so-common
echo -e "\nThis script is used to reduce the size of InfluxDB by downsampling old data into the so_long_term retention policy."
echo -e "\nInfluxDB will now be migrated. This could take a few hours depending on how large the database is and hardware resources available."
echo -e "\nInfluxDB will now be downsampled. This could take a few hours depending on how large the database is and hardware resources available."
read -r -p "Are you sure you want to continue? [y/N] " yorn
if [[ "$yorn" =~ ^([yY][eE][sS]|[yY])$ ]]; then
echo -e "\nMigrating InfluxDb started at `date`. This may take several hours depending on how much data needs to be moved."
echo -e "\nMigrating InfluxDb started at `date`. This may take several hours depending on how much data needs to be downsampled."
day=0
startdate=`date`
while docker exec -t so-influxdb /bin/bash -c "influx -ssl -unsafeSsl -database telegraf -execute \"SELECT mean(*) INTO \"so_long_term\".:MEASUREMENT FROM \"autogen\"./.*/ WHERE \"time\" >= '2020-07-21T00:00:00.0000000Z' + ${day}d AND \"time\" <= '2020-07-21T00:00:00.0000000Z' + $((day+1))d GROUP BY time(5m),*\""; do
# why 2020-07-21?
migrationdate=`date -d "2020-07-21 + ${day} days" +"%y-%m-%d"`
echo "Migration of $migrationdate started at $startdate and completed at `date`."
echo "Downsampling of $migrationdate started at $startdate and completed at `date`."
newdaytomigrate=$(date -d "$migrationdate + 1 days" +"%s")
today=$(date +"%s")
@@ -36,11 +38,11 @@ if [[ "$yorn" =~ ^([yY][eE][sS]|[yY])$ ]]; then
else
((day=day+1))
startdate=`date`
echo -e "\nMigrating the next day's worth of data."
echo -e "\nDownsampling the next day's worth of data."
fi
done
echo -e "\nInfluxDb data migration complete."
echo -e "\nInfluxDb data downsampling complete."
else
echo -e "\nExiting as requested."

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Purpose: permanently drop the "autogen" retention policy from the
# "telegraf" database inside the so-influxdb container, reclaiming disk
# space. Run so-influxdb-downsample FIRST if pre-2.3.60 history must be kept.

# Shared helpers/environment for Security Onion admin scripts.
. /usr/sbin/so-common

echo -e "\nThis script is used to reduce the size of InfluxDB by dropping the autogen retention policy."
echo "If you want to retain historical data prior to 2.3.60, then this should only be run after you have downsampled your data using so-influxdb-downsample."
echo -e "\nThe autogen retention policy will now be dropped from InfluxDB."

# Destructive operation: require explicit confirmation (default is No).
read -r -p "Are you sure you want to continue? [y/N] " yorn
if [[ "$yorn" =~ ^([yY][eE][sS]|[yY])$ ]]; then
	echo -e "\nDropping autogen retention policy."
	# -ssl/-unsafeSsl: the local influx endpoint uses a self-signed cert.
	if docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -execute "drop retention policy autogen on telegraf"; then
		echo -e "\nAutogen retention policy dropped from InfluxDb."
	else
		# NOTE: fixed typo "then autogen" -> "the autogen" in this message.
		echo -e "\nSomething went wrong dropping the autogen retention policy from InfluxDB. Please verify that the so-influxdb Docker container is running, and check the log at /opt/so/log/influxdb/influxdb.log for any details."
	fi
else
	echo -e "\nExiting as requested."
fi

View File

@@ -768,7 +768,7 @@ main() {
echo ""
require_manager
set_minionid
echo "Checking to see if this is an airgap install"
echo "Checking to see if this is an airgap install."
echo ""
check_airgap
echo "Found that Security Onion $INSTALLEDVERSION is currently installed."

View File

@@ -540,6 +540,146 @@
"zeek":{
"type":"object",
"dynamic": true
},
"aws":{
"type":"object",
"dynamic": true
},
"azure":{
"type":"object",
"dynamic": true
},
"barracuda":{
"type":"object",
"dynamic": true
},
"bluecoat":{
"type":"object",
"dynamic": true
},
"cef":{
"type":"object",
"dynamic": true
},
"checkpoint":{
"type":"object",
"dynamic": true
},
"cisco":{
"type":"object",
"dynamic": true
},
"cyberark":{
"type":"object",
"dynamic": true
},
"cylance":{
"type":"object",
"dynamic": true
},
"f5":{
"type":"object",
"dynamic": true
},
"fortinet":{
"type":"object",
"dynamic": true
},
"gcp":{
"type":"object",
"dynamic": true
},
"google_workspace":{
"type":"object",
"dynamic": true
},
"imperva":{
"type":"object",
"dynamic": true
},
"infoblox":{
"type":"object",
"dynamic": true
},
"juniper":{
"type":"object",
"dynamic": true
},
"microsoft":{
"type":"object",
"dynamic": true
},
"misp":{
"type":"object",
"dynamic": true
},
"netflow":{
"type":"object",
"dynamic": true
},
"netscout":{
"type":"object",
"dynamic": true
},
"o365":{
"type":"object",
"dynamic": true
},
"okta":{
"type":"object",
"dynamic": true
},
"proofpoint":{
"type":"object",
"dynamic": true
},
"radware":{
"type":"object",
"dynamic": true
},
"snort":{
"type":"object",
"dynamic": true
},
"snyk":{
"type":"object",
"dynamic": true
},
"sonicwall":{
"type":"object",
"dynamic": true
},
"sophos":{
"type":"object",
"dynamic": true
},
"squid":{
"type":"object",
"dynamic": true
},
"tomcat":{
"type":"object",
"dynamic": true
},
"zcaler":{
"type":"object",
"dynamic": true
},
"elasticsearch":{
"type":"object",
"dynamic": true
},
"kibana":{
"type":"object",
"dynamic": true
},
"logstash":{
"type":"object",
"dynamic": true
},
"redis":{
"type":"object",
"dynamic": true
}
}
}

View File

@@ -129,7 +129,8 @@ so-filebeat:
{% for module in THIRDPARTY.modules.keys() %}
{% for submodule in THIRDPARTY.modules[module] %}
{% if THIRDPARTY.modules[module][submodule].enabled and THIRDPARTY.modules[module][submodule]["var.syslog_port"] is defined %}
- {{ THIRDPARTY.modules[module][submodule].get("var.syslog_host", "0.0.0.0") }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}/{{ THIRDPARTY.modules[module][submodule]["var.input"] }}
- {{ THIRDPARTY.modules[module][submodule].get("var.syslog_host", "0.0.0.0") }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}/tcp
- {{ THIRDPARTY.modules[module][submodule].get("var.syslog_host", "0.0.0.0") }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}:{{ THIRDPARTY.modules[module][submodule]["var.syslog_port"] }}/udp
{% endif %}
{% endfor %}
{% endfor %}

View File

@@ -42,39 +42,32 @@ third_party_filebeat:
cef:
log:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9003
checkpoint:
firewall:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9505
cisco:
asa:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9001
ftd:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9003
ios:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9002
nexus:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9506
meraki:
enabled: false
var.input: udp
var.syslog_host: 0.0.0.0
var.syslog_port: 9525
umbrella:

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +1,5 @@
{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log') %}
influxdb:
retention_policies:
so_short_term:
@@ -10,4 +12,10 @@ influxdb:
shard_duration: 7d
downsample:
so_long_term:
resolution: 5m
resolution: 5m
{% if measurements|length > 0 %}
measurements:
{% for measurement in measurements.splitlines() %}
- {{ measurement }}
{% endfor %}
{% endif %}

View File

@@ -113,11 +113,12 @@ telegraf_database:
{% endfor %}
{% for dest_rp in influxdb.downsample.keys() %}
so_downsample_cq:
{% for measurement in influxdb.downsample[dest_rp].get('measurements', []) %}
so_downsample_{{measurement}}_cq:
influxdb_continuous_query.present:
- name: so_downsample_cq
- name: so_downsample_{{measurement}}_cq
- database: telegraf
- query: SELECT mean(*) INTO "{{dest_rp}}".:MEASUREMENT FROM /.*/ GROUP BY time({{influxdb.downsample[dest_rp].resolution}}),*
- query: SELECT mean(*) INTO "{{dest_rp}}"."{{measurement}}" FROM "{{measurement}}" GROUP BY time({{influxdb.downsample[dest_rp].resolution}}),*
- ssl: True
- verify_ssl: /etc/pki/ca.crt
- cert: ['/etc/pki/influxdb.crt', '/etc/pki/influxdb.key']
@@ -126,7 +127,7 @@ so_downsample_cq:
- docker_container: so-influxdb
- influxdb_database: telegraf_database
- file: influxdb_continuous_query.present_patch
- sls: salt.python3-influxdb
{% endfor %}
{% endfor %}
{% endif %}

View File

@@ -220,7 +220,7 @@ whiptail_create_web_user() {
[ -n "$TESTING" ] && return
WEBUSER=$(whiptail --title "$whiptail_title" --inputbox \
"Please enter an email address to create an administrator account for the web interface.\n\nThis will also be used for TheHive, Cortex, and Fleet." 12 60 "$1" 3>&1 1>&2 2>&3)
"Please enter an email address to create an administrator account for the web interface.\n\nThis will also be used for Elasticsearch, Kibana, TheHive, Cortex, and Fleet." 12 60 "$1" 3>&1 1>&2 2>&3)
local exitstatus=$?
whiptail_check_exitstatus $exitstatus