mirror of https://github.com/Security-Onion-Solutions/securityonion.git
Merge branch 'dev' into kilo
@@ -17,4 +17,4 @@
. /usr/sbin/so-common
salt-call state.highstate
salt-call state.highstate -linfo
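For context on the changed line: -linfo is the attached-value spelling of salt-call's -l/--log-level option, so the updated command simply runs the highstate with info-level console logging. Equivalent spellings, assuming the stock salt-call CLI:

    salt-call state.highstate -l info
    salt-call state.highstate --log-level=info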
67 salt/common/tools/sbin/so-filebeat-module-setup (Executable file)
@@ -0,0 +1,67 @@
{%- set mainint = salt['pillar.get']('host:mainint') %}
{%- set MYIP = salt['grains.get']('ip_interfaces:' ~ mainint)[0] %}

#!/bin/bash
# Copyright 2014,2015,2016,2017,2018,2019 Security Onion Solutions, LLC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

default_conf_dir=/opt/so/conf
ELASTICSEARCH_HOST="{{ MYIP }}"
ELASTICSEARCH_PORT=9200
#ELASTICSEARCH_AUTH=""

# Define a default directory to load pipelines from
FB_MODULE_YML="/usr/share/filebeat/module-setup.yml"


# Wait for ElasticSearch to initialize
echo -n "Waiting for ElasticSearch..."
COUNT=0
ELASTICSEARCH_CONNECTED="no"
while [[ "$COUNT" -le 240 ]]; do
  curl -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
  if [ $? -eq 0 ]; then
    ELASTICSEARCH_CONNECTED="yes"
    echo "connected!"
    break
  else
    ((COUNT+=1))
    sleep 1
    echo -n "."
  fi
done
if [ "$ELASTICSEARCH_CONNECTED" == "no" ]; then
  echo
  echo -e "Connection attempt timed out. Unable to connect to ElasticSearch. \nPlease try: \n -checking log(s) in /var/log/elasticsearch/\n -running 'sudo docker ps' \n -running 'sudo so-elastic-restart'"
  echo
fi
echo "Testing to see if the pipelines are already applied"
ESVER=$(curl -sk https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT" | jq .version.number | tr -d \")
PIPELINES=$(curl -sk https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"/_ingest/pipeline/filebeat-$ESVER-suricata-eve-pipeline | jq . | wc -c)

if [[ "$PIPELINES" -lt 5 ]]; then
  echo "Setting up ingest pipeline(s)"

  for MODULE in activemq apache auditd aws azure barracuda bluecoat cef checkpoint cisco coredns crowdstrike cyberark cylance elasticsearch envoyproxy f5 fortinet gcp google_workspace googlecloud gsuite haproxy ibmmq icinga iis imperva infoblox iptables juniper kafka kibana logstash microsoft misp mongodb mssql mysql nats netscout nginx o365 okta osquery panw postgresql rabbitmq radware redis santa snort snyk sonicwall sophos squid suricata system tomcat traefik zeek zscaler
  do
    echo "Loading $MODULE"
    docker exec -i so-filebeat filebeat setup modules -pipelines -modules $MODULE -c $FB_MODULE_YML
    sleep 2
  done
else
  exit 0
fi
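The script probes a single well-known pipeline ID (filebeat-$ESVER-suricata-eve-pipeline) to decide whether setup already ran. For a manual spot-check after the loop completes, listing the loaded ingest pipeline IDs works; a minimal sketch reusing the script's host/port variables (the jq filter is illustrative):

    curl -sk https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"/_ingest/pipeline | jq -r 'keys[]' | grep '^filebeat-'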
@@ -14,7 +14,6 @@ whiptail_manager_adv_service_zeeklogs() {
"conn" "Connection Logging" ON \
"dce_rpc" "RPC Logs" ON \
"dhcp" "DHCP Logs" ON \
"dhcpv6" "DHCP IPv6 Logs" ON \
"dnp3" "DNP3 Logs" ON \
"dns" "DNS Logs" ON \
"dpd" "DPD Logs" ON \
@@ -25,25 +24,20 @@ whiptail_manager_adv_service_zeeklogs() {
"irc" "IRC Chat Logs" ON \
"kerberos" "Kerberos Logs" ON \
"modbus" "MODBUS Logs" ON \
"mqtt" "MQTT Logs" ON \
"notice" "Zeek Notice Logs" ON \
"ntlm" "NTLM Logs" ON \
"openvpn" "OPENVPN Logs" ON \
"pe" "PE Logs" ON \
"radius" "Radius Logs" ON \
"rfb" "RFB Logs" ON \
"rdp" "RDP Logs" ON \
"signatures" "Signatures Logs" ON \
"sip" "SIP Logs" ON \
"smb_files" "SMB Files Logs" ON \
"smb_mapping" "SMB Mapping Logs" ON \
"smtp" "SMTP Logs" ON \
"snmp" "SNMP Logs" ON \
"software" "Software Logs" ON \
"ssh" "SSH Logs" ON \
"ssl" "SSL Logs" ON \
"syslog" "Syslog Logs" ON \
"telnet" "Telnet Logs" ON \
"tunnel" "Tunnel Logs" ON \
"weird" "Zeek Weird Logs" ON \
"mysql" "MySQL Logs" ON \
@@ -319,14 +319,6 @@ masterunlock() {
fi
}

preupgrade_changes_2.3.50_repo() {
  # We made repo changes in 2.3.50 and this prepares for that on upgrade
  echo "Checking to see if 2.3.50 repo changes are needed."

  [[ "$INSTALLEDVERSION" == 2.3.30 || "$INSTALLEDVERSION" == 2.3.40 ]] && up_2.3.3X_to_2.3.50_repo
  true
}
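A note on the trailing true: the guard calls the 2.3.50 repo migration only when the installed version is 2.3.30 or 2.3.40, and true then forces the function to return 0 in the skipped case, so callers (or set -e) do not mistake a non-matching guard for a failure. The same pattern in isolation (names illustrative):

    maybe_migrate() {
        [[ "$INSTALLEDVERSION" == "2.3.40" ]] && run_migration
        true  # succeed even when the guard above does not match
    }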

preupgrade_changes() {
  # This function is to add any new pillar items if needed.
  echo "Checking to see if changes are needed."
@@ -517,8 +509,8 @@ up_2.3.2X_to_2.3.30() {
  INSTALLEDVERSION=2.3.30
}

up_2.3.3X_to_2.3.50_repo() {
  echo "Performing 2.3.50 repo actions."
upgrade_to_2.3.50_repo() {
  echo "Performing repo changes."
  if [[ "$OS" == "centos" ]]; then
    # Import GPG Keys
    gpg_rpm_import
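gpg_rpm_import is a helper defined elsewhere in soup; it presumably wraps the standard RPM key import, whose primitive form on CentOS is (key path illustrative):

    rpm --import /etc/pki/rpm-gpg/RPM-GPG-KEY-securityonion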
@@ -787,6 +779,12 @@ main() {
  verify_latest_update_script
  echo ""

  echo "Stopping Salt Master service."
  systemctl stop salt-master
  echo ""

  upgrade_to_2.3.50_repo

  echo "Generating new repo archive"
  generate_and_clean_tarballs
  if [ -f /usr/sbin/so-image-common ]; then
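A note on the final guard: /usr/sbin/so-image-common only exists on installs that include the image tooling, so the updater conditions image-related steps on the file being present. A hedged sketch of the source-if-present idiom (the body shown is illustrative, not the actual continuation of this hunk):

    if [ -f /usr/sbin/so-image-common ]; then
        . /usr/sbin/so-image-common  # load image helper functions when available
    fi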