From a39edad3f680a210dc3a4b5092d5f006d7897e91 Mon Sep 17 00:00:00 2001 From: m0duspwnens Date: Tue, 21 Jan 2020 16:39:42 -0500 Subject: [PATCH] changes for multipipelines / mastersearch node - https://github.com/Security-Onion-Solutions/securityonion-saltstack/issues/124 --- pillar/logstash/mastersearch.sls | 6 + pillar/top.sls | 6 +- salt/common/nginx/nginx.conf.so-mastersearch | 262 ++++++++++++++++ salt/elastalert/init.sls | 2 +- salt/elasticsearch/init.sls | 3 +- salt/firewall/init.sls | 4 +- .../conf/conf.enabled.txt.so-mastersearch | 18 ++ salt/logstash/conf/pipelines/6000_bro.conf | 228 ++++++++++++++ .../conf/pipelines/6001_bro_import.conf | 16 + .../8000_postprocess_bro_cleanup.conf | 17 ++ .../conf/pipelines/8006_postprocess_dns.conf | 47 +++ .../pipelines/helix}/0010_input_hhbeats.conf | 0 .../helix/1033_preprocess_snort.conf | 181 +++++++++++ .../helix/1100_preprocess_bro_conn.conf | 77 +++++ .../helix/1101_preprocess_bro_dhcp.conf | 56 ++++ .../helix/1102_preprocess_bro_dns.conf | 74 +++++ .../helix/1103_preprocess_bro_dpd.conf | 42 +++ .../helix/1104_preprocess_bro_files.conf | 64 ++++ .../helix/1105_preprocess_bro_ftp.conf | 56 ++++ .../helix/1106_preprocess_bro_http.conf | 77 +++++ .../helix/1107_preprocess_bro_irc.conf | 46 +++ .../helix/1108_preprocess_bro_kerberos.conf | 56 ++++ .../helix/1109_preprocess_bro_notice.conf | 56 ++++ .../helix/1110_preprocess_bro_rdp.conf | 52 ++++ .../helix/1111_preprocess_bro_signatures.conf | 43 +++ .../helix/1112_preprocess_bro_smtp.conf | 65 ++++ .../helix/1113_preprocess_bro_snmp.conf | 47 +++ .../helix/1114_preprocess_bro_software.conf | 49 +++ .../helix/1115_preprocess_bro_ssh.conf | 66 ++++ .../helix/1116_preprocess_bro_ssl.conf | 186 ++++++++++++ .../helix/1117_preprocess_bro_syslog.conf | 41 +++ .../helix/1118_preprocess_bro_tunnel.conf | 40 +++ .../helix/1119_preprocess_bro_weird.conf | 42 +++ .../helix/1121_preprocess_bro_mysql.conf | 57 ++++ .../helix/1122_preprocess_bro_socks.conf | 62 ++++ 
.../helix/1123_preprocess_bro_x509.conf | 154 ++++++++++ .../helix/1124_preprocess_bro_intel.conf | 46 +++ .../helix/1125_preprocess_bro_modbus.conf | 49 +++ .../helix/1126_preprocess_bro_sip.conf | 66 ++++ .../helix/1127_preprocess_bro_radius.conf | 73 +++++ .../helix/1128_preprocess_bro_pe.conf | 46 +++ .../helix/1129_preprocess_bro_rfb.conf | 65 ++++ .../helix/1130_preprocess_bro_dnp3.conf | 51 ++++ .../helix/1131_preprocess_bro_smb_files.conf | 46 +++ .../1132_preprocess_bro_smb_mapping.conf | 40 +++ .../helix/1133_preprocess_bro_ntlm.conf | 50 ++++ .../helix/1134_preprocess_bro_dce_rpc.conf | 54 ++++ ...01_postprocess_common_ip_augmentation.conf | 58 ++++ .../pipelines/helix/9997_output_helix.conf | 142 +++++++++ .../pipelines/master/0010_input_hhbeats.conf | 40 +++ .../master/templates}/9999_output_redis.conf | 0 .../search/1000_preprocess_log_elapsed.conf | 13 + .../search/1001_preprocess_syslogng.conf | 33 ++ .../search/1002_preprocess_json.conf | 18 ++ .../search/1004_preprocess_syslog_types.conf | 19 ++ .../search/1026_preprocess_dhcp.conf | 140 +++++++++ .../search/1029_preprocess_esxi.conf | 31 ++ .../search/1030_preprocess_greensql.conf | 21 ++ .../pipelines/search/1031_preprocess_iis.conf | 21 ++ .../search/1032_preprocess_mcafee.conf | 26 ++ .../search/1033_preprocess_snort.conf | 181 +++++++++++ .../search/1034_preprocess_syslog.conf | 16 + .../pipelines/search/2000_network_flow.conf | 59 ++++ .../conf/pipelines/search/6002_syslog.conf | 11 + .../pipelines/search/6101_switch_brocade.conf | 33 ++ .../search/6200_firewall_fortinet.conf | 281 ++++++++++++++++++ .../search/6201_firewall_pfsense.conf | 56 ++++ .../conf/pipelines/search/6300_windows.conf | 161 ++++++++++ .../pipelines/search/6301_dns_windows.conf | 49 +++ .../conf/pipelines/search/6400_suricata.conf | 92 ++++++ .../conf/pipelines/search/6500_ossec.conf | 160 ++++++++++ .../pipelines/search/6501_ossec_sysmon.conf | 118 ++++++++ .../pipelines/search/6502_ossec_autoruns.conf | 43 +++ 
.../search/6600_winlogbeat_sysmon.conf | 23 ++ .../pipelines/search/6700_winlogbeat.conf | 17 ++ .../pipelines/search/7100_osquery_wel.conf | 23 ++ ...01_postprocess_common_ip_augmentation.conf | 58 ++++ .../search/8007_postprocess_http.conf | 27 ++ .../search/8200_postprocess_tagging.conf | 63 ++++ .../search/8998_postprocess_log_elapsed.conf | 19 ++ .../search/8999_postprocess_rename_type.conf | 8 + .../search/templates}/0900_input_redis.conf | 0 .../search/templates/9000_output_bro.conf | 31 ++ .../search/templates/9001_output_switch.conf | 27 ++ .../search/templates/9002_output_import.conf | 27 ++ .../search/templates/9004_output_flow.conf | 27 ++ .../search/templates/9026_output_dhcp.conf | 26 ++ .../search/templates/9029_output_esxi.conf | 25 ++ .../templates/9030_output_greensql.conf | 25 ++ .../search/templates/9031_output_iis.conf | 26 ++ .../search/templates/9032_output_mcafee.conf | 26 ++ .../search/templates/9033_output_snort.conf | 29 ++ .../search/templates/9034_output_syslog.conf | 28 ++ .../search/templates/9100_output_osquery.conf | 19 ++ .../templates/9200_output_firewall.conf | 29 ++ .../search/templates/9300_output_windows.conf | 27 ++ .../templates/9301_output_dns_windows.conf | 27 ++ .../templates/9400_output_suricata.conf | 27 ++ .../search/templates/9500_output_beats.conf | 25 ++ .../search/templates/9600_output_ossec.conf | 29 ++ salt/logstash/defaults.yml | 6 + salt/logstash/etc/logstash.yml | 4 + salt/logstash/etc/pipelines.yml.jinja | 4 + salt/logstash/init.sls | 62 +++- salt/master/init.sls | 6 +- salt/ssl/init.sls | 4 +- salt/top.sls | 41 +++ salt/utility/bin/crossthestreams.sh | 6 +- salt/utility/init.sls | 4 +- salt/wazuh/files/agent/ossec.conf | 4 +- salt/wazuh/files/agent/wazuh-register-agent | 12 +- setup/functions.sh | 37 ++- setup/so-setup.sh | 71 +++-- 113 files changed, 5613 insertions(+), 72 deletions(-) create mode 100644 pillar/logstash/mastersearch.sls create mode 100644 salt/common/nginx/nginx.conf.so-mastersearch create 
mode 100644 salt/logstash/conf/conf.enabled.txt.so-mastersearch create mode 100644 salt/logstash/conf/pipelines/6000_bro.conf create mode 100644 salt/logstash/conf/pipelines/6001_bro_import.conf create mode 100644 salt/logstash/conf/pipelines/8000_postprocess_bro_cleanup.conf create mode 100644 salt/logstash/conf/pipelines/8006_postprocess_dns.conf rename salt/logstash/{files/dynamic => conf/pipelines/helix}/0010_input_hhbeats.conf (100%) create mode 100644 salt/logstash/conf/pipelines/helix/1033_preprocess_snort.conf create mode 100644 salt/logstash/conf/pipelines/helix/1100_preprocess_bro_conn.conf create mode 100644 salt/logstash/conf/pipelines/helix/1101_preprocess_bro_dhcp.conf create mode 100644 salt/logstash/conf/pipelines/helix/1102_preprocess_bro_dns.conf create mode 100644 salt/logstash/conf/pipelines/helix/1103_preprocess_bro_dpd.conf create mode 100644 salt/logstash/conf/pipelines/helix/1104_preprocess_bro_files.conf create mode 100644 salt/logstash/conf/pipelines/helix/1105_preprocess_bro_ftp.conf create mode 100644 salt/logstash/conf/pipelines/helix/1106_preprocess_bro_http.conf create mode 100644 salt/logstash/conf/pipelines/helix/1107_preprocess_bro_irc.conf create mode 100644 salt/logstash/conf/pipelines/helix/1108_preprocess_bro_kerberos.conf create mode 100644 salt/logstash/conf/pipelines/helix/1109_preprocess_bro_notice.conf create mode 100644 salt/logstash/conf/pipelines/helix/1110_preprocess_bro_rdp.conf create mode 100644 salt/logstash/conf/pipelines/helix/1111_preprocess_bro_signatures.conf create mode 100644 salt/logstash/conf/pipelines/helix/1112_preprocess_bro_smtp.conf create mode 100644 salt/logstash/conf/pipelines/helix/1113_preprocess_bro_snmp.conf create mode 100644 salt/logstash/conf/pipelines/helix/1114_preprocess_bro_software.conf create mode 100644 salt/logstash/conf/pipelines/helix/1115_preprocess_bro_ssh.conf create mode 100644 salt/logstash/conf/pipelines/helix/1116_preprocess_bro_ssl.conf create mode 100644 
salt/logstash/conf/pipelines/helix/1117_preprocess_bro_syslog.conf create mode 100644 salt/logstash/conf/pipelines/helix/1118_preprocess_bro_tunnel.conf create mode 100644 salt/logstash/conf/pipelines/helix/1119_preprocess_bro_weird.conf create mode 100644 salt/logstash/conf/pipelines/helix/1121_preprocess_bro_mysql.conf create mode 100644 salt/logstash/conf/pipelines/helix/1122_preprocess_bro_socks.conf create mode 100644 salt/logstash/conf/pipelines/helix/1123_preprocess_bro_x509.conf create mode 100644 salt/logstash/conf/pipelines/helix/1124_preprocess_bro_intel.conf create mode 100644 salt/logstash/conf/pipelines/helix/1125_preprocess_bro_modbus.conf create mode 100644 salt/logstash/conf/pipelines/helix/1126_preprocess_bro_sip.conf create mode 100644 salt/logstash/conf/pipelines/helix/1127_preprocess_bro_radius.conf create mode 100644 salt/logstash/conf/pipelines/helix/1128_preprocess_bro_pe.conf create mode 100644 salt/logstash/conf/pipelines/helix/1129_preprocess_bro_rfb.conf create mode 100644 salt/logstash/conf/pipelines/helix/1130_preprocess_bro_dnp3.conf create mode 100644 salt/logstash/conf/pipelines/helix/1131_preprocess_bro_smb_files.conf create mode 100644 salt/logstash/conf/pipelines/helix/1132_preprocess_bro_smb_mapping.conf create mode 100644 salt/logstash/conf/pipelines/helix/1133_preprocess_bro_ntlm.conf create mode 100644 salt/logstash/conf/pipelines/helix/1134_preprocess_bro_dce_rpc.conf create mode 100644 salt/logstash/conf/pipelines/helix/8001_postprocess_common_ip_augmentation.conf create mode 100644 salt/logstash/conf/pipelines/helix/9997_output_helix.conf create mode 100644 salt/logstash/conf/pipelines/master/0010_input_hhbeats.conf rename salt/logstash/{files/dynamic => conf/pipelines/master/templates}/9999_output_redis.conf (100%) create mode 100644 salt/logstash/conf/pipelines/search/1000_preprocess_log_elapsed.conf create mode 100644 salt/logstash/conf/pipelines/search/1001_preprocess_syslogng.conf create mode 100644 
salt/logstash/conf/pipelines/search/1002_preprocess_json.conf create mode 100644 salt/logstash/conf/pipelines/search/1004_preprocess_syslog_types.conf create mode 100644 salt/logstash/conf/pipelines/search/1026_preprocess_dhcp.conf create mode 100644 salt/logstash/conf/pipelines/search/1029_preprocess_esxi.conf create mode 100644 salt/logstash/conf/pipelines/search/1030_preprocess_greensql.conf create mode 100644 salt/logstash/conf/pipelines/search/1031_preprocess_iis.conf create mode 100644 salt/logstash/conf/pipelines/search/1032_preprocess_mcafee.conf create mode 100644 salt/logstash/conf/pipelines/search/1033_preprocess_snort.conf create mode 100644 salt/logstash/conf/pipelines/search/1034_preprocess_syslog.conf create mode 100644 salt/logstash/conf/pipelines/search/2000_network_flow.conf create mode 100644 salt/logstash/conf/pipelines/search/6002_syslog.conf create mode 100644 salt/logstash/conf/pipelines/search/6101_switch_brocade.conf create mode 100644 salt/logstash/conf/pipelines/search/6200_firewall_fortinet.conf create mode 100644 salt/logstash/conf/pipelines/search/6201_firewall_pfsense.conf create mode 100644 salt/logstash/conf/pipelines/search/6300_windows.conf create mode 100644 salt/logstash/conf/pipelines/search/6301_dns_windows.conf create mode 100644 salt/logstash/conf/pipelines/search/6400_suricata.conf create mode 100644 salt/logstash/conf/pipelines/search/6500_ossec.conf create mode 100644 salt/logstash/conf/pipelines/search/6501_ossec_sysmon.conf create mode 100644 salt/logstash/conf/pipelines/search/6502_ossec_autoruns.conf create mode 100644 salt/logstash/conf/pipelines/search/6600_winlogbeat_sysmon.conf create mode 100644 salt/logstash/conf/pipelines/search/6700_winlogbeat.conf create mode 100644 salt/logstash/conf/pipelines/search/7100_osquery_wel.conf create mode 100644 salt/logstash/conf/pipelines/search/8001_postprocess_common_ip_augmentation.conf create mode 100644 salt/logstash/conf/pipelines/search/8007_postprocess_http.conf create 
mode 100644 salt/logstash/conf/pipelines/search/8200_postprocess_tagging.conf create mode 100644 salt/logstash/conf/pipelines/search/8998_postprocess_log_elapsed.conf create mode 100644 salt/logstash/conf/pipelines/search/8999_postprocess_rename_type.conf rename salt/logstash/{files/dynamic => conf/pipelines/search/templates}/0900_input_redis.conf (100%) create mode 100644 salt/logstash/conf/pipelines/search/templates/9000_output_bro.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9001_output_switch.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9002_output_import.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9004_output_flow.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9026_output_dhcp.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9029_output_esxi.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9030_output_greensql.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9031_output_iis.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9032_output_mcafee.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9033_output_snort.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9034_output_syslog.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9100_output_osquery.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9200_output_firewall.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9300_output_windows.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9301_output_dns_windows.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9400_output_suricata.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9500_output_beats.conf create mode 100644 salt/logstash/conf/pipelines/search/templates/9600_output_ossec.conf create mode 100644 
salt/logstash/defaults.yml create mode 100644 salt/logstash/etc/pipelines.yml.jinja diff --git a/pillar/logstash/mastersearch.sls b/pillar/logstash/mastersearch.sls new file mode 100644 index 000000000..2fbc5be5f --- /dev/null +++ b/pillar/logstash/mastersearch.sls @@ -0,0 +1,6 @@ +logstash: + pipelines: + master: + config: "/usr/share/logstash/pipelines/master/*.conf" + search: + config: "/usr/share/logstash/pipelines/search/*.conf" diff --git a/pillar/top.sls b/pillar/top.sls index d8c519eac..3a37fa861 100644 --- a/pillar/top.sls +++ b/pillar/top.sls @@ -8,7 +8,11 @@ base: - firewall.* - brologs - 'G@role:so-master': + 'G@role:so-mastersearch': + - logstash.mastersearch + + 'G@role:so-master or G@role:so-mastersearch': + - match: compound - minions.{{ grains.id }} - static - firewall.* diff --git a/salt/common/nginx/nginx.conf.so-mastersearch b/salt/common/nginx/nginx.conf.so-mastersearch new file mode 100644 index 000000000..265413fa2 --- /dev/null +++ b/salt/common/nginx/nginx.conf.so-mastersearch @@ -0,0 +1,262 @@ +{%- set masterip = salt['pillar.get']('master:mainip', '') %} +# For more information on configuration, see: +# * Official English Documentation: http://nginx.org/en/docs/ +# * Official Russian Documentation: http://nginx.org/ru/docs/ + +user nginx; +worker_processes auto; +error_log /var/log/nginx/error.log; +pid /run/nginx.pid; + +# Load dynamic modules. See /usr/share/nginx/README.dynamic. +include /usr/share/nginx/modules/*.conf; + +events { + worker_connections 1024; +} + +http { + log_format main '$remote_addr - $remote_user [$time_local] "$request" ' + '$status $body_bytes_sent "$http_referer" ' + '"$http_user_agent" "$http_x_forwarded_for"'; + + access_log /var/log/nginx/access.log main; + + sendfile on; + tcp_nopush on; + tcp_nodelay on; + keepalive_timeout 65; + types_hash_max_size 2048; + + include /etc/nginx/mime.types; + default_type application/octet-stream; + + # Load modular configuration files from the /etc/nginx/conf.d directory. 
+ # See http://nginx.org/en/docs/ngx_core_module.html#include + # for more information. + include /etc/nginx/conf.d/*.conf; + + #server { + # listen 80 default_server; + # listen [::]:80 default_server; + # server_name _; + # root /opt/socore/html; + # index index.html; + + # Load configuration files for the default server block. + #include /etc/nginx/default.d/*.conf; + + # location / { + # } + + # error_page 404 /404.html; + # location = /40x.html { + # } + + # error_page 500 502 503 504 /50x.html; + # location = /50x.html { + # } + #} + server { + listen 80 default_server; + server_name _; + return 301 https://$host$request_uri; + } + + +# Settings for a TLS enabled server. + + server { + listen 443 ssl http2 default_server; + #listen [::]:443 ssl http2 default_server; + server_name _; + root /opt/socore/html; + index index.html; + + ssl_certificate "/etc/pki/nginx/server.crt"; + ssl_certificate_key "/etc/pki/nginx/server.key"; + ssl_session_cache shared:SSL:1m; + ssl_session_timeout 10m; + ssl_ciphers HIGH:!aNULL:!MD5; + ssl_prefer_server_ciphers on; + + # Load configuration files for the default server block. 
+ #include /etc/nginx/default.d/*.conf; + + #location / { + # try_files $uri $uri.html /index.html; + # } + + location /grafana/ { + rewrite /grafana/(.*) /$1 break; + proxy_pass http://{{ masterip }}:3000/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /kibana/ { + auth_basic "Security Onion"; + auth_basic_user_file /opt/so/conf/nginx/.htpasswd; + rewrite /kibana/(.*) /$1 break; + proxy_pass http://{{ masterip }}:5601/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /playbook/ { + proxy_pass http://{{ masterip }}:3200/playbook/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /navigator/ { + auth_basic "Security Onion"; + auth_basic_user_file /opt/so/conf/nginx/.htpasswd; + proxy_pass http://{{ masterip }}:4200/navigator/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /api/ { + proxy_pass https://{{ masterip }}:8080/api/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /fleet/ { + rewrite /fleet/(.*) /$1 break; + auth_basic "Security Onion"; + 
auth_basic_user_file /opt/so/conf/nginx/.htpasswd; + proxy_pass https://{{ masterip }}:8080/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /thehive/ { + proxy_pass http://{{ masterip }}:9000/thehive/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_http_version 1.1; # this is essential for chunked responses to work + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /cortex/ { + proxy_pass http://{{ masterip }}:9001/cortex/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_http_version 1.1; # this is essential for chunked responses to work + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /cyberchef/ { + proxy_pass http://{{ masterip }}:9080/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_http_version 1.1; # this is essential for chunked responses to work + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /soctopus/ { + proxy_pass http://{{ masterip }}:7000/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + location /sensoroni/ { + auth_basic "Security Onion"; + auth_basic_user_file /opt/so/conf/nginx/.htpasswd; + proxy_pass http://{{ masterip }}:9822/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP 
$remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + + } + + location /kibana/app/sensoroni/ { + rewrite ^/kibana/app/sensoroni/(.*) /sensoroni/$1 permanent; + } + + location /kibana/app/fleet/ { + rewrite ^/kibana/app/fleet/(.*) /fleet/$1 permanent; + } + + location /kibana/app/soctopus/ { + rewrite ^/kibana/app/soctopus/(.*) /soctopus/$1 permanent; + } + + + location /sensoroniagents/ { + proxy_pass http://{{ masterip }}:9822/; + proxy_read_timeout 90; + proxy_connect_timeout 90; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Proxy ""; + + } + + error_page 404 /404.html; + location = /40x.html { + } + + error_page 500 502 503 504 /50x.html; + location = /50x.html { + } + } + +} diff --git a/salt/elastalert/init.sls b/salt/elastalert/init.sls index 999bbbd91..7f5819a15 100644 --- a/salt/elastalert/init.sls +++ b/salt/elastalert/init.sls @@ -20,7 +20,7 @@ {% set esport = salt['pillar.get']('master:es_port', '') %} -{% elif grains['role'] == 'so-eval' %} +{% elif grains['role'] in ['so-eval','so-mastersearch'] %} {% set esalert = salt['pillar.get']('master:elastalert', '1') %} {% set esip = salt['pillar.get']('master:mainip', '') %} diff --git a/salt/elasticsearch/init.sls b/salt/elasticsearch/init.sls index 4c5d3e644..cde242a85 100644 --- a/salt/elasticsearch/init.sls +++ b/salt/elasticsearch/init.sls @@ -17,7 +17,7 @@ {% set esclustername = salt['pillar.get']('master:esclustername', '') %} {% set esheap = salt['pillar.get']('master:esheap', '') %} -{% elif grains['role'] == 'so-eval' %} +{% elif grains['role'] in ['so-eval','so-mastersearch'] %} {% set esclustername = salt['pillar.get']('master:esclustername', '') %} {% set esheap = salt['pillar.get']('master:esheap', '') %} @@ -143,4 +143,3 @@ so-elasticsearch-pipelines: 
# Tell the main cluster I am here #curl -XPUT http://\$ELASTICSEARCH_HOST:\$ELASTICSEARCH_PORT/_cluster/settings -H'Content-Type: application/json' -d '{"persistent": {"search": {"remote": {"$HOSTNAME": {"skip_unavailable": "true", "seeds": ["$DOCKER_INTERFACE:$REVERSE_PORT"]}}}}}' - diff --git a/salt/firewall/init.sls b/salt/firewall/init.sls index f70632c9f..a016a9767 100644 --- a/salt/firewall/init.sls +++ b/salt/firewall/init.sls @@ -1,5 +1,5 @@ # Firewall Magic for the grid -{%- if grains['role'] in ['so-eval','so-master','so-helix'] %} +{%- if grains['role'] in ['so-eval','so-master','so-helix','so-mastersearch'] %} {%- set ip = salt['pillar.get']('static:masterip', '') %} {%- elif grains['role'] == 'so-node' %} {%- set ip = salt['pillar.get']('node:mainip', '') %} @@ -131,7 +131,7 @@ enable_wazuh_manager_1514_udp_{{ip}}: - save: True # Rules if you are a Master -{% if grains['role'] == 'so-master' or grains['role'] == 'so-eval' or grains['role'] == 'so-helix'%} +{% if grains['role'] == 'so-master' or grains['role'] == 'so-eval' or grains['role'] == 'so-helix' or grains['role'] == 'so-mastersearch' %} #This should be more granular iptables_allow_master_docker: iptables.insert: diff --git a/salt/logstash/conf/conf.enabled.txt.so-mastersearch b/salt/logstash/conf/conf.enabled.txt.so-mastersearch new file mode 100644 index 000000000..6464496fa --- /dev/null +++ b/salt/logstash/conf/conf.enabled.txt.so-mastersearch @@ -0,0 +1,18 @@ +# This is where can specify which LogStash configs get loaded. +# +# The custom folder on the master gets automatically synced to each logstash +# node. +# +# To enable a custom configuration see the following example and uncomment: +# /usr/share/logstash/pipeline.custom/1234_input_custom.conf +## +# All of the defaults are loaded. 
+/usr/share/logstash/pipeline.so/0000_input_syslogng.conf +/usr/share/logstash/pipeline.so/0001_input_json.conf +/usr/share/logstash/pipeline.so/0002_input_windows_json.conf +/usr/share/logstash/pipeline.so/0003_input_syslog.conf +/usr/share/logstash/pipeline.so/0005_input_suricata.conf +#/usr/share/logstash/pipeline.dynamic/0006_input_beats.conf +/usr/share/logstash/pipeline.dynamic/0010_input_hhbeats.conf +/usr/share/logstash/pipeline.so/0007_input_import.conf +/usr/share/logstash/pipeline.dynamic/9999_output_redis.conf diff --git a/salt/logstash/conf/pipelines/6000_bro.conf b/salt/logstash/conf/pipelines/6000_bro.conf new file mode 100644 index 000000000..4ba3d3989 --- /dev/null +++ b/salt/logstash/conf/pipelines/6000_bro.conf @@ -0,0 +1,228 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/10/2018 +# +filter { + if "bro" in [tags] { + + # Bro logs have a high quality timestamp, so let's copy that to @timestamp. + # Before we do, let's copy the existing logstash @timestamp to timestamp. + mutate { + add_field => { "logstash_timestamp" => "%{@timestamp}" } + } + mutate { + convert => { "logstash_timestamp" => "string" } + } + mutate { + convert => { "timestamp" => "string" } + } + # New Bro JSON logs use ISO8601 timestamps. + # Old Bro TSV logs use UNIX timestamps. 
+ date { + match => [ "timestamp", "ISO8601", "UNIX" ] + } + mutate { + rename => { "logstash_timestamp" => "timestamp" } + } + + if [duration] == "-" { + mutate { + replace => [ "duration", "0" ] + } + } + if [original_bytes] == "-" { + mutate { + replace => [ "original_bytes", "0" ] + } + } + # If MissedBytes is unspecified set it to zero so it is an integer + if [missed_bytes] == "-" { + mutate { + replace => [ "missed_bytes", "0" ] + } + } + # If OriginalIPBytes is unspecified set it to zero so it is an integer + if [original_ip_bytes] == "-" { + mutate { + replace => [ "original_ip_bytes", "0" ] + } + } + # If RespondBytes is unspecified set it to zero so it is an integer + if [respond_bytes] == "-" { + mutate { + replace => [ "respond_bytes", "0" ] + } + } + # If RespondIPBytes is unspecified set it to zero so it is an integer + if [respond_ip_bytes] == "-" { + mutate { + replace => [ "respond_ip_bytes", "0" ] + } + } + if [request_body_length] == "-" { + mutate { + replace => [ "request_body_length", "0" ] + } + } + if [response_body_length] == "-" { + mutate { + replace => [ "response_body_length", "0" ] + } + } + if [source_port] == "-" { + mutate { + remove_field => ["source_port"] + } + } + if [destination_port] == "-" { + mutate { + remove_field => ["destination_port"] + } + } + if [virtual_host] == "-" { + mutate { + remove_field => ["virtual_host"] + } + } + if [x_originating_ip] == "-" { + mutate { + remove_field => ["x_originating_ip"] + } + } + if [basic_constraints_path_length] == "-" { + mutate { + remove_field => ["basic_constraints_path_length"] + } + } + if [data_channel_source_ip] == "-" { + mutate { + remove_field => ["data_channel_source_ip"] + } + } + if [data_channel_destination_ip] == "-" { + mutate { + remove_field => ["data_channel_destination_ip"] + } + } + if [desktop_width] == "-" { + mutate { + remove_field => ["desktop_width"] + } + } + if [desktop_height] == "-" { + mutate { + remove_field => ["desktop_height"] + } + } + if 
[height] == "-" { + mutate { + remove_field => ["height"] + } + } + + + # I renamed conn_uids to uid so that it is easy to pivot to all things tied to a connection + mutate { + rename => [ "connection_uids", "uid" ] + } + # If total_bytes is set to "-" change it to 0 so it is an integer + if [total_bytes] == "-" { + mutate { + replace => [ "total_bytes", "0" ] + } + } + # If seen_bytes is set to "-" change it to 0 so it is an integer + if [seen_bytes] == "-" { + mutate { + replace => [ "seen_bytes", "0" ] + } + } + # If missing_bytes is set to "-" change it to 0 so it is an integer + if [missing_bytes] == "-" { + mutate { + replace => [ "missing_bytes", "0" ] + } + } + # If overflow_bytes is set to "-" change it to 0 so it is an integer + if [overflow_bytes] == "-" { + mutate { + replace => [ "overflow_bytes", "0" ] + } + } + if [dcc_file_size] == "-" { + mutate { + replace => [ "dcc_file_size", "0" ] + } + } + if [authentication_attempts] == "-" { + mutate { + replace => [ "authentication_attempts", "0" ] + } + } + if [file_size] == "-" { + mutate { + replace => [ "file_size", "0" ] + } + } + if [original_ip_bytes] == "-" { + mutate { + replace => [ "original_ip_bytes", "0" ] + } + } + + # I recommend changing the field types below to integer or floats so searches can do greater than or less than + # and also so math functions can be ran against them + mutate { + convert => [ "bound_port", "integer" ] + convert => [ "data_channel_destination_port", "integer" ] + convert => [ "destination_port", "integer" ] + convert => [ "depth", "integer" ] + #convert => [ "duration", "float" ] + convert => [ "info_code", "integer" ] + convert => [ "missed_bytes", "integer" ] + convert => [ "missing_bytes", "integer" ] + convert => [ "n", "integer" ] + convert => [ "original_bytes", "integer" ] + convert => [ "original_packets", "integer" ] + convert => [ "original_ip_bytes", "integer" ] + convert => [ "overflow_bytes", "integer" ] + convert => [ "p", "integer" ] + convert => [ 
"query_class", "integer" ] + convert => [ "query_type", "integer" ] + convert => [ "rcode", "integer" ] + convert => [ "request_body_length", "integer" ] + convert => [ "request_port", "integer" ] + convert => [ "respond_bytes", "integer" ] + convert => [ "respond_packets", "integer" ] + convert => [ "respond_ip_bytes", "integer" ] + convert => [ "response_body_length", "integer" ] + convert => [ "seen_bytes", "integer" ] + convert => [ "source_port", "integer" ] + convert => [ "status_code", "integer" ] + #convert => [ "suppress_for", "float" ] + convert => [ "total_bytes", "integer" ] + convert => [ "trans_depth", "integer" ] + convert => [ "transaction_id", "integer" ] + # convert the following boolean to text for now + convert => [ "local_respond", "string" ] + convert => [ "tc", "string" ] + convert => [ "is_orig", "string" ] + convert => [ "local_orig", "string" ] + lowercase => [ "query" ] + #remove_field => [ "timestamp" ] + } + + # Combine OriginalBytes and RespondBytes and save the value to total_bytes + if [original_bytes] { + if [respond_bytes] { + ruby { + code => "event.set('total_bytes', event.get('original_bytes') + event.get('respond_bytes'))" + } + } + } + mutate { + #add_tag => [ "conf_file_6000"] + } + } +} diff --git a/salt/logstash/conf/pipelines/6001_bro_import.conf b/salt/logstash/conf/pipelines/6001_bro_import.conf new file mode 100644 index 000000000..34c43f6ae --- /dev/null +++ b/salt/logstash/conf/pipelines/6001_bro_import.conf @@ -0,0 +1,16 @@ +# Updated by: Doug Burks +# Last Update: 2/10/2018 +# +filter { + if "import" in [tags] and "bro" in [tags] { + + # we're setting timestamp in 6000 now + #date { + # match => [ "timestamp", "UNIX" ] + #} + + mutate { + #add_tag => [ "conf_file_6001"] + } + } +} diff --git a/salt/logstash/conf/pipelines/8000_postprocess_bro_cleanup.conf b/salt/logstash/conf/pipelines/8000_postprocess_bro_cleanup.conf new file mode 100644 index 000000000..3998df8a4 --- /dev/null +++ 
b/salt/logstash/conf/pipelines/8000_postprocess_bro_cleanup.conf @@ -0,0 +1,17 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Email: justin@hasecuritysolution.com +# Last Update: 12/9/2016 + +filter { + if "bro" in [tags] { + if "_grokparsefailure" not in [tags] and "_csvparsefailure" not in [tags] and "_jsonparsefailure" not in [tags] { + #mutate { + # remove_field => [ "message" ] + #} + } + mutate { + #add_tag => [ "conf_file_8000"] + } + } +} diff --git a/salt/logstash/conf/pipelines/8006_postprocess_dns.conf b/salt/logstash/conf/pipelines/8006_postprocess_dns.conf new file mode 100644 index 000000000..a1520e6dc --- /dev/null +++ b/salt/logstash/conf/pipelines/8006_postprocess_dns.conf @@ -0,0 +1,47 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 5/13/2017 + +filter { + if [type] == "bro_dns" or "dns" in [tags] { + # Used for whois lookups - can create log loop + if [query] =~ "^whois\." 
{ + drop { } + } + # REPLACE test.int with your internal domain + if [query] and [query] !~ "\.test\.int$" { + mutate { + lowercase => [ "query" ] + } + if [query_type_name] != "NB" and [query_type_name] != "TKEY" and [query_type_name] != "NBSTAT" and [query_type_name] != "PTR" { + tld { + source => "query" + } + ruby { + code => "event.set('query_length', event.get('query').length)" + } + mutate { + rename => { "[SubLog][sessionid]" => "sub_session_id" } + rename => { "[tld][domain]" => "highest_registered_domain" } + rename => { "[tld][trd]" => "subdomain" } + rename => { "[tld][tld]" => "top_level_domain" } + rename => { "[tld][sld]" => "parent_domain" } + } + if [parent_domain] { + ruby { + code => "event.set('parent_domain_length', event.get('parent_domain').length)" + } + } + if [subdomain] { + ruby { + code => "event.set('subdomain_length', event.get('subdomain').length)" + } + } + } + } + mutate { + #add_tag => [ "conf_file_8006"] + } + } +} diff --git a/salt/logstash/files/dynamic/0010_input_hhbeats.conf b/salt/logstash/conf/pipelines/helix/0010_input_hhbeats.conf similarity index 100% rename from salt/logstash/files/dynamic/0010_input_hhbeats.conf rename to salt/logstash/conf/pipelines/helix/0010_input_hhbeats.conf diff --git a/salt/logstash/conf/pipelines/helix/1033_preprocess_snort.conf b/salt/logstash/conf/pipelines/helix/1033_preprocess_snort.conf new file mode 100644 index 000000000..897a8ae4b --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1033_preprocess_snort.conf @@ -0,0 +1,181 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 3/15/2018 + +filter { + if [type] == "ids" { + # This is the initial parsing of the log + if [engine] == "suricata" { + json { + source => "message" + } + mutate { + rename => { "alert" => "orig_alert" } + rename => { "[orig_alert][gid]" => "gid" } + rename => { "[orig_alert][signature_id]" => "sid" } + rename => { 
"[orig_alert][rev]" => "rev" } + rename => { "[orig_alert][signature]" => "alert" } + rename => { "[orig_alert][category]" => "classification" } + rename => { "[orig_alert][severity]" => "priority" } + rename => { "[orig_alert][rule]" => "rule_signature" } + rename => { "app_proto" => "application_protocol" } + rename => { "dest_ip" => "destination_ip" } + rename => { "dest_port" => "destination_port" } + rename => { "in_iface" => "interface" } + rename => { "proto" => "protocol" } + rename => { "src_ip" => "source_ip" } + rename => { "src_port" => "source_port" } + #rename => { "[fileinfo][filename]" => "filename" } + #rename => { "[fileinfo][gaps]" => "gaps" } + #rename => { "[fileinfo][size]" => "size" } + #rename => { "[fileinfo][state]" => "state" } + #rename => { "[fileinfo][stored]" => "stored" } + #rename => { "[fileinfo][tx_id]" => "tx_id" } + #rename => { "[flow][age]" => "duration" } + #rename => { "[flow][alerted]" => "flow_alerted" } + #rename => { "[flow][bytes_toclient]" => "bytes_to_client" } + #rename => { "[flow][bytes_toserver]" => "bytes_to_server" } + #rename => { "[flow][end]" => "flow_end" } + #rename => { "[flow][pkts_toclient]" => "packets_to_client" } + #rename => { "[flow][pkts_toserver]" => "packets_to_server" } + #rename => { "[flow][reason]" => "reason" } + #rename => { "[flow][start]" => "flow_start" } + #rename => { "[flow][state]" => "state" } + #rename => { "[netflow][age]" => "duration" } + #rename => { "[netflow][bytes]" => "bytes" } + #rename => { "[netflow][end]" => "netflow_end" } + #rename => { "[netflow][start]" => "netflow_start" } + #rename => { "[netflow][pkts]" => "packets" } + rename => { "[alert][action]" => "action" } + rename => { "[alert][category]" => "category" } + rename => { "[alert][gid]" => "gid" } + rename => { "[alert][rev]" => "rev" } + rename => { "[alert][severity]" => "severity" } + rename => { "[alert][signature]" => "signature" } + rename => { "[alert][signature_id]" => "sid" } + #rename => { 
"[dns][aa]" => "aa" } + #rename => { "[dns][flags]" => "flags" } + #rename => { "[dns][id]" => "id" } + #rename => { "[dns][qr]" => "qr" } + #rename => { "[dns][rcode]" => "rcode_name" } + #rename => { "[dns][rrname]" => "rrname" } + #rename => { "[dns][rrtype]" => "rrtype" } + #rename => { "[dns][tx_id]" => "tx_id" } + #rename => { "[dns][type]" => "record_type" } + #rename => { "[dns][version]" => "version" } + rename => { "[http][hostname]" => "virtual_host" } + rename => { "[http][http_content_type]" => "content_type" } + rename => { "[http][http_port]" => "http_port" } + rename => { "[http][http_method]" => "method" } + rename => { "[http][http_user_agent]" => "useragent" } + #rename => { "[http][length]" => "payload_length" } + #rename => { "[http][protocol]" => "http_version" } + rename => { "[http][status]" => "status_message" } + rename => { "[http][url]" => "url" } + #rename => { "[metadata][flowbits]" => "flowbits" } + rename => { "[tls][fingerprint]" => "certificate_serial_number" } + rename => { "[tls][issuerdn]" => "issuer_distinguished_name" } + rename => { "[tls][notafter]" => "certificate_not_valid_after" } + rename => { "[tls][notbefore]" => "certificate_not_valid_before" } + rename => { "[tls][subject]" => "certificate_common_name" } + rename => { "[tls][version]" => "tls_version" } + rename => { "event_type" => "ids_event_type" } + remove_field => [ "offset", "orig_alert", "beat", "input", "prospector" ] + remove_tag => [ "beats_input_codec_plain_applied" ] + add_tag => [ "eve" ] + + } + } else { + grok { + match => ["message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s+(?:%{IPV4:source_ip}|%{IPV6:source_ip}):%{INT:source_port}\s+->\s+(?:%{IPV4:destination_ip}|%{IPV6:destination_ip}):%{INT:destination_port}", + "message", 
"\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+<%{DATA:interface}>\s+{%{DATA:protocol}}\s(?:%{IPV4:source_ip}|%{IPV6:source_ip})\s+->\s+(?:%{IPV4:destination_ip}|%{IPV6:destination_ip})", + "message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s+(?:%{IPV4:source_ip}|%{IPV6:source_ip}):%{INT:source_port}\s+->\s+%{IPV4:destination_ip}:%{INT:destination_port}", + "message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s(?:%{IPV4:source_ip}|%{IPV6:source_ip})\s+->\s+(?:%{IPV4:destination_ip}|%{IPV6:destination_ip})", + "message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s+(?:%{IPV4:source_ip}|%{IPV6:source_ip}):%{INT:source_port}\s+->\s+(?:%{IPV4:destination_ip}|%{IPV6:destination_ip}):%{INT:destination_port}", + "message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}\s(?:%{IPV4:source_ip}|%{IPV6:source_ip})\s+->\s+(?:%{IPV4:source_ip}|%{IPV6:source_ip})", + "message", "\[%{INT:gid}:%{INT:sid}:%{INT:rev}\]\s%{DATA:alert}\[Classification:\s+%{DATA:classification}\]\s+\[Priority:\s+%{INT:priority}\]:\s+{%{DATA:protocol}}", + "message", "\A%{TIME} pid\(%{INT}\) Alert Received: %{INT} %{INT:priority} %{DATA:classification} %{DATA:interface} \{%{DATA:timestamp}} %{INT} %{INT} \{%{DATA:alert}} %{IP:source_ip} %{IP:destination_ip} %{INT:protocol} %{INT:source_port} %{INT:destination_port} %{INT:gid} %{INT:sid} %{INT:rev} %{INT} %{INT}\Z", + "message", "%{GREEDYDATA:alert}"] + } + } + if [timestamp] { + mutate { + add_field => { "logstash_timestamp" => 
"%{@timestamp}" } + } + mutate { + convert => { "logstash_timestamp" => "string" } + } + date { + match => [ "timestamp", "ISO8601" ] + } + mutate { + rename => { "logstash_timestamp" => "timestamp" } + } + } + + # If the alert is a Snort GPL alert break it apart for easier reading and categorization + if [alert] =~ "GPL " { + # This will parse out the category type from the alert + grok { + match => { "alert" => "GPL\s+%{DATA:category}\s" } + } + # This will store the category + mutate { + add_field => { "rule_type" => "Snort GPL" } + lowercase => [ "category"] + } + } + # If the alert is an Emerging Threat alert break it apart for easier reading and categorization + if [alert] =~ "ET " { + # This will parse out the category type from the alert + grok { + match => { "alert" => "ET\s+%{DATA:category}\s" } + } + # This will store the category + mutate { + add_field => { "rule_type" => "Emerging Threats" } + lowercase => [ "category"] + } + } + # I recommend changing the field types below to integer so searches can do greater than or less than + # and also so math functions can be ran against them + mutate { + convert => [ "source_port", "integer" ] + convert => [ "destination_port", "integer" ] + convert => [ "gid", "integer" ] + convert => [ "sid", "integer" ] + # remove_field => [ "message"] + } + # This will translate the priority field into a severity field of either High, Medium, or Low + if [priority] == 1 { + mutate { + add_field => { "severity" => "High" } + } + } + if [priority] == 2 { + mutate { + add_field => { "severity" => "Medium" } + } + } + if [priority] == 3 { + mutate { + add_field => { "severity" => "Low" } + } + } + # This section adds URLs to lookup information about a rule online + if [sid] and [sid] > 0 and [sid] < 1000000 { + mutate { + add_field => [ "signature_info", "https://www.snort.org/search?query=%{gid}-%{sid}" ] + } + } + if [sid] and [sid] > 1999999 and [sid] < 2999999 { + mutate { + add_field => [ "signature_info", 
"http://doc.emergingthreats.net/%{sid}" ] + } + } +# mutate { + #add_tag => [ "conf_file_1033"] +# } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1100_preprocess_bro_conn.conf b/salt/logstash/conf/pipelines/helix/1100_preprocess_bro_conn.conf new file mode 100644 index 000000000..b64b56bbe --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1100_preprocess_bro_conn.conf @@ -0,0 +1,77 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# +# This conf file is based on accepting logs for conn.log from Bro systems +filter { + if [type] == "bro_conn" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "proto" => "protocol" } + #service + #duration + rename => { "orig_bytes" => "original_bytes" } + rename => { "resp_bytes" => "respond_bytes" } + rename => { "conn_state" => "connection_state" } + #local_orig + rename => { "local_resp" => "local_respond" } + #missed_bytes + #history + rename => { "orig_pkts" => "original_packets" } + rename => { "orig_ip_bytes" => "original_ip_bytes" } + rename => { "resp_pkts" => "respond_packets" } + rename => { "resp_ip_bytes" => "respond_ip_bytes" } + #tunnel_parents + rename => { "orig_cc" => "original_country_code" } + rename => { "resp_cc" => "respond_country_code" } + rename => { "sensorname" => "sensor_name" } + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => 
["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","service","duration","original_bytes","respond_bytes","connection_state","local_orig","local_respond","missed_bytes","history","original_packets","original_ip_bytes","respond_packets","respond_ip_bytes","tunnel_parents","sensor_name"] + + # If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal in between the two quotes on your logstash system, use a text editor like nano that doesn't convert tabs to spaces. + separator => " " + } + } + + translate { + field => "connection_state" + + destination => "connection_state_description" + + dictionary => [ + "S0", "Connection attempt seen, no reply", + "S1", "Connection established, not terminated", + "S2", "Connection established and close attempt by originator seen (but no reply from responder)", + "S3", "Connection established and close attempt by responder seen (but no reply from originator)", + "SF", "Normal SYN/FIN completion", + "REJ", "Connection attempt rejected", + "RSTO", "Connection established, originator aborted (sent a RST)", + "RSTR", "Established, responder aborted", + "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder", + "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator", + "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)", + "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator", + "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)" + ] + } + mutate { + #add_tag => [ "conf_file_1100"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1101_preprocess_bro_dhcp.conf b/salt/logstash/conf/pipelines/helix/1101_preprocess_bro_dhcp.conf new file mode 100644 index 000000000..e7e7f12c0 --- 
/dev/null +++ b/salt/logstash/conf/pipelines/helix/1101_preprocess_bro_dhcp.conf @@ -0,0 +1,56 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks and Wes Lambert +# Last Update: 1/3/2019 +# +# This conf file is based on accepting logs for dhcp.log from Bro systems +filter { + if [type] == "bro_dhcp" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #mac + #assigned_ip + #lease_time + rename => { "trans_id" => "transaction_id" } + # new dhcp log format + rename => { "assigned_addr" => "assigned_ip" } + rename => { "client_addr" => "source_ip" } + rename => { "server_addr" => "destination_ip" } + rename => { "requested_addr" => "requested_ip" } + rename => { "domain" => "domain_name" } + rename => { "host_name" => "hostname" } + rename => { "msg_types" => "message_types" } + rename => { "uids" => "uid" } + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + # Bro logs in TSV format + csv { + columns => [ "timestamp", "uid", "source_ip", "destination_ip", "mac", "hostname", "client_fqdn", "domain_name", "requested_ip", "assigned_ip", "lease_time","client_message", "server_message", "message_types", "duration" ] + separator => " " + } + # Remove fields with empty values (-) to prevent field data type conflict + ruby { + code =>" + hash = event.to_hash.each do |key,value| + if value == '-' + event.remove(key) + end + end" + } + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1102_preprocess_bro_dns.conf b/salt/logstash/conf/pipelines/helix/1102_preprocess_bro_dns.conf new file mode 100644 index 000000000..340cdafbc --- 
/dev/null +++ b/salt/logstash/conf/pipelines/helix/1102_preprocess_bro_dns.conf @@ -0,0 +1,74 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for dns.log from Bro systems +filter { + if [type] == "bro_dns" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "proto" => "protocol" } + rename => { "trans_id" => "transaction_id" } + #rtt field + #query field + rename => { "qclass" => "query_class" } + rename => { "qclass_name" => "query_class_name" } + rename => { "qtype" => "query_type" } + rename => { "qtype_name" => "query_type_name" } + #rcode + #rcode_name + rename => { "AA" => "aa" } + rename => { "TC" => "tc" } + rename => { "RD" => "rd" } + rename => { "RA" => "ra" } + rename => { "Z" => "z" } + #answers + rename => { "TTLs" => "ttls" } + #rejected + } + } else { + + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","transaction_id","rtt","query","query_class","query_class_name","query_type","query_type_name","rcode","rcode_name","aa","tc","rd","ra","z","answers","ttls","rejected"] + + #If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal in between the two quotes on your logstash system, use a text editor like nano that doesn't convert tabs to spaces. 
+ separator => " " + } + } + + mutate { + add_tag => [ "dns" ] + } + if [ttls] == "-" { + mutate { + remove_field => [ "ttls" ] + } + } + if [rtt] == "-" { + mutate { + remove_field => [ "rtt" ] + } + } + #mutate { + #convert => [ "rtt", "float" ] + #} + mutate { + #add_tag => [ "conf_file_1102"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1103_preprocess_bro_dpd.conf b/salt/logstash/conf/pipelines/helix/1103_preprocess_bro_dpd.conf new file mode 100644 index 000000000..cc3b6ad39 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1103_preprocess_bro_dpd.conf @@ -0,0 +1,42 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for dpd.log from Bro systems +filter { + if [type] == "bro_dpd" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "proto" => "protocol" } + #analyzer + #failure_reason + } + } else { + + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","analyzer","failure_reason"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1103"] + } + + } +} diff --git a/salt/logstash/conf/pipelines/helix/1104_preprocess_bro_files.conf b/salt/logstash/conf/pipelines/helix/1104_preprocess_bro_files.conf new file mode 100644 index 000000000..88c524ea5 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1104_preprocess_bro_files.conf @@ -0,0 +1,64 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS 
SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for files.log from Bro systems +filter { + if [type] == "bro_files" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #fuid + rename => { "tx_hosts" => "file_ip" } + rename => { "rx_hosts" => "destination_ip" } + rename => { "conn_uids" => "connection_uids" } + #source field + #depth field + rename => { "analyzers" => "analyzer" } + rename => { "mime_type" => "mimetype" } + rename => { "filename" => "file_name" } + #duration + #local_orig + #is_orig + #seen_bytes + #total_bytes + #missing_bytes + #overflow_bytes + rename => { "timedout" => "timed_out" } + #parent_fuid + #md5 + #sha1 + #sha256 + #extracted + #extracted_cutoff + #extracted_size + } + } else { + + csv { + columns => ["timestamp","fuid","file_ip","destination_ip","connection_uids","source","depth","analyzer","mimetype","file_name","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timed_out","parent_fuid","md5","sha1","sha256","extracted","extracted_cutoff","extracted_size"] + separator => " " + } + if [destination_ip] =~ /,/ { + mutate { + split => { "destination_ip" => "," } + } + } + if [file_ip] =~ /,/ { + mutate { + split => { "file_ip" => "," } + } + } + } + + mutate { + #add_tag => [ "conf_file_1104"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1105_preprocess_bro_ftp.conf b/salt/logstash/conf/pipelines/helix/1105_preprocess_bro_ftp.conf new file mode 100644 index 000000000..c37ac71a0 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1105_preprocess_bro_ftp.conf @@ -0,0 +1,56 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# 
+# This conf file is based on accepting logs for ftp.log from Bro systems +filter { + if [type] == "bro_ftp" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "user" => "username" } + #password + rename => { "command" => "ftp_command" } + rename => { "arg" => "ftp_argument" } + rename => { "mime_type" => "mimetype" } + #file_size + #reply_code + rename => { "reply_msg" => "reply_message" } + rename => { "data_channel.passive" => "data_channel_passive" } + rename => { "data_channel.orig_h" => "data_channel_source_ip" } + rename => { "data_channel.resp_h" => "data_channel_destination_ip" } + rename => { "data_channel.resp_p" => "data_channel_destination_port" } + #fuid + } + + mutate { + convert => { "reply" => "string" } + } + + } else { + + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","username","password","ftp_command","ftp_argument","mimetype","file_size","reply_code","reply_message","data_channel_passive","data_channel_source_ip","data_channel_destination_ip","data_channel_destination_port","fuid"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1105"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1106_preprocess_bro_http.conf b/salt/logstash/conf/pipelines/helix/1106_preprocess_bro_http.conf new file mode 100644 index 000000000..3cff8faa7 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1106_preprocess_bro_http.conf @@ -0,0 +1,77 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks 
+# +# This conf file is based on accepting logs for http.log from Bro systems +filter { + if [type] == "bro_http" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + + # Rename logstash tags field to avoid being overwritten by Bro's http tags field + mutate { + rename => { "tags" => "tags-orig" } + } + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #trans_depth + #method + rename => { "host" => "virtual_host" } + #uri + #referrer + #version + #convert => { "version" => "string" } + rename => { "user_agent" => "useragent" } + #origin + rename => { "request_body_len" => "request_body_length" } + rename => { "response_body_len" => "response_body_length" } + #status_code + #status_message + rename => { "status_msg" => "status_message" } + #info_code + rename => { "info_msg" => "info_message" } + #tags + # Rename http tags field to http-tags + rename => { "tags" => "http-tags" } + # Rename logstash tags field to tags + rename => { "tags-orig" => "tags" } + #username + #password + #proxied + #orig_fuids + #orig_filenames + #orig_mime_types + #resp_fuids + #resp_filenames + #resp_mime_types + } + if [http-tags] { + mutate { + remove_field => [ "http-tags" ] + } + } + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*?))\t(?(.*))\t(?(.*))\t(?(.*?))\t(?(.*))" ] + } + } + + if [useragent] == "-" { + mutate { + remove_field => [ "useragent" ] + } + } + mutate { + #add_tag => [ "conf_file_1106"] + } + } +} diff --git 
a/salt/logstash/conf/pipelines/helix/1107_preprocess_bro_irc.conf b/salt/logstash/conf/pipelines/helix/1107_preprocess_bro_irc.conf new file mode 100644 index 000000000..841c4aa44 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1107_preprocess_bro_irc.conf @@ -0,0 +1,46 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for irc.log from Bro systems +filter { + if [type] == "bro_irc" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #nick + rename => { "user" => "irc_username" } + rename => { "command" => "irc_command" } + #value + rename => { "addl" => "additional_info" } + #dcc_file_name + #dcc_file_size + #dcc_mime_type + #fuid + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","nick","irc_username","irc_command","value","additional_info","dcc_file_name","dcc_file_size","dcc_mime_type","fuid"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1107"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1108_preprocess_bro_kerberos.conf b/salt/logstash/conf/pipelines/helix/1108_preprocess_bro_kerberos.conf new file mode 100644 index 000000000..89754126a --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1108_preprocess_bro_kerberos.conf @@ -0,0 +1,56 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# 
This conf file is based on accepting logs for kerberos.log from Bro systems +filter { + if [type] == "bro_kerberos" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #request_type + #client + #service + rename => { "success" => "kerberos_success" } + rename => { "error_msg" => "error_message" } + rename => { "from" => "valid_from" } + rename => { "till" => "valid_till" } + #cipher + #forwardable + #renewable + rename => { "client_cert_subject" => "client_certificate_subject" } + rename => { "client_cert_fuid" => "client_certificate_fuid" } + rename => { "server_cert_subject" => "server_certificate_subject" } + rename => { "server_cert_fuid" => "server_certificate_fuid" } + } + + mutate { + convert => { "kerberos_success" => "string" } + convert => { "renewable" => "string" } + } + + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","request_type","client","service","kerberos_success","error_message","valid_from","valid_till","cipher","forwardable","renewable","client_certificate_subject","client_certificate_fuid","server_certificate_subject","server_certificate_fuid"] + separator => " " + } + } + mutate { + #add_tag => [ "conf_file_1108"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1109_preprocess_bro_notice.conf b/salt/logstash/conf/pipelines/helix/1109_preprocess_bro_notice.conf new file mode 100644 index 000000000..2c22896d8 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1109_preprocess_bro_notice.conf @@ -0,0 +1,56 @@ +# Author: Justin Henderson +# SANS Instructor and author of 
SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for notice.log from Bro systems +filter { + if [type] == "bro_notice" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #fuid + rename => { "mime" => "file_mime_type" } + rename => { "desc" => "file_description" } + rename => { "proto" => "protocol" } + rename => { "note" => "note" } + rename => { "msg" => "msg" } + rename => { "sub" => "sub_msg" } + rename => { "src" => "source_ip" } + rename => { "dst" => "destination_ip" } + #p + #n + rename => { "peer_descr" => "peer_description" } + rename => { "actions" => "action" } + #suppress_for + #destination_country_code + #destination_region + #destination_city + #destination_latitude + #destination_longitude + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fuid","file_mime_type","file_description","protocol","note","msg","sub_msg","source_ip","destination_ip","p","n","peer_description","action","suppress_for","destination_country_code","destination_region","destination_city","destination_latitude","destination_longitude"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1109"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1110_preprocess_bro_rdp.conf b/salt/logstash/conf/pipelines/helix/1110_preprocess_bro_rdp.conf new file mode 100644 index 000000000..435a2ca3e --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1110_preprocess_bro_rdp.conf @@ -0,0 +1,52 @@ +# Author: Justin 
Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# +# This conf file is based on accepting logs for rdp.log from Bro systems +filter { + if [type] == "bro_rdp" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #cookie + #result + #security_protocol + #client_channels + #keyboard_layout + #client_build + #client_name + rename => { "client_dig_product_id" => "client_digital_product_id" } + #desktop_width + #desktop_height + #requested_color_depth + rename => { "cert_type" => "certificate_type" } + rename => { "cert_count" => "certificate_count" } + rename => { "cert_permanent" => "certificate_permanent" } + #encryption_level + #encryption_method + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","cookie","result","security_protocol","client_channels","keyboard_layout","client_build","client_name","client_digital_product_id","desktop_width","desktop_height","requested_color_depth","certificate_type","certificate_count","certificate_permanent","encryption_level","encryption_method"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1110"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1111_preprocess_bro_signatures.conf b/salt/logstash/conf/pipelines/helix/1111_preprocess_bro_signatures.conf new file mode 100644 index 000000000..0d3c1dc57 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1111_preprocess_bro_signatures.conf @@ -0,0 +1,43 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: 
SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for signatures.log from Bro systems +filter { + if [type] == "bro_signatures" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #note + rename => { "sig_id" => "signature_id" } + rename => { "event_msg" => "event_message" } + rename => { "sub_msg" => "sub_message" } + rename => { "sig_count" => "signature_count" } + #host_count + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","note","signature_id","event_message","sub_message","signature_count","host_count"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1111"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1112_preprocess_bro_smtp.conf b/salt/logstash/conf/pipelines/helix/1112_preprocess_bro_smtp.conf new file mode 100644 index 000000000..743bd5716 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1112_preprocess_bro_smtp.conf @@ -0,0 +1,65 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for smtp.log from Bro systems +filter { + if [type] == "bro_smtp" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #trans_depth + #helo + rename => { "mailfrom" => "mail_from" } + rename => { "rcptto" => "recipient_to" } + rename => { "date" => "mail_date" } + #from + #to + #cc + #reply_to + rename => { "msg_id" => "message_id" } + #in_reply_to + #subject + #x_originating_ip + #first_received + #second_received + #last_reply + #path + rename => { "user_agent" => "useragent" } + #tls + #fuids + #is_webmail + } + + mutate { + convert => { "tls" => "string" } + convert => { "is_webmail" => "string" } + } + + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))" ] + } + } + + if [useragent] == "-" { + mutate { + remove_field => [ "useragent" ] + } + } + mutate { + #add_tag => [ "conf_file_1112"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1113_preprocess_bro_snmp.conf b/salt/logstash/conf/pipelines/helix/1113_preprocess_bro_snmp.conf new file mode 100644 index 000000000..6a00a5244 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1113_preprocess_bro_snmp.conf @@ -0,0 +1,47 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for snmp.log from Bro systems +filter { + if [type] == "bro_snmp" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #duration + #version + #convert => { "version" => "string" } + #community + #get_requests + #get_bulk_requests + #get_responses + #set_requests + #display_string + #up_since + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","duration","version","community","get_requests","get_bulk_requests","get_responses","set_requests","display_string","up_since"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1113"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1114_preprocess_bro_software.conf b/salt/logstash/conf/pipelines/helix/1114_preprocess_bro_software.conf new file mode 100644 index 000000000..ef7eded01 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1114_preprocess_bro_software.conf @@ -0,0 +1,49 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for software.log from Bro systems +filter { + if [type] == "bro_software" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "host" => "source_ip" } + rename => { "host_p" => "source_port" } + #software_type + #name + rename => { "version.major" => "version_major" } + rename => { "version.minor" => "version_minor" } + rename => { "version.minor2" => "version_minor2" } + rename => { "version.minor3" => "version_minor3" } + rename => { "version.addl" => "version_additional_info" } + #unparsed_version + } + + mutate { + convert => { "version_major" => "string" } + convert => { "version_minor" => "string" } + } + + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","source_ip","source_port","software_type","name","version_major","version_minor","version_minor2","version_minor3","version_additional_info","unparsed_version"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1114"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1115_preprocess_bro_ssh.conf b/salt/logstash/conf/pipelines/helix/1115_preprocess_bro_ssh.conf new file mode 100644 index 000000000..a08d11e66 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1115_preprocess_bro_ssh.conf @@ -0,0 +1,66 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks and Wes Lambert +# Last Update: 10/30/2018 +# +# This conf file is based on accepting logs for ssh.log from Bro systems +filter { + if [type] == "bro_ssh" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #version + #convert => { "version" => "string" } + rename => { "auth_success" => "authentication_success" } + rename => { "auth_attempts" => "authentication_attempts" } + #direction + #client + #server + rename => { "cipher_alg" => "cipher_algorithm" } + rename => { "compression_alg" => "compression_algorithm" } + rename => { "cshka" => "client_host_key_algorithms" } + rename => { "host_key_alg" => "host_key_algorithm" } + rename => { "hasshAlgorithms" => "hassh_algorithms" } + rename => { "hasshServer" => "hassh_server" } + rename => { "hasshServerAlgorithms" => "hassh_server_algorithms" } + rename => { "hasshVersion" => "hassh_version" } + rename => { "kex_alg" => "kex_algorithm" } + rename => { "mac_alg" => "mac_algorithm" } + rename => { "sshka" => "server_host_key_algorithms" } + #host_key + #destination_country_code + #destination_region + #destination_city + #destination_latitude + #destination_longitude + } + + mutate { + convert => { "authentication_success" => "string" } + } + + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","authentication_success","authentication_attempts","direction","client","server","cipher_algorithm","mac_algorithm","compression_algorithm","kex_algorithm","host_key_algorithm","host_key","destination_country_code","destination_region","destination_city","destination_latitude","destination_longitude","hassh_version","hassh","hassh_server","client_host_key_algorithms","hassh_algorithms","server_host_key_algorithms","hassh_server_algorithms"] + separator => " " + } + } + + mutate { + #add_tag => [ 
"conf_file_1115"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1116_preprocess_bro_ssl.conf b/salt/logstash/conf/pipelines/helix/1116_preprocess_bro_ssl.conf new file mode 100644 index 000000000..930a670e9 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1116_preprocess_bro_ssl.conf @@ -0,0 +1,186 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 10/30/2018 +# +# This conf file is based on accepting logs for ssl.log from Bro systems +filter { + if [type] == "bro_ssl" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #version + #convert => { "version" => "string" } + #cipher + #curve + #server_name + #resumed + #last_alert + #next_protocol + #established + rename => { "cert_chain_fuids" => "certificate_chain_fuids" } + rename => { "client_cert_chain_fuids" => "client_certificate_chain_fuids" } + rename => { "subject" => "certificate_subject" } + rename => { "issuer" => "certificate_issuer" } + #client_subject + #client_issuer + #validation_status + #ja3 + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","cipher","curve","server_name","resumed","last_alert","next_protocol","established","certificate_chain_fuids","client_certificate_chain_fuids","certificate_subject","certificate_issuer","client_subject","client_issuer","validation_status","ja3","ja3s"] + separator => " " + } + } + + mutate { + gsub => [ "subject", "\\\\,", "|" ] + } + kv { + include_keys => [ "CN", "C", 
"O", "OU", "ST", "SN", "L", "DC", "GN", "pseudonym", "serialNumber", "title", "initials" ] + field_split => "," + source => "certificate_issuer" + } + mutate { + rename => { "CN" => "issuer_common_name"} + rename => { "C" => "issuer_country_code"} + rename => { "O" => "issuer_organization"} + rename => { "OU" => "issuer_organization_unit"} + rename => { "ST" => "issuer_state"} + rename => { "SN" => "issuer_surname"} + rename => { "L" => "issuer_locality"} + rename => { "DC" => "issuer_distinguished_name"} + rename => { "GN" => "issuer_given_name"} + rename => { "pseudonym" => "issuer_pseudonym"} + rename => { "serialNumber" => "issuer_serial_number"} + rename => { "title" => "issuer_title"} + rename => { "initials" => "issuer_initials"} + } + kv { + include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "GN", "pseudonym", "serialNumber", "title", "initials" ] + field_split => "," + source => "certificate_subject" + } + mutate { + rename => { "CN" => "certificate_common_name"} + rename => { "C" => "certificate_country_code"} + rename => { "O" => "certificate_organization"} + rename => { "OU" => "certificate_organization_unit"} + rename => { "ST" => "certificate_state"} + rename => { "SN" => "certificate_surname"} + rename => { "L" => "certificate_locality"} + rename => { "GN" => "certificate_given_name"} + rename => { "pseudonym" => "certificate_pseudonym"} + rename => { "serialNumber" => "certificate_serial_number"} + rename => { "title" => "certificate_title"} + rename => { "initials" => "certificate_initials"} + } + if [certificate_subject] == "-" { + mutate { + remove_field => [ "certificate_subject" ] + } + } + if [certificate_issuer] == "-" { + mutate { + remove_field => [ "certificate_issuer" ] + } + } + if [certificate_common_name] { + ruby { + code => "event.set('certificate_common_name_length', event.get('certificate_common_name').length)" + } + } + if [issuer_common_name] { + ruby { + code => "event.set('issuer_common_name_length', 
event.get('issuer_common_name').length)" + } + } + if [server_name] { + if [server_name] == "-" { + mutate { + remove_field => [ "server_name" ] + } + } else { + ruby { + code => "event.set('server_name_length', event.get('server_name').length)" + } + } + } + if [certificate_chain_fuids] { + if [certificate_chain_fuids] == "-" { + mutate { + remove_field => [ "certificate_chain_fuids" ] + } + } else { + ruby { + code => "event.set('certificate_chain_count', event.get('certificate_chain_fuids').count(',') + 1)" + } + mutate { + convert => [ "certificate_chain_count", "integer" ] + } + } + } + if [client_certificate_chain_fuids] == "-" { + mutate { + remove_field => [ "client_certificate_chain_fuids" ] + } + } + if [client_issuer] == "-" { + mutate { + remove_field => [ "client_issuer" ] + } + } + if [client_subject] == "-" { + mutate { + remove_field => [ "client_subject" ] + } + } + if [curve] == "-" { + mutate { + remove_field => [ "curve" ] + } + } + if [issuer] == "-" { + mutate { + remove_field => [ "issuer" ] + } + } + if [query] == "-" { + mutate { + remove_field => [ "query" ] + } + } + if [subject] == "-" { + mutate { + remove_field => [ "subject" ] + } + } + if [validation_status] == "-" { + mutate { + remove_field => [ "validation_status" ] + } + } + if [ja3] == "-" { + mutate { + remove_field => [ "ja3" ] + } + } + mutate { + #add_tag => [ "conf_file_1116"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1117_preprocess_bro_syslog.conf b/salt/logstash/conf/pipelines/helix/1117_preprocess_bro_syslog.conf new file mode 100644 index 000000000..c9e52df0f --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1117_preprocess_bro_syslog.conf @@ -0,0 +1,41 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for syslog.log from Bro systems +filter { + if [type] == "bro_syslog" { + # If message looks 
like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "proto" => "protocol" } + #facility + #severity + #message + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","facility","severity","message"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1117"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1118_preprocess_bro_tunnel.conf b/salt/logstash/conf/pipelines/helix/1118_preprocess_bro_tunnel.conf new file mode 100644 index 000000000..5ae07508c --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1118_preprocess_bro_tunnel.conf @@ -0,0 +1,40 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for tunnel.log from Bro systems +# Security Onion syslog-ng.conf sets type to "bro_tunnels" +filter { + if [type] == "bro_tunnels" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #tunnel_type + #action + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","tunnel_type","action"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1118"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1119_preprocess_bro_weird.conf b/salt/logstash/conf/pipelines/helix/1119_preprocess_bro_weird.conf new file mode 100644 index 000000000..156a25786 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1119_preprocess_bro_weird.conf @@ -0,0 +1,42 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for weird.log from Bro systems +filter { + if [type] == "bro_weird" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #name + rename => { "addl" => "additional_info" } + #notice + #peer + } + + mutate { + convert => { "notice" => "string" } + } + + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))" ] + } + } + + mutate { + #add_tag => [ "conf_file_1119"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1121_preprocess_bro_mysql.conf b/salt/logstash/conf/pipelines/helix/1121_preprocess_bro_mysql.conf new file mode 100644 index 000000000..97f0d6e28 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1121_preprocess_bro_mysql.conf @@ -0,0 +1,57 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for mysql.log from Bro systems +# +# Parse using grok +filter { + if [type] == "bro_mysql" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "cmd" => "mysql_command" } + rename => { "arg" => "mysql_argument" } + rename => { "success" => "mysql_success" } + #rows + #response + } + + mutate { + convert => { "mysql_success" => "string" } + } + + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))" ] + } + } + + mutate { + #add_tag => [ "conf_file_1121"] + } + } +} + +# Reverting to grok for now, due to double-quoted values in log file +# Parse using csv filter +#filter { +# if [type] == "bro_mysql" { +# csv { +# columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","mysql_command","mysql_argument","mysql_success","rows","response"] +# separator => " " +# quote_char= +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1122_preprocess_bro_socks.conf b/salt/logstash/conf/pipelines/helix/1122_preprocess_bro_socks.conf new file mode 100644 index 000000000..1b2876eb4 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1122_preprocess_bro_socks.conf @@ -0,0 +1,62 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for socks.log from Bro systems + +# Parse using csv +filter { + if [type] == "bro_socks" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #version + #convert => { "version" => "string" } + rename => { "user" => "username" } + #password + rename => { "status" => "server_status" } + rename => { "request.host" => "request_host" } + rename => { "request.name" => "request_name" } + rename => { "request_p" => "request_port" } + rename => { "bound.host" => "bound_host" } + rename => { "bound.name" => "bound_name" } + rename => { "bound_p" => "bound_port" } + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","username","password","server_status","request_host","request_name","request_port","bound_host","bound_name","bound_port"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1122"] + } + } +} +# Parse using grok +#filter { +# if [type] == "bro_socks" { +# # This is the initial parsing of the log +# grok { +# match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))" ] +# } +# mutate { +# #add_tag => [ "conf_file_1122"] +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1123_preprocess_bro_x509.conf b/salt/logstash/conf/pipelines/helix/1123_preprocess_bro_x509.conf new file mode 100644 index 000000000..37d4393e7 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1123_preprocess_bro_x509.conf @@ -0,0 +1,154 @@ +# Original Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for x509.log from 
Bro systems + +filter { + if [type] == "bro_x509" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #id + rename => { "certificate.version" => "certificate_version" } + rename => { "certificate.serial" => "certificate_serial" } + rename => { "certificate.subject" => "certificate_subject" } + rename => { "certificate.issuer" => "certificate_issuer" } + rename => { "certificate.not_valid_before" => "certificate_not_valid_before" } + rename => { "certificate.not_valid_after" => "certificate_not_valid_after" } + rename => { "certificate.key_alg" => "certificate_key_algorithm" } + rename => { "certificate.sig_alg" => "certificate_signing_algorithm" } + rename => { "certificate.key_type" => "certificate_key_type" } + rename => { "certificate.key_length" => "certificate_key_length" } + rename => { "certificate.exponent" => "certificate_exponent" } + rename => { "certificate.curve" => "certificate_curve" } + rename => { "id" => "fuid" } + rename => { "san.dns" => "san_dns" } + rename => { "san.uri" => "san_uri" } + rename => { "san.email" => "san_email" } + rename => { "san.ip" => "san_ip" } + rename => { "basic_constraints.ca" => "basic_constraints_ca" } + rename => { "basic_constraints.path_length" => "basic_constraints_path_length" } + } + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))\t(?(.*))" ] + } + } + + mutate { + gsub => [ "certificate_issuer", "\\\\,", "|" ] + gsub => [ "certificate_subject", "\\\\,", "|" ] + } + + kv { + include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "DC", "GN", "pseudonym", "serialNumber", "title", "initials" ] + field_split => "," + source => "certificate_issuer" + } + mutate { + rename => { "CN" => 
"issuer_common_name"} + rename => { "C" => "issuer_country_code"} + rename => { "O" => "issuer_organization"} + rename => { "OU" => "issuer_organization_unit"} + rename => { "ST" => "issuer_state"} + rename => { "SN" => "issuer_surname"} + rename => { "L" => "issuer_locality"} + rename => { "DC" => "issuer_distinguished_name"} + rename => { "GN" => "issuer_given_name"} + rename => { "pseudonym" => "issuer_pseudonym"} + rename => { "serialNumber" => "issuer_serial_number"} + rename => { "title" => "issuer_title"} + rename => { "initials" => "issuer_initials"} + } + kv { + include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "GN", "pseudonym", "serialNumber", "title", "initials" ] + field_split => "," + source => "certificate_subject" + } + mutate { + rename => { "CN" => "certificate_common_name"} + rename => { "C" => "certificate_country_code"} + rename => { "O" => "certificate_organization"} + rename => { "OU" => "certificate_organization_unit"} + rename => { "ST" => "certificate_state"} + rename => { "SN" => "certificate_surname"} + rename => { "L" => "certificate_locality"} + rename => { "GN" => "certificate_given_name"} + rename => { "pseudonym" => "certificate_pseudonym"} + rename => { "serialNumber" => "certificate_serial_number"} + rename => { "title" => "certificate_title"} + rename => { "initials" => "certificate_initials"} + convert => [ "certificate_key_length", "integer" ] + convert => [ "certificate_not_valid_after", "integer" ] + convert => [ "certificate_not_valid_before", "integer" ] + } + if [query] == "-" { + mutate { + remove_field => [ "query" ] + } + } + if [san_dns] == "-" { + mutate { + remove_field => [ "san_dns" ] + } + } + if [san_email] == "-" { + mutate { + remove_field => [ "san_email" ] + } + } + if [san_uri] == "-" { + mutate { + remove_field => [ "san_uri" ] + } + } + if [san_ip] == "-" { + mutate { + remove_field => [ "san_ip" ] + } + } + if [certificate_common_name] { + ruby { + code => 
"event.set('certificate_common_name_length', event.get('certificate_common_name').length)" + } + } + if [issuer_common_name] { + ruby { + code => "event.set('issuer_common_name_length', event.get('issuer_common_name').length)" + } + } + if [certificate_not_valid_after] == "-" { + mutate { + remove_field => [ "certificate_not_valid_after" ] + } + } + if [certificate_not_valid_before] == "-" { + mutate { + remove_field => [ "certificate_not_valid_before" ] + } + } + if [certificate_not_valid_after] and [certificate_not_valid_before] { + ruby { + code => "event.set('certificate_number_days_valid', ((event.get('certificate_not_valid_after') - event.get('certificate_not_valid_before')) / 86400).ceil)" + } + date { + match => [ "certificate_not_valid_after", "UNIX" ] + target => "certificate_not_valid_after" + } + date { + match => [ "certificate_not_valid_before", "UNIX" ] + target => "certificate_not_valid_before" + } + } + mutate { + #add_tag => [ "conf_file_1123"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1124_preprocess_bro_intel.conf b/salt/logstash/conf/pipelines/helix/1124_preprocess_bro_intel.conf new file mode 100644 index 000000000..0f1c53134 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1124_preprocess_bro_intel.conf @@ -0,0 +1,46 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for intel.log from Bro systems +filter { + if [type] == "bro_intel" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "seen.indicator" => "indicator" } + rename => { "seen.indicator_type" => "indicator_type" } + rename => { "seen.where" => "seen_where" } + rename => { "seen.node" => "seen_node" } + #matched + #sources + #fuid + rename => { "file_mime_type" => "mimetype" } + rename => { "file_desc" => "file_description" } + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","indicator","indicator_type","seen_where","seen_node","matched","sources","fuid","mimetype","file_description"] + separator => " " + } + } + + mutate { + #add_tag => [ "conf_file_1124"] + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1125_preprocess_bro_modbus.conf b/salt/logstash/conf/pipelines/helix/1125_preprocess_bro_modbus.conf new file mode 100644 index 000000000..6d6d48ad2 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1125_preprocess_bro_modbus.conf @@ -0,0 +1,49 @@ +# Author: Wes Lambert +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for modbus.log from Bro systems +# +filter { + if [type] == "bro_modbus" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + rename => { "func" => "function" } + #exception + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","function","exception"] + separator => " " + } + } + } +} + +# Parse using grok +#filter { +# if [type] == "bro_modbus" { +# grok { +# match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] +# } + #mutate { + #add_tag => [ "conf_file_1125"] + #} +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1126_preprocess_bro_sip.conf b/salt/logstash/conf/pipelines/helix/1126_preprocess_bro_sip.conf new file mode 100644 index 000000000..0f1cf4c46 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1126_preprocess_bro_sip.conf @@ -0,0 +1,66 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for sip.log from Bro systems +# +filter { + if [type] == "bro_sip" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #trans_depth + #method + #uri + #date + #request_from + #request_to + #response_from + #response_to + #reply_to + #call_id + #seq + #subject + #request_path + #response_path + #user_agent + #status_code + #status_msg + #warning + rename => { "request_body_len" => "request_body_length" } + rename => { "response_body_len" => "response_body_length" } + #content_type + } + } else { + grok { + match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] + } + } + + mutate { + add_tag => [ "conf_file_1126"] + } + } +} +# Parse using csv filter +#filter { +# if [type] == "bro_sip" { +# csv { +# columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","trans_depth","method","uri","date","request_from","request_to","response_from","response_to","reply_to","call_id","seq","subject","request_path","response_path","user_agent","status_code","status_msg","warning","request_body_len","response_body_len","content_type"] +# separator => " " +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1127_preprocess_bro_radius.conf b/salt/logstash/conf/pipelines/helix/1127_preprocess_bro_radius.conf new file mode 100644 index 000000000..732efb23c --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1127_preprocess_bro_radius.conf @@ -0,0 +1,73 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# +# This conf file is based on accepting logs 
for radius.log from Bro systems +# +filter { + if [type] == "bro_radius" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #username + #mac + #framed_addr + #tunnel_client + #connect_info + rename => { "reply_msg" => "reply_message" } + #result + #ttl + #logged + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","username","mac","framed_addr","tunnel_client","connect_info","reply_message","result","ttl","logged"] + separator => " " + } + if [tunnel_client] == "-" { + mutate { + remove_field => [ "tunnel_client" ] + } + } + + } + # Remove the ttl and framed_addr fields + if [ttl] { + mutate { + remove_field => [ "ttl" ] + } + } + if [framed_addr] { + mutate { + remove_field => [ "framed_addr" ] + } + } + } +} + +# Parse using grok +#filter { +# if [type] == "bro_radius" { +# grok { +# match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] +# } +# mutate { +# #add_tag => [ "conf_file_1127"] +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1128_preprocess_bro_pe.conf b/salt/logstash/conf/pipelines/helix/1128_preprocess_bro_pe.conf new file mode 100644 index 000000000..7770de12d --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1128_preprocess_bro_pe.conf @@ -0,0 +1,46 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# +# This conf file is based on accepting logs for pe.log from Bro systems +# +filter { + if [type] == "bro_pe" { + # If 
message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. + if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + rename => { "id" => "fuid" } + #machine + #compile_ts + #os + #subsystem + #is_exe + #is_64bit + #uses_aslr + #uses_dep + #uses_code_integrity + #uses_seh + #has_import_table + #has_export_table + #has_cert_table + #has_debug_data + #section_names + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","fuid","machine","compile_ts","os","subsystem","is_exe","is_64bit","uses_aslr","uses_dep","uses_code_integrity","uses_seh","has_import_table","has_export_table","has_cert_table","has_debug_data","section_names"] + separator => " " + } + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1129_preprocess_bro_rfb.conf b/salt/logstash/conf/pipelines/helix/1129_preprocess_bro_rfb.conf new file mode 100644 index 000000000..21ecac78f --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1129_preprocess_bro_rfb.conf @@ -0,0 +1,65 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for rfb.log from Bro systems +# +# Parse using csv filter +filter { + if [type] == "bro_rfb" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #client_major_version + #client_minor_version + #server_major_version + #server_minor_version + #authentication_method + #auth + #share_flag + #desktop_name + #width + #height + } + + mutate { + convert => { "auth" => "string" } + convert => { "share_flag" => "string" } + } + + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","client_major_version","client_minor_version","server_major_version","server_minor_version","authentication_method","auth","share_flag","desktop_name","width","height"] + separator => " " + } + } + } +} + +# Parse using grok +#filter { +# if [type] == "bro_rfb" { +# grok { +# match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] +# } +# mutate { +# #add_tag => [ "conf_file_1129"] +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1130_preprocess_bro_dnp3.conf b/salt/logstash/conf/pipelines/helix/1130_preprocess_bro_dnp3.conf new file mode 100644 index 000000000..a2c10babf --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1130_preprocess_bro_dnp3.conf @@ -0,0 +1,51 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for dnp3.log from Bro systems +# +filter { + if [type] == "bro_dnp3" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #fc_request + #fc_reply + #iin + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fc_request","fc_reply","iin"] + separator => " " + } + } + } +} + +# Parse using grok +#filter { +# if [type] == "bro_dnp3" { +# grok { +# match => [ "message", "(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))\t(?(.*?))$" ] +# } +# mutate { +# #add_tag => [ "conf_file_1130"] +# } +# } +#} diff --git a/salt/logstash/conf/pipelines/helix/1131_preprocess_bro_smb_files.conf b/salt/logstash/conf/pipelines/helix/1131_preprocess_bro_smb_files.conf new file mode 100644 index 000000000..ca6cfe8db --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1131_preprocess_bro_smb_files.conf @@ -0,0 +1,46 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for smb_files.log from Bro systems +# +filter { + if [type] == "bro_smb_files" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #fuid + #action + #path + #name + #size + #prev_name + rename => { "times.modified" => "times_modified" } + rename => { "times.accessed" => "times_accessed" } + rename => { "times.created" => "times_created" } + rename => { "times.changed" => "times_changed" } + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fuid","action","path","name","size","prev_name","times_modified","times_accessed","times_created","times_changed"] + separator => " " + } + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1132_preprocess_bro_smb_mapping.conf b/salt/logstash/conf/pipelines/helix/1132_preprocess_bro_smb_mapping.conf new file mode 100644 index 000000000..84256ed0e --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1132_preprocess_bro_smb_mapping.conf @@ -0,0 +1,40 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for smb_mapping.log from Bro systems +# +filter { + if [type] == "bro_smb_mapping" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #path + #service + #native_file_system + #share_type + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","path","service","native_file_system","share_type"] + separator => " " + } + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1133_preprocess_bro_ntlm.conf b/salt/logstash/conf/pipelines/helix/1133_preprocess_bro_ntlm.conf new file mode 100644 index 000000000..3b5fd6384 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1133_preprocess_bro_ntlm.conf @@ -0,0 +1,50 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks and Wes Lambert +# Last Update: 1/2/2019 +# +# This conf file is based on accepting logs for ntlm.log from Bro systems +# +filter { + if [type] == "bro_ntlm" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #hostname + rename => { "domainname" => "domain_name" } + rename => { "success" => "ntlm_success" } + #status + } + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => [ "timestamp", "uid", "source_ip", "source_port", "destination_ip", "destination_port", "username", "hostname", "domain_name", "server_nb_computer_name", "server_dns_computer_name", "server_tree_name", "ntlm_success"] + separator => " " + } + ruby { + code =>" + hash = event.to_hash.each do |key,value| + if value == '-' + event.remove(key) + end + end" + } + + + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/1134_preprocess_bro_dce_rpc.conf b/salt/logstash/conf/pipelines/helix/1134_preprocess_bro_dce_rpc.conf new file mode 100644 index 000000000..1b0e56a67 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/1134_preprocess_bro_dce_rpc.conf @@ -0,0 +1,54 @@ +# Author: Wes Lambert +# +# Adapted from existing filters provided by Justin Henderson +# +# Updated by: Doug Burks +# Last Update: 2/7/2018 +# +# This conf file is based on accepting logs for dce_rpc.log from Bro systems +# +filter { + if [type] == "bro_dce_rpc" { + # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok. 
+ if [message] =~ /^{.*}$/ { + json { + source => "message" + } + + mutate { + rename => { "ts" => "timestamp" } + #uid + rename => { "id.orig_h" => "source_ip" } + rename => { "id.orig_p" => "source_port" } + rename => { "id.resp_h" => "destination_ip" } + rename => { "id.resp_p" => "destination_port" } + #rtt + #named_pipe + #endpoint + #operation + } + + #mutate { + #convert => { "rtt" => "float" } + #} + } else { + mutate { + gsub => [ "message", "[\"']", "" ] + } + csv { + columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","rtt","named_pipe","endpoint","operation"] + separator => " " + } + + if [rtt] == "-" { + mutate { + remove_field => [ "rtt" ] + } + } + + #mutate { + #convert => [ "rtt", "float" ] + #} + } + } +} diff --git a/salt/logstash/conf/pipelines/helix/8001_postprocess_common_ip_augmentation.conf b/salt/logstash/conf/pipelines/helix/8001_postprocess_common_ip_augmentation.conf new file mode 100644 index 000000000..d28449da6 --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/8001_postprocess_common_ip_augmentation.conf @@ -0,0 +1,58 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Updated by: Doug Burks +# Last Update: 5/20/2017 + +filter { + if [source_ip] { + if [source_ip] == "-" { + mutate { + replace => { "source_ip" => "0.0.0.0" } + } + } + if [source_ip] =~ "10\." or [source_ip] =~ "192\.168\." or [source_ip] =~ "172\.(1[6-9]|2[0-9]|3[0-1])\." or [source_ip] =~ "fe80::20c:29ff:fe19:f7d" or [source_ip] =~ "::1" { + mutate { + } + } else { + geoip { + source => "[source_ip]" + target => "source_geo" + } + } + if [source_ip] { + mutate { + add_field => { "ips" => "%{source_ip}" } + add_field => { "source_ips" => [ "%{source_ip}" ] } + } + } + } + if [destination_ip] { + if [destination_ip] == "-" { + mutate { + replace => { "destination_ip" => "0.0.0.0" } + } + } + if [destination_ip] =~ "10\." or [destination_ip] =~ "192\.168\." 
or [destination_ip] =~ "172\.(1[6-9]|2[0-9]|3[0-1])\." or [destination_ip] =~ "239.255.255.250" or [destination_ip] =~ "224\.0\.0\." or [destination_ip] =~ "255.255.255.255" or [destination_ip] =~ "ff02::fb" or [destination_ip] =~ "fe80::20c:29ff:fe19:f7d" or [destination_ip] =~ "224\.0\.1\." { + mutate { + } + } + else { + geoip { + source => "[destination_ip]" + target => "destination_geo" + } + } + } + if [destination_ip] { + mutate { + add_field => { "ips" => "%{destination_ip}" } + add_field => { "destination_ips" => [ "%{destination_ip}" ] } + } + } +} + #if [source_ip] or [destination_ip] { + # mutate { + #add_tag => [ "conf_file_8001"] + # } + #} + diff --git a/salt/logstash/conf/pipelines/helix/9997_output_helix.conf b/salt/logstash/conf/pipelines/helix/9997_output_helix.conf new file mode 100644 index 000000000..5dd0036fe --- /dev/null +++ b/salt/logstash/conf/pipelines/helix/9997_output_helix.conf @@ -0,0 +1,142 @@ +{% set HELIX_API_KEY = salt['pillar.get']('fireeye:helix:api_key', '') %} + +filter { + if [type] =~ /^bro_conn|bro_dns|bro_http|bro_files|bro_ssl|bro_dhcp|bro_x509$/ { + grok { + match => [ + "source_ip", "^%{IPV4:srcipv4}$", + "source_ip", "(?^([0-9A-Fa-f]{0,4}:){2,7}([0-9A-Fa-f]{1,4}$|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4})$)" + ] + } + grok { + match => [ + "destination_ip", "(?^([0-9A-Fa-f]{0,4}:){2,7}([0-9A-Fa-f]{1,4}$|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4})$)", + "destination_ip", "^%{IPV4:dstipv4}$" + ] + } + + geoip { + source => "[source_ip]" + target => "source_geo" + } + geoip { + source => "[destination_ip]" + target => "destination_geo" + } + mutate { + #rename => { "%{[source_geo][country_code]}" => "srccountrycode" } + #rename => { "%{[destination_geo][country_code]}" => "dstcountrycode" } + rename => { "[beat_host][name]" => "sensor" } + copy => { "sensor" => "rawmsghostname" } + rename => { "message" => "rawmsg" } + #rename => { "event_type" => "program" } + copy => { "type" => "class" } + copy => { 
"class" => "program"} + rename => { "source_port" => "srcport" } + rename => { "destination_port" => "dstport" } + remove_field => ["source_ip", "destination_ip"] + remove_field => ["sensorname", "sensor_name", "service", "source", "tags", "syslog-host"] + remove_field => ["sensor_name", "source_ips", "ips", "destination_ips", "syslog-priority", "syslog-file_name", "syslog-facility"] + } + if "bro_conn" in [class] { + mutate { + #add_field => { "metaclass" => "connection" } + rename => { "original_bytes" => "sentbytes" } + rename => { "respond_bytes" => "rcvdbytes" } + rename => { "connection_state" => "connstate" } + rename => { "uid" => "connectionid" } + rename => { "respond_packets" => "rcvdpackets" } + rename => { "original_packets" => "sentpackets" } + rename => { "respond_ip_bytes" => "rcvdipbytes" } + rename => { "original_ip_bytes" => "sentipbytes" } + rename => { "local_respond" => "local_resp" } + rename => { "local_orig" => "localorig" } + rename => { "missed_bytes" => "missingbytes" } + } + } + if "bro_dns" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "answers" => "answer" } + rename => { "query" => "domain" } + rename => { "query_class" => "queryclass" } + rename => { "query_class_name" => "queryclassname" } + rename => { "query_type" => "querytype" } + rename => { "query_type_name" => "querytypename" } + rename => { "ra" => "recursionavailable" } + rename => { "rd" => "recursiondesired" } + } + } + if "bro_dhcp" in [class] { + mutate{ + #add_field = { "metaclass" => "dhcp"} + rename => { "message_types" => "direction" } + rename => { "lease_time" => "duration" } + } + } + if "bro_files" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "missing_bytes" => "missingbytes" } + rename => { "fuid" => "fileid" } + rename => { "uid" => "connectionid" } + } + } + if "bro_http" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "virtual_host" => "hostname" } + rename => { 
"status_code" => "statuscode" } + rename => { "status_message" => "statusmsg" } + rename => { "resp_mime_types" => "rcvdmimetype" } + rename => { "resp_fuids" => "rcvdfileid" } + rename => { "response_body_len" => "rcvdbodybytes" } + rename => { "request_body_len" => "sentbodybytes" } + rename => { "uid" => "connectionid" } + rename => { "ts"=> "eventtime" } + rename => { "@timestamp"=> "eventtime" } + } + } + if "bro_ssl" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "status_code" => "statuscode" } + rename => { "status_message" => "statusmsg" } + rename => { "resp_mime_types" => "rcvdmimetype" } + rename => { "resp_fuids" => "rcvdfileid" } + rename => { "response_body_len" => "rcvdbodybytes" } + rename => { "request_body_len" => "sentbodybytes" } + } + } + if "bro_weird" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "name" => "eventname" } + } + } + if "bro_x509" in [class] { + mutate{ + #add_field = { "metaclass" => "dns"} + rename => { "certificate_common_name" => "certname" } + rename => { "certificate_subject" => "certsubject" } + rename => { "issuer_common_name" => "issuer" } + rename => { "certificate_issuer" => "issuersubject" } + rename => { "certificate_not_valid_before" => "issuetime" } + rename => { "certificate_key_type" => "cert_type" } + } + } + } +} + +output { + if [type] =~ /^bro_conn|bro_dns|bro_http|bro_files|bro_ssl|bro_dhcp|bro_x509$/ { + http { + url => "https://helix-integrations.cloud.aws.apps.fireeye.com/api/upload" + http_method => post + http_compression => true + socket_timeout => 60 + headers => ["Authorization","{{ HELIX_API_KEY }}"] + format => json_batch + } + } +} diff --git a/salt/logstash/conf/pipelines/master/0010_input_hhbeats.conf b/salt/logstash/conf/pipelines/master/0010_input_hhbeats.conf new file mode 100644 index 000000000..6b7667f5c --- /dev/null +++ b/salt/logstash/conf/pipelines/master/0010_input_hhbeats.conf @@ -0,0 +1,40 @@ +input { + beats { + port => 
"5644" + ssl => true + ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"] + ssl_certificate => "/usr/share/logstash/filebeat.crt" + ssl_key => "/usr/share/logstash/filebeat.key" + tags => [ "beat" ] + } +} +filter { + if [type] == "ids" or [type] =~ "bro" { + mutate { + rename => { "host" => "beat_host" } + remove_tag => ["beat"] + add_field => { "sensor_name" => "%{[beat][name]}" } + add_field => { "syslog-host_from" => "%{[beat][name]}" } + remove_field => [ "beat", "prospector", "input", "offset" ] + } + } + if [type] =~ "ossec" { + mutate { + rename => { "host" => "beat_host" } + remove_tag => ["beat"] + add_field => { "syslog-host_from" => "%{[beat][name]}" } + remove_field => [ "beat", "prospector", "input", "offset" ] + } + } + if [type] == "osquery" { + mutate { + rename => { "host" => "beat_host" } + remove_tag => ["beat"] + add_tag => ["osquery"] + } + json { + source => "message" + target => "osquery" + } + } +} diff --git a/salt/logstash/files/dynamic/9999_output_redis.conf b/salt/logstash/conf/pipelines/master/templates/9999_output_redis.conf similarity index 100% rename from salt/logstash/files/dynamic/9999_output_redis.conf rename to salt/logstash/conf/pipelines/master/templates/9999_output_redis.conf diff --git a/salt/logstash/conf/pipelines/search/1000_preprocess_log_elapsed.conf b/salt/logstash/conf/pipelines/search/1000_preprocess_log_elapsed.conf new file mode 100644 index 000000000..d098eb11a --- /dev/null +++ b/salt/logstash/conf/pipelines/search/1000_preprocess_log_elapsed.conf @@ -0,0 +1,13 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Email: justin@hasecuritysolution.com +# Last Update: 12/9/2016 + +filter { + ruby { + code => "event.set('task_start', Time.now.to_f)" + } + mutate { + #add_tag => [ "conf_file_1000"] + } +} diff --git a/salt/logstash/conf/pipelines/search/1001_preprocess_syslogng.conf b/salt/logstash/conf/pipelines/search/1001_preprocess_syslogng.conf 
new file mode 100644 index 000000000..84bce8802 --- /dev/null +++ b/salt/logstash/conf/pipelines/search/1001_preprocess_syslogng.conf @@ -0,0 +1,33 @@ +# Updated by: Doug Burks and Wes Lambert +# Last Update: 10/30/2018 + +filter { + if "syslogng" in [tags] { + mutate { + rename => { "MESSAGE" => "message" } + rename => { "PROGRAM" => "type" } + rename => { "FACILITY" => "syslog-facility" } + rename => { "FILE_NAME" => "syslog-file_name" } + rename => { "HOST" => "syslog-host" } + rename => { "HOST_FROM" => "syslog-host_from" } + rename => { "LEGACY_MSGHDR" => "syslog-legacy_msghdr" } + rename => { "PID" => "syslog-pid" } + rename => { "PRIORITY" => "syslog-priority" } + rename => { "SOURCEIP" => "syslog-sourceip" } + rename => { "TAGS" => "syslog-tags" } + lowercase => [ "syslog-host_from" ] + remove_field => [ "ISODATE" ] + remove_field => [ "SEQNUM" ] + #add_tag => [ "conf_file_1001"] + } + if "bro_" in [type] { + mutate { + add_tag => [ "bro" ] + } + } else if [type] !~ /ossec.*|snort/ and "firewall" not in [tags] { + mutate { + add_tag => [ "syslog" ] + } + } + } +} diff --git a/salt/logstash/conf/pipelines/search/1002_preprocess_json.conf b/salt/logstash/conf/pipelines/search/1002_preprocess_json.conf new file mode 100644 index 000000000..ea7c677da --- /dev/null +++ b/salt/logstash/conf/pipelines/search/1002_preprocess_json.conf @@ -0,0 +1,18 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Email: justin@hasecuritysolution.com +# Last Update: 12/9/2016 + +filter { + if "json" in [tags]{ + json { + source => "message" + } + mutate { + remove_tag => [ "json" ] + } + mutate { + #add_tag => [ "conf_file_1002"] + } + } +} diff --git a/salt/logstash/conf/pipelines/search/1004_preprocess_syslog_types.conf b/salt/logstash/conf/pipelines/search/1004_preprocess_syslog_types.conf new file mode 100644 index 000000000..243abcc15 --- /dev/null +++ 
b/salt/logstash/conf/pipelines/search/1004_preprocess_syslog_types.conf @@ -0,0 +1,19 @@ +filter { + if "syslog" in [tags] { + if [host] == "172.16.1.1" { + mutate { + add_field => { "type" => "fortinet" } + add_tag => [ "firewall" ] + } + } + if [host] == "10.0.0.101" { + mutate { + add_field => { "type" => "brocade" } + add_tag => [ "switch" ] + } + } + mutate { + #add_tag => [ "conf_file_1004"] + } + } +} diff --git a/salt/logstash/conf/pipelines/search/1026_preprocess_dhcp.conf b/salt/logstash/conf/pipelines/search/1026_preprocess_dhcp.conf new file mode 100644 index 000000000..2f893cf7a --- /dev/null +++ b/salt/logstash/conf/pipelines/search/1026_preprocess_dhcp.conf @@ -0,0 +1,140 @@ +# Author: Justin Henderson +# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics +# Email: justin@hasecuritysolutions.com +# Last Update: 12/9/2016 +# This conf file is based on accepting logs for DHCP. It is currently based on Windows DHCP only. +filter { + if [type] == "dhcp" { + mutate { + add_field => { "Hostname" => "%{host}" } + } + mutate { + strip => "message" + } + # This is the initial parsing of the log + grok { + # Server 2008+ + match => { "message" => "%{DATA:id},%{DATE_US:date},(?