mirror of https://github.com/Security-Onion-Solutions/securityonion.git
40
salt/logstash/pipelines/config/0010_input_hhbeats.conf
Normal file
@@ -0,0 +1,40 @@
input {
  beats {
    port => "5644"
    ssl => true
    ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
    ssl_certificate => "/usr/share/logstash/filebeat.crt"
    ssl_key => "/usr/share/logstash/filebeat.key"
    tags => [ "beat" ]
  }
}
filter {
  if [type] == "ids" or [type] =~ "bro" {
    mutate {
      rename => { "host" => "beat_host" }
      remove_tag => ["beat"]
      add_field => { "sensor_name" => "%{[beat][name]}" }
      add_field => { "syslog-host_from" => "%{[beat][name]}" }
      remove_field => [ "beat", "prospector", "input", "offset" ]
    }
  }
  if [type] =~ "ossec" {
    mutate {
      rename => { "host" => "beat_host" }
      remove_tag => ["beat"]
      add_field => { "syslog-host_from" => "%{[beat][name]}" }
      remove_field => [ "beat", "prospector", "input", "offset" ]
    }
  }
  if [type] == "osquery" {
    mutate {
      rename => { "host" => "beat_host" }
      remove_tag => ["beat"]
      add_tag => ["osquery"]
    }
    json {
      source => "message"
      target => "osquery"
    }
  }
}
14
salt/logstash/pipelines/config/0900_input_redis.conf.jinja
Normal file
@@ -0,0 +1,14 @@
{%- if grains.role == 'so-heavynode' %}
{%- set master = salt['pillar.get']('node:mainip', '') %}
{%- else %}
{%- set master = salt['pillar.get']('static:masterip', '') %}
{% endif -%}
input {
  redis {
    host => '{{ master }}'
    data_type => 'list'
    key => 'logstash:unparsed'
    type => 'redis-input'
    # threads => 1
  }
}
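For context only (this is not part of the commit): the key above implies that upstream forward nodes push unparsed events onto the same Redis list. A minimal sketch of the matching forwarder output, assuming the stock logstash-output-redis plugin and a placeholder manager address:

output {
  redis {
    host => "192.0.2.10"          # hypothetical master/manager IP
    data_type => "list"
    key => "logstash:unparsed"    # must match the key consumed by the input above
  }
}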
77
salt/logstash/pipelines/config/1100_preprocess_bro_conn.conf
Normal file
@@ -0,0 +1,77 @@
# Original Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Updated by: Doug Burks
#
# This conf file is based on accepting logs for conn.log from Bro systems
filter {
  if [type] == "bro_conn" {
    # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
    if [message] =~ /^{.*}$/ {
      json {
        source => "message"
      }

      mutate {
        rename => { "ts" => "timestamp" }
        #uid
        rename => { "id.orig_h" => "source_ip" }
        rename => { "id.orig_p" => "source_port" }
        rename => { "id.resp_h" => "destination_ip" }
        rename => { "id.resp_p" => "destination_port" }
        rename => { "proto" => "protocol" }
        #service
        #duration
        rename => { "orig_bytes" => "original_bytes" }
        rename => { "resp_bytes" => "respond_bytes" }
        rename => { "conn_state" => "connection_state" }
        #local_orig
        rename => { "local_resp" => "local_respond" }
        #missed_bytes
        #history
        rename => { "orig_pkts" => "original_packets" }
        rename => { "orig_ip_bytes" => "original_ip_bytes" }
        rename => { "resp_pkts" => "respond_packets" }
        rename => { "resp_ip_bytes" => "respond_ip_bytes" }
        #tunnel_parents
        rename => { "orig_cc" => "original_country_code" }
        rename => { "resp_cc" => "respond_country_code" }
        rename => { "sensorname" => "sensor_name" }
      }
    } else {
      mutate {
        gsub => [ "message", "[\"']", "" ]
      }
      csv {
        columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","service","duration","original_bytes","respond_bytes","connection_state","local_orig","local_respond","missed_bytes","history","original_packets","original_ip_bytes","respond_packets","respond_ip_bytes","tunnel_parents","sensor_name"]

        # If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal <tab> between the two quotes; on your Logstash system, use a text editor like nano that doesn't convert tabs to spaces.
        separator => "	"
      }
    }

    translate {
      field => "connection_state"

      destination => "connection_state_description"

      dictionary => [
        "S0", "Connection attempt seen, no reply",
        "S1", "Connection established, not terminated",
        "S2", "Connection established and close attempt by originator seen (but no reply from responder)",
        "S3", "Connection established and close attempt by responder seen (but no reply from originator)",
        "SF", "Normal SYN/FIN completion",
        "REJ", "Connection attempt rejected",
        "RSTO", "Connection established, originator aborted (sent a RST)",
        "RSTR", "Established, responder aborted",
        "RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
        "RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
        "SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
        "SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
        "OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
      ]
    }
    mutate {
      #add_tag => [ "conf_file_1100"]
    }
  }
}
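As an illustration only (not part of this commit), the JSON branch above can be exercised locally by feeding it one synthetic conn record and printing the result. A minimal test sketch, assuming a stock Logstash install with the generator input plugin available and the filter file above loaded alongside it:

input {
  generator {
    type  => "bro_conn"
    count => 1
    lines => ['{"ts":"2019-01-01T00:00:00.000000Z","id.orig_h":"10.0.0.5","id.orig_p":51234,"id.resp_h":"192.0.2.10","id.resp_p":443,"proto":"tcp","conn_state":"SF"}']
  }
}
output {
  stdout { codec => rubydebug }
}

Run together with the conn filter, the printed event should carry source_ip, destination_ip, protocol, and connection_state, with connection_state_description set to "Normal SYN/FIN completion" by the translate dictionary.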
56
salt/logstash/pipelines/config/1101_preprocess_bro_dhcp.conf
Normal file
@@ -0,0 +1,56 @@
# Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Updated by: Doug Burks and Wes Lambert
# Last Update: 1/3/2019
#
# This conf file is based on accepting logs for dhcp.log from Bro systems
filter {
  if [type] == "bro_dhcp" {
    # If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
    if [message] =~ /^{.*}$/ {
      json {
        source => "message"
      }

      mutate {
        rename => { "ts" => "timestamp" }
        #uid
        rename => { "id.orig_h" => "source_ip" }
        rename => { "id.orig_p" => "source_port" }
        rename => { "id.resp_h" => "destination_ip" }
        rename => { "id.resp_p" => "destination_port" }
        #mac
        #assigned_ip
        #lease_time
        rename => { "trans_id" => "transaction_id" }
        # new dhcp log format
        rename => { "assigned_addr" => "assigned_ip" }
        rename => { "client_addr" => "source_ip" }
        rename => { "server_addr" => "destination_ip" }
        rename => { "requested_addr" => "requested_ip" }
        rename => { "domain" => "domain_name" }
        rename => { "host_name" => "hostname" }
        rename => { "msg_types" => "message_types" }
        rename => { "uids" => "uid" }
      }
    } else {
      mutate {
        gsub => [ "message", "[\"']", "" ]
      }
      # Bro logs in TSV format
      csv {
        columns => [ "timestamp", "uid", "source_ip", "destination_ip", "mac", "hostname", "client_fqdn", "domain_name", "requested_ip", "assigned_ip", "lease_time","client_message", "server_message", "message_types", "duration" ]
        separator => "	"
      }
      # Remove fields with empty values (-) to prevent field data type conflict
      ruby {
        code =>"
          hash = event.to_hash.each do |key,value|
            if value == '-'
              event.remove(key)
            end
          end"
      }
    }
  }
}
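For example (hypothetical record): a TSV dhcp entry with no MAC address arrives with mac set to the literal placeholder "-"; the Ruby block above deletes any field whose value is "-" from the event, so the placeholder string never collides with the field's expected data type downstream, which is the data type conflict the comment refers to.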
74
salt/logstash/pipelines/config/1102_preprocess_bro_dns.conf
Normal file
@@ -0,0 +1,74 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for dns.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_dns" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
rename => { "proto" => "protocol" }
|
||||
rename => { "trans_id" => "transaction_id" }
|
||||
#rtt field
|
||||
#query field
|
||||
rename => { "qclass" => "query_class" }
|
||||
rename => { "qclass_name" => "query_class_name" }
|
||||
rename => { "qtype" => "query_type" }
|
||||
rename => { "qtype_name" => "query_type_name" }
|
||||
#rcode
|
||||
#rcode_name
|
||||
rename => { "AA" => "aa" }
|
||||
rename => { "TC" => "tc" }
|
||||
rename => { "RD" => "rd" }
|
||||
rename => { "RA" => "ra" }
|
||||
rename => { "Z" => "z" }
|
||||
#answers
|
||||
rename => { "TTLs" => "ttls" }
|
||||
#rejected
|
||||
}
|
||||
} else {
|
||||
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","transaction_id","rtt","query","query_class","query_class_name","query_type","query_type_name","rcode","rcode_name","aa","tc","rd","ra","z","answers","ttls","rejected"]
|
||||
|
||||
#If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, insert a literal <tab> between the two quotes; on your logstash system, use a text editor like nano that doesn't convert tabs to spaces.
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
add_tag => [ "dns" ]
|
||||
}
|
||||
if [ttls] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "ttls" ]
|
||||
}
|
||||
}
|
||||
if [rtt] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "rtt" ]
|
||||
}
|
||||
}
|
||||
#mutate {
|
||||
#convert => [ "rtt", "float" ]
|
||||
#}
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1102"]
|
||||
}
|
||||
}
|
||||
}
|
||||
42
salt/logstash/pipelines/config/1103_preprocess_bro_dpd.conf
Normal file
@@ -0,0 +1,42 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for dpd.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_dpd" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
rename => { "proto" => "protocol" }
|
||||
#analyzer
|
||||
#failure_reason
|
||||
}
|
||||
} else {
|
||||
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","analyzer","failure_reason"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1103"]
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for files.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_files" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#fuid
|
||||
rename => { "tx_hosts" => "file_ip" }
|
||||
rename => { "rx_hosts" => "destination_ip" }
|
||||
rename => { "conn_uids" => "connection_uids" }
|
||||
#source field
|
||||
#depth field
|
||||
rename => { "analyzers" => "analyzer" }
|
||||
rename => { "mime_type" => "mimetype" }
|
||||
rename => { "filename" => "file_name" }
|
||||
#duration
|
||||
#local_orig
|
||||
#is_orig
|
||||
#seen_bytes
|
||||
#total_bytes
|
||||
#missing_bytes
|
||||
#overflow_bytes
|
||||
rename => { "timedout" => "timed_out" }
|
||||
#parent_fuid
|
||||
#md5
|
||||
#sha1
|
||||
#sha256
|
||||
#extracted
|
||||
#extracted_cutoff
|
||||
#extracted_size
|
||||
}
|
||||
} else {
|
||||
|
||||
csv {
|
||||
columns => ["timestamp","fuid","file_ip","destination_ip","connection_uids","source","depth","analyzer","mimetype","file_name","duration","local_orig","is_orig","seen_bytes","total_bytes","missing_bytes","overflow_bytes","timed_out","parent_fuid","md5","sha1","sha256","extracted","extracted_cutoff","extracted_size"]
|
||||
separator => " "
|
||||
}
|
||||
if [destination_ip] =~ /,/ {
|
||||
mutate {
|
||||
split => { "destination_ip" => "," }
|
||||
}
|
||||
}
|
||||
if [file_ip] =~ /,/ {
|
||||
mutate {
|
||||
split => { "file_ip" => "," }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1104"]
|
||||
}
|
||||
}
|
||||
}
|
||||
56
salt/logstash/pipelines/config/1105_preprocess_bro_ftp.conf
Normal file
@@ -0,0 +1,56 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for ftp.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_ftp" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
rename => { "user" => "username" }
|
||||
#password
|
||||
rename => { "command" => "ftp_command" }
|
||||
rename => { "arg" => "ftp_argument" }
|
||||
rename => { "mime_type" => "mimetype" }
|
||||
#file_size
|
||||
#reply_code
|
||||
rename => { "reply_msg" => "reply_message" }
|
||||
rename => { "data_channel.passive" => "data_channel_passive" }
|
||||
rename => { "data_channel.orig_h" => "data_channel_source_ip" }
|
||||
rename => { "data_channel.resp_h" => "data_channel_destination_ip" }
|
||||
rename => { "data_channel.resp_p" => "data_channel_destination_port" }
|
||||
#fuid
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "reply" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","username","password","ftp_command","ftp_argument","mimetype","file_size","reply_code","reply_message","data_channel_passive","data_channel_source_ip","data_channel_destination_ip","data_channel_destination_port","fuid"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1105"]
|
||||
}
|
||||
}
|
||||
}
|
||||
77
salt/logstash/pipelines/config/1106_preprocess_bro_http.conf
Normal file
@@ -0,0 +1,77 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
#
|
||||
# This conf file is based on accepting logs for http.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_http" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
|
||||
# Rename logstash tags field to avoid being overwritten by Bro's http tags field
|
||||
mutate {
|
||||
rename => { "tags" => "tags-orig" }
|
||||
}
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#trans_depth
|
||||
#method
|
||||
rename => { "host" => "virtual_host" }
|
||||
#uri
|
||||
#referrer
|
||||
#version
|
||||
#convert => { "version" => "string" }
|
||||
rename => { "user_agent" => "useragent" }
|
||||
#origin
|
||||
rename => { "request_body_len" => "request_body_length" }
|
||||
rename => { "response_body_len" => "response_body_length" }
|
||||
#status_code
|
||||
#status_message
|
||||
rename => { "status_msg" => "status_message" }
|
||||
#info_code
|
||||
rename => { "info_msg" => "info_message" }
|
||||
#tags
|
||||
# Rename http tags field to http-tags
|
||||
rename => { "tags" => "http-tags" }
|
||||
# Rename logstash tags field to tags
|
||||
rename => { "tags-orig" => "tags" }
|
||||
#username
|
||||
#password
|
||||
#proxied
|
||||
#orig_fuids
|
||||
#orig_filenames
|
||||
#orig_mime_types
|
||||
#resp_fuids
|
||||
#resp_filenames
|
||||
#resp_mime_types
|
||||
}
|
||||
if [http-tags] {
|
||||
mutate {
|
||||
remove_field => [ "http-tags" ]
|
||||
}
|
||||
}
|
||||
} else {
|
||||
grok {
|
||||
match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<trans_depth>(.*?))\t(?<method>(.*?))\t(?<virtual_host>(.*?))\t(?<uri>(.*?))\t(?<referrer>(.*?))\t(?<version>(.*?))\t(?<useragent>(.*?))\t(?<origin>(.*?))\t(?<request_body_length>(.*?))\t(?<response_body_length>(.*?))\t(?<status_code>(.*?))\t(?<status_message>(.*?))\t(?<info_code>(.*?))\t(?<info_message>(.*?))\t(?<tags>(.*))\t(?<username>(.*))\t(?<password>(.*))\t(?<proxied>(.*))\t(?<orig_fuids>(.*))\t(?<orig_filenames>(.*?))\t(?<orig_mime_types>(.*))\t(?<resp_fuids>(.*))\t(?<resp_filenames>(.*?))\t(?<resp_mime_types>(.*))" ]
|
||||
}
|
||||
}
|
||||
|
||||
if [useragent] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "useragent" ]
|
||||
}
|
||||
}
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1106"]
|
||||
}
|
||||
}
|
||||
}
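Restating what the inline comments above describe, with a hypothetical event: because Zeek/Bro http.log has its own tags column, the filter first renames Logstash's tags field to tags-orig, parses the JSON, moves the log's tags value to http-tags (which is then dropped), and finally restores tags-orig to tags. So an event that arrived tagged ["beat"] keeps that routing tag, while the http.log tags value does not overwrite it.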
46
salt/logstash/pipelines/config/1107_preprocess_bro_irc.conf
Normal file
@@ -0,0 +1,46 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for irc.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_irc" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#nick
|
||||
rename => { "user" => "irc_username" }
|
||||
rename => { "command" => "irc_command" }
|
||||
#value
|
||||
rename => { "addl" => "additional_info" }
|
||||
#dcc_file_name
|
||||
#dcc_file_size
|
||||
#dcc_mime_type
|
||||
#fuid
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","nick","irc_username","irc_command","value","additional_info","dcc_file_name","dcc_file_size","dcc_mime_type","fuid"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1107"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for kerberos.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_kerberos" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#request_type
|
||||
#client
|
||||
#service
|
||||
rename => { "success" => "kerberos_success" }
|
||||
rename => { "error_msg" => "error_message" }
|
||||
rename => { "from" => "valid_from" }
|
||||
rename => { "till" => "valid_till" }
|
||||
#cipher
|
||||
#forwardable
|
||||
#renewable
|
||||
rename => { "client_cert_subject" => "client_certificate_subject" }
|
||||
rename => { "client_cert_fuid" => "client_certificate_fuid" }
|
||||
rename => { "server_cert_subject" => "server_certificate_subject" }
|
||||
rename => { "server_cert_fuid" => "server_certificate_fuid" }
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "kerberos_success" => "string" }
|
||||
convert => { "renewable" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","request_type","client","service","kerberos_success","error_message","valid_from","valid_till","cipher","forwardable","renewable","client_certificate_subject","client_certificate_fuid","server_certificate_subject","server_certificate_fuid"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1108"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,56 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for notice.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_notice" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#fuid
|
||||
rename => { "mime" => "file_mime_type" }
|
||||
rename => { "desc" => "file_description" }
|
||||
rename => { "proto" => "protocol" }
|
||||
rename => { "note" => "note" }
|
||||
rename => { "msg" => "msg" }
|
||||
rename => { "sub" => "sub_msg" }
|
||||
rename => { "src" => "source_ip" }
|
||||
rename => { "dst" => "destination_ip" }
|
||||
#p
|
||||
#n
|
||||
rename => { "peer_descr" => "peer_description" }
|
||||
rename => { "actions" => "action" }
|
||||
#suppress_for
|
||||
#destination_country_code
|
||||
#destination_region
|
||||
#destination_city
|
||||
#destination_latitude
|
||||
#destination_longitude
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fuid","file_mime_type","file_description","protocol","note","msg","sub_msg","source_ip","destination_ip","p","n","peer_description","action","suppress_for","destination_country_code","destination_region","destination_city","destination_latitude","destination_longitude"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1109"]
|
||||
}
|
||||
}
|
||||
}
|
||||
52
salt/logstash/pipelines/config/1110_preprocess_bro_rdp.conf
Normal file
@@ -0,0 +1,52 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
#
|
||||
# This conf file is based on accepting logs for rdp.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_rdp" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#cookie
|
||||
#result
|
||||
#security_protocol
|
||||
#client_channels
|
||||
#keyboard_layout
|
||||
#client_build
|
||||
#client_name
|
||||
rename => { "client_dig_product_id" => "client_digital_product_id" }
|
||||
#desktop_width
|
||||
#desktop_height
|
||||
#requested_color_depth
|
||||
rename => { "cert_type" => "certificate_type" }
|
||||
rename => { "cert_count" => "certificate_count" }
|
||||
rename => { "cert_permanent" => "certificate_permanent" }
|
||||
#encryption_level
|
||||
#encryption_method
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","cookie","result","security_protocol","client_channels","keyboard_layout","client_build","client_name","client_digital_product_id","desktop_width","desktop_height","requested_color_depth","certificate_type","certificate_count","certificate_permanent","encryption_level","encryption_method"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1110"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for signatures.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_signatures" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#note
|
||||
rename => { "sig_id" => "signature_id" }
|
||||
rename => { "event_msg" => "event_message" }
|
||||
rename => { "sub_msg" => "sub_message" }
|
||||
rename => { "sig_count" => "signature_count" }
|
||||
#host_count
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","note","signature_id","event_message","sub_message","signature_count","host_count"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1111"]
|
||||
}
|
||||
}
|
||||
}
|
||||
65
salt/logstash/pipelines/config/1112_preprocess_bro_smtp.conf
Normal file
@@ -0,0 +1,65 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for smtp.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_smtp" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#trans_depth
|
||||
#helo
|
||||
rename => { "mailfrom" => "mail_from" }
|
||||
rename => { "rcptto" => "recipient_to" }
|
||||
rename => { "date" => "mail_date" }
|
||||
#from
|
||||
#to
|
||||
#cc
|
||||
#reply_to
|
||||
rename => { "msg_id" => "message_id" }
|
||||
#in_reply_to
|
||||
#subject
|
||||
#x_originating_ip
|
||||
#first_received
|
||||
#second_received
|
||||
#last_reply
|
||||
#path
|
||||
rename => { "user_agent" => "useragent" }
|
||||
#tls
|
||||
#fuids
|
||||
#is_webmail
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "tls" => "string" }
|
||||
convert => { "is_webmail" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
grok {
|
||||
match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<trans_depth>(.*?))\t(?<helo>(.*?))\t(?<mail_from>(.*?))\t(?<recipient_to>(.*?))\t(?<mail_date>(.*?))\t(?<from>(.*?))\t(?<to>(.*?))\t(?<cc>(.*?))\t(?<reply_to>(.*?))\t(?<message_id>(.*?))\t(?<in_reply_to>(.*?))\t(?<subject>(.*?))\t(?<x_originating_ip>(.*?))\t(?<first_received>(.*))\t(?<second_received>(.*))\t(?<last_reply>(.*))\t(?<path>(.*))\t(?<useragent>(.*))\t(?<tls>(.*))\t(?<fuids>(.*))\t(?<is_webmail>(.*))" ]
|
||||
}
|
||||
}
|
||||
|
||||
if [useragent] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "useragent" ]
|
||||
}
|
||||
}
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1112"]
|
||||
}
|
||||
}
|
||||
}
|
||||
47
salt/logstash/pipelines/config/1113_preprocess_bro_snmp.conf
Normal file
@@ -0,0 +1,47 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for snmp.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_snmp" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#duration
|
||||
#version
|
||||
#convert => { "version" => "string" }
|
||||
#community
|
||||
#get_requests
|
||||
#get_bulk_requests
|
||||
#get_responses
|
||||
#set_requests
|
||||
#display_string
|
||||
#up_since
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","duration","version","community","get_requests","get_bulk_requests","get_responses","set_requests","display_string","up_since"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1113"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for software.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_software" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "host" => "source_ip" }
|
||||
rename => { "host_p" => "source_port" }
|
||||
#software_type
|
||||
#name
|
||||
rename => { "version.major" => "version_major" }
|
||||
rename => { "version.minor" => "version_minor" }
|
||||
rename => { "version.minor2" => "version_minor2" }
|
||||
rename => { "version.minor3" => "version_minor3" }
|
||||
rename => { "version.addl" => "version_additional_info" }
|
||||
#unparsed_version
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "version_major" => "string" }
|
||||
convert => { "version_minor" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","source_ip","source_port","software_type","name","version_major","version_minor","version_minor2","version_minor3","version_additional_info","unparsed_version"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1114"]
|
||||
}
|
||||
}
|
||||
}
|
||||
66
salt/logstash/pipelines/config/1115_preprocess_bro_ssh.conf
Normal file
@@ -0,0 +1,66 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks and Wes Lambert
|
||||
# Last Update: 10/30/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for ssh.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_ssh" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#version
|
||||
#convert => { "version" => "string" }
|
||||
rename => { "auth_success" => "authentication_success" }
|
||||
rename => { "auth_attempts" => "authentication_attempts" }
|
||||
#direction
|
||||
#client
|
||||
#server
|
||||
rename => { "cipher_alg" => "cipher_algorithm" }
|
||||
rename => { "compression_alg" => "compression_algorithm" }
|
||||
rename => { "cshka" => "client_host_key_algorithms" }
|
||||
rename => { "host_key_alg" => "host_key_algorithm" }
|
||||
rename => { "hasshAlgorithms" => "hassh_algorithms" }
|
||||
rename => { "hasshServer" => "hassh_server" }
|
||||
rename => { "hasshServerAlgorithms" => "hassh_server_algorithms" }
|
||||
rename => { "hasshVersion" => "hassh_version" }
|
||||
rename => { "kex_alg" => "kex_algorithm" }
|
||||
rename => { "mac_alg" => "mac_algorithm" }
|
||||
rename => { "sshka" => "server_host_key_algorithms" }
|
||||
#host_key
|
||||
#destination_country_code
|
||||
#destination_region
|
||||
#destination_city
|
||||
#destination_latitude
|
||||
#destination_longitude
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "authentication_success" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","authentication_success","authentication_attempts","direction","client","server","cipher_algorithm","mac_algorithm","compression_algorithm","kex_algorithm","host_key_algorithm","host_key","destination_country_code","destination_region","destination_city","destination_latitude","destination_longitude","hassh_version","hassh","hassh_server","client_host_key_algorithms","hassh_algorithms","server_host_key_algorithms","hassh_server_algorithms"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1115"]
|
||||
}
|
||||
}
|
||||
}
|
||||
186
salt/logstash/pipelines/config/1116_preprocess_bro_ssl.conf
Normal file
@@ -0,0 +1,186 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 10/30/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for ssl.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_ssl" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#version
|
||||
#convert => { "version" => "string" }
|
||||
#cipher
|
||||
#curve
|
||||
#server_name
|
||||
#resumed
|
||||
#last_alert
|
||||
#next_protocol
|
||||
#established
|
||||
rename => { "cert_chain_fuids" => "certificate_chain_fuids" }
|
||||
rename => { "client_cert_chain_fuids" => "client_certificate_chain_fuids" }
|
||||
rename => { "subject" => "certificate_subject" }
|
||||
rename => { "issuer" => "certificate_issuer" }
|
||||
#client_subject
|
||||
#client_issuer
|
||||
#validation_status
|
||||
#ja3
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","cipher","curve","server_name","resumed","last_alert","next_protocol","established","certificate_chain_fuids","client_certificate_chain_fuids","certificate_subject","certificate_issuer","client_subject","client_issuer","validation_status","ja3","ja3s"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
gsub => [ "subject", "\\\\,", "|" ]
|
||||
}
|
||||
kv {
|
||||
include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "DC", "GN", "pseudonym", "serialNumber", "title", "initials" ]
|
||||
field_split => ","
|
||||
source => "certificate_issuer"
|
||||
}
|
||||
mutate {
|
||||
rename => { "CN" => "issuer_common_name"}
|
||||
rename => { "C" => "issuer_country_code"}
|
||||
rename => { "O" => "issuer_organization"}
|
||||
rename => { "OU" => "issuer_organization_unit"}
|
||||
rename => { "ST" => "issuer_state"}
|
||||
rename => { "SN" => "issuer_surname"}
|
||||
rename => { "L" => "issuer_locality"}
|
||||
rename => { "DC" => "issuer_distinguished_name"}
|
||||
rename => { "GN" => "issuer_given_name"}
|
||||
rename => { "pseudonym" => "issuer_pseudonym"}
|
||||
rename => { "serialNumber" => "issuer_serial_number"}
|
||||
rename => { "title" => "issuer_title"}
|
||||
rename => { "initials" => "issuer_initials"}
|
||||
}
|
||||
kv {
|
||||
include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "GN", "pseudonym", "serialNumber", "title", "initials" ]
|
||||
field_split => ","
|
||||
source => "certificate_subject"
|
||||
}
|
||||
mutate {
|
||||
rename => { "CN" => "certificate_common_name"}
|
||||
rename => { "C" => "certificate_country_code"}
|
||||
rename => { "O" => "certificate_organization"}
|
||||
rename => { "OU" => "certificate_organization_unit"}
|
||||
rename => { "ST" => "certificate_state"}
|
||||
rename => { "SN" => "certificate_surname"}
|
||||
rename => { "L" => "certificate_locality"}
|
||||
rename => { "GN" => "certificate_given_name"}
|
||||
rename => { "pseudonym" => "certificate_pseudonym"}
|
||||
rename => { "serialNumber" => "certificate_serial_number"}
|
||||
rename => { "title" => "certificate_title"}
|
||||
rename => { "initials" => "certificate_initials"}
|
||||
}
|
||||
if [certificate_subject] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "certificate_subject" ]
|
||||
}
|
||||
}
|
||||
if [certificate_issuer] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "certificate_issuer" ]
|
||||
}
|
||||
}
|
||||
if [certificate_common_name] {
|
||||
ruby {
|
||||
code => "event.set('certificate_common_name_length', event.get('certificate_common_name').length)"
|
||||
}
|
||||
}
|
||||
if [issuer_common_name] {
|
||||
ruby {
|
||||
code => "event.set('issuer_common_name_length', event.get('issuer_common_name').length)"
|
||||
}
|
||||
}
|
||||
if [server_name] {
|
||||
if [server_name] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "server_name" ]
|
||||
}
|
||||
} else {
|
||||
ruby {
|
||||
code => "event.set('server_name_length', event.get('server_name').length)"
|
||||
}
|
||||
}
|
||||
}
|
||||
if [certificate_chain_fuids] {
|
||||
if [certificate_chain_fuids] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "certificate_chain_fuids" ]
|
||||
}
|
||||
} else {
|
||||
ruby {
|
||||
code => "event.set('certificate_chain_count', event.get('certificate_chain_fuids').count(',') + 1)"
|
||||
}
|
||||
mutate {
|
||||
convert => [ "certificate_chain_length", "integer" ]
|
||||
}
|
||||
}
|
||||
}
|
||||
if [client_certificate_chain_fuids] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "client_certificate_chain_fuids" ]
|
||||
}
|
||||
}
|
||||
if [client_issuer] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "client_issuer" ]
|
||||
}
|
||||
}
|
||||
if [client_subject] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "client_subject" ]
|
||||
}
|
||||
}
|
||||
if [curve] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "curve" ]
|
||||
}
|
||||
}
|
||||
if [issuer] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "issuer" ]
|
||||
}
|
||||
}
|
||||
if [query] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "query" ]
|
||||
}
|
||||
}
|
||||
if [subject] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "subject" ]
|
||||
}
|
||||
}
|
||||
if [validation_status] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "validation_status" ]
|
||||
}
|
||||
}
|
||||
if [ja3] == "-" {
|
||||
mutate {
|
||||
remove_field => [ "ja3" ]
|
||||
}
|
||||
}
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1116"]
|
||||
}
|
||||
}
|
||||
}
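As an illustration with hypothetical values: a certificate_issuer of "CN=Example CA,O=Example Inc,C=US" is split on commas by the kv filter above into CN, O, and C keys, which the following mutate renames to issuer_common_name ("Example CA"), issuer_organization ("Example Inc"), and issuer_country_code ("US"); the same kv/rename pattern is applied to certificate_subject to produce the certificate_* fields.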
@@ -0,0 +1,41 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for syslog.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_syslog" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
rename => { "proto" => "protocol" }
|
||||
#facility
|
||||
#severity
|
||||
#message
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","protocol","facility","severity","message"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1117"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for tunnel.log from Bro systems
|
||||
# Security Onion syslog-ng.conf sets type to "bro_tunnels"
|
||||
filter {
|
||||
if [type] == "bro_tunnels" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#tunnel_type
|
||||
#action
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","tunnel_type","action"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1118"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
# Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for weird.log from Bro systems
|
||||
filter {
|
||||
if [type] == "bro_weird" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#name
|
||||
rename => { "addl" => "additional_info" }
|
||||
#notice
|
||||
#peer
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "notice" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
grok {
|
||||
match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<name>(.*?))\t(?<additional_info>(.*?))\t(?<notice>(.*?))\t(?<peer>(.*))" ]
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1119"]
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for mysql.log from Bro systems
|
||||
#
|
||||
# Parse using grok
|
||||
filter {
|
||||
if [type] == "bro_mysql" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
rename => { "cmd" => "mysql_command" }
|
||||
rename => { "arg" => "mysql_argument" }
|
||||
rename => { "success" => "mysql_success" }
|
||||
#rows
|
||||
#response
|
||||
}
|
||||
|
||||
mutate {
|
||||
convert => { "mysql_success" => "string" }
|
||||
}
|
||||
|
||||
} else {
|
||||
grok {
|
||||
match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<mysql_command>(.*?))\t(?<mysql_argument>(.*?))\t(?<mysql_success>(.*?))\t(?<rows>(.*?))\t(?<response>(.*))" ]
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1121"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Reverting to grok for now, due to double-quoted values in log file
|
||||
# Parse using csv filter
|
||||
#filter {
|
||||
# if [type] == "bro_mysql" {
|
||||
# csv {
|
||||
# columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","mysql_command","mysql_argument","mysql_success","rows","response"]
|
||||
# separator => " "
|
||||
# quote_char=
|
||||
# }
|
||||
# }
|
||||
#}
|
||||
@@ -0,0 +1,62 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for socks.log from Bro systems
|
||||
|
||||
# Parse using csv
|
||||
filter {
|
||||
if [type] == "bro_socks" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#uid
|
||||
rename => { "id.orig_h" => "source_ip" }
|
||||
rename => { "id.orig_p" => "source_port" }
|
||||
rename => { "id.resp_h" => "destination_ip" }
|
||||
rename => { "id.resp_p" => "destination_port" }
|
||||
#version
|
||||
#convert => { "version" => "string" }
|
||||
rename => { "user" => "username" }
|
||||
#password
|
||||
rename => { "status" => "server_status" }
|
||||
rename => { "request.host" => "request_host" }
|
||||
rename => { "request.name" => "request_name" }
|
||||
rename => { "request_p" => "request_port" }
|
||||
rename => { "bound.host" => "bound_host" }
|
||||
rename => { "bound.name" => "bound_name" }
|
||||
rename => { "bound_p" => "bound_port" }
|
||||
}
|
||||
} else {
|
||||
mutate {
|
||||
gsub => [ "message", "[\"']", "" ]
|
||||
}
|
||||
csv {
|
||||
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","version","username","password","server_status","request_host","request_name","request_port","bound_host","bound_name","bound_port"]
|
||||
separator => " "
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
#add_tag => [ "conf_file_1122"]
|
||||
}
|
||||
}
|
||||
}
|
||||
# Parse using grok
|
||||
#filter {
|
||||
# if [type] == "bro_socks" {
|
||||
# # This is the initial parsing of the log
|
||||
# grok {
|
||||
# match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<version>(.*?))\t(?<user>(.*?))\t(?<password>(.*?))\t(?<status>(.*))\t(?<request_host>(.*))\t(?<request_name>(.*))\t(?<request_port>(.*))\t(?<bound_host>(.*))\t(?<bound_name>(.*))\t(?<bound_port>(.*))" ]
|
||||
# }
|
||||
# mutate {
|
||||
# #add_tag => [ "conf_file_1122"]
|
||||
# }
|
||||
# }
|
||||
#}
|
||||
154
salt/logstash/pipelines/config/1123_preprocess_bro_x509.conf
Normal file
@@ -0,0 +1,154 @@
|
||||
# Original Author: Justin Henderson
|
||||
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
|
||||
# Updated by: Doug Burks
|
||||
# Last Update: 2/7/2018
|
||||
#
|
||||
# This conf file is based on accepting logs for x509.log from Bro systems
|
||||
|
||||
filter {
|
||||
if [type] == "bro_x509" {
|
||||
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
|
||||
if [message] =~ /^{.*}$/ {
|
||||
json {
|
||||
source => "message"
|
||||
}
|
||||
|
||||
mutate {
|
||||
rename => { "ts" => "timestamp" }
|
||||
#id
|
||||
rename => { "certificate.version" => "certificate_version" }
|
||||
rename => { "certificate.serial" => "certificate_serial" }
|
||||
rename => { "certificate.subject" => "certificate_subject" }
|
||||
rename => { "certificate.issuer" => "certificate_issuer" }
|
||||
rename => { "certificate.not_valid_before" => "certificate_not_valid_before" }
|
||||
rename => { "certificate.not_valid_after" => "certificate_not_valid_after" }
|
||||
rename => { "certificate.key_alg" => "certificate_key_algorithm" }
|
||||
rename => { "certificate.sig_alg" => "certificate_signing_algorithm" }
|
||||
rename => { "certificate.key_type" => "certificate_key_type" }
|
||||
rename => { "certificate.key_length" => "certificate_key_length" }
|
||||
rename => { "certificate.exponent" => "certificate_exponent" }
|
||||
rename => { "certificate.curve" => "certificate_curve" }
|
||||
rename => { "id" => "fuid" }
|
||||
rename => { "san.dns" => "san_dns" }
|
||||
rename => { "san.uri" => "san_uri" }
|
||||
rename => { "san.email" => "san_email" }
|
||||
rename => { "san.ip" => "san_ip" }
|
||||
rename => { "basic_constraints.ca" => "basic_constraints_ca" }
|
||||
rename => { "basic_constraints.path_length" => "basic_constraints_path_length" }
|
||||
}
|
||||
} else {
|
||||
grok {
|
||||
match => [ "message", "(?<timestamp>(.*?))\t(?<fuid>(.*?))\t(?<certificate_version>(.*?))\t(?<certificate_serial>(.*?))\t(?<certificate_subject>(.*?))\t(?<certificate_issuer>(.*?))\t(?<certificate_not_valid_before>(.*?))\t(?<certificate_not_valid_after>(.*?))\t(?<certificate_key_algorithm>(.*?))\t(?<certificate_signing_algorithm>(.*))\t(?<certificate_key_type>(.*))\t(?<certificate_key_length>(.*))\t(?<certificate_exponent>(.*))\t(?<certificate_curve>(.*))\t(?<san_dns>(.*))\t(?<san_uri>(.*))\t(?<san_email>(.*))\t(?<san_ip>(.*))\t(?<basic_constraints_ca>(.*))\t(?<basic_constraints_path_length>(.*))" ]
|
||||
}
|
||||
}
|
||||
|
||||
mutate {
|
||||
gsub => [ "certificate_issuer", "\\\\,", "|" ]
|
||||
gsub => [ "certificate_subject", "\\\\,", "|" ]
|
||||
}

kv {
include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "DC", "GN", "pseudonym", "serialNumber", "title", "initials" ]
field_split => ","
source => "certificate_issuer"
}
mutate {
rename => { "CN" => "issuer_common_name"}
rename => { "C" => "issuer_country_code"}
rename => { "O" => "issuer_organization"}
rename => { "OU" => "issuer_organization_unit"}
rename => { "ST" => "issuer_state"}
rename => { "SN" => "issuer_surname"}
rename => { "L" => "issuer_locality"}
rename => { "DC" => "issuer_distinguished_name"}
rename => { "GN" => "issuer_given_name"}
rename => { "pseudonym" => "issuer_pseudonym"}
rename => { "serialNumber" => "issuer_serial_number"}
rename => { "title" => "issuer_title"}
rename => { "initials" => "issuer_initials"}
}
kv {
include_keys => [ "CN", "C", "O", "OU", "ST", "SN", "L", "GN", "pseudonym", "serialNumber", "title", "initials" ]
field_split => ","
source => "certificate_subject"
}
mutate {
rename => { "CN" => "certificate_common_name"}
rename => { "C" => "certificate_country_code"}
rename => { "O" => "certificate_organization"}
rename => { "OU" => "certificate_organization_unit"}
rename => { "ST" => "certificate_state"}
rename => { "SN" => "certificate_surname"}
rename => { "L" => "certificate_locality"}
rename => { "GN" => "certificate_given_name"}
rename => { "pseudonym" => "certificate_pseudonym"}
rename => { "serialNumber" => "certificate_serial_number"}
rename => { "title" => "certificate_title"}
rename => { "initials" => "certificate_initials"}
convert => [ "certificate_key_length", "integer" ]
convert => [ "certificate_not_valid_after", "integer" ]
convert => [ "certificate_not_valid_before", "integer" ]
}
if [query] == "-" {
mutate {
remove_field => [ "query" ]
}
}
if [san_dns] == "-" {
mutate {
remove_field => [ "san_dns" ]
}
}
if [san_email] == "-" {
mutate {
remove_field => [ "san_email" ]
}
}
if [san_uri] == "-" {
mutate {
remove_field => [ "san_uri" ]
}
}
if [san_ip] == "-" {
mutate {
remove_field => [ "san_ip" ]
}
}
if [certificate_common_name] {
ruby {
code => "event.set('certificate_common_name_length', event.get('certificate_common_name').length)"
}
}
if [issuer_common_name] {
ruby {
code => "event.set('issuer_common_name_length', event.get('issuer_common_name').length)"
}
}
if [certificate_not_valid_after] == "-" {
mutate {
remove_field => [ "certificate_not_valid_after" ]
}
}
if [certificate_not_valid_before] == "-" {
mutate {
remove_field => [ "certificate_not_valid_before" ]
}
}
if [certificate_not_valid_after] and [certificate_not_valid_before] {
ruby {
code => "event.set('certificate_number_days_valid', ((event.get('certificate_not_valid_after') - event.get('certificate_not_valid_before')) / 86400).ceil)"
}
date {
match => [ "certificate_not_valid_after", "UNIX" ]
target => "certificate_not_valid_after"
}
date {
match => [ "certificate_not_valid_before", "UNIX" ]
target => "certificate_not_valid_before"
}
}
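# Illustrative example (assumed values, not from the original config): with
# certificate_not_valid_before = 1577836800 (2020-01-01) and certificate_not_valid_after =
# 1609459200 (2021-01-01), the ruby filter above sets certificate_number_days_valid to
# (31622400 / 86400).ceil = 366.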
mutate {
#add_tag => [ "conf_file_1123"]
}
}
}
@@ -0,0 +1,46 @@
# Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for intel.log from Bro systems
filter {
if [type] == "bro_intel" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
rename => { "seen.indicator" => "indicator" }
rename => { "seen.indicator_type" => "indicator_type" }
rename => { "seen.where" => "seen_where" }
rename => { "seen.node" => "seen_node" }
#matched
#sources
#fuid
rename => { "file_mime_type" => "mimetype" }
rename => { "file_desc" => "file_description" }
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","indicator","indicator_type","seen_where","seen_node","matched","sources","fuid","mimetype","file_description"]
separator => "	"
}
}

mutate {
#add_tag => [ "conf_file_1124"]
}
}
}
@@ -0,0 +1,49 @@
# Author: Wes Lambert
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for modbus.log from Bro systems
#
filter {
if [type] == "bro_modbus" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
rename => { "func" => "function" }
#exception
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","function","exception"]
separator => "	"
}
}
}
}

# Parse using grok
#filter {
# if [type] == "bro_modbus" {
# grok {
# match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<func>(.*?))\t(?<exception>(.*?))$" ]
# }
#mutate {
#add_tag => [ "conf_file_1125"]
#}
# }
#}
66
salt/logstash/pipelines/config/1126_preprocess_bro_sip.conf
Normal file
66
salt/logstash/pipelines/config/1126_preprocess_bro_sip.conf
Normal file
@@ -0,0 +1,66 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for sip.log from Bro systems
#
filter {
if [type] == "bro_sip" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#trans_depth
#method
#uri
#date
#request_from
#request_to
#response_from
#response_to
#reply_to
#call_id
#seq
#subject
#request_path
#response_path
#user_agent
#status_code
#status_msg
#warning
rename => { "request_body_len" => "request_body_length" }
rename => { "response_body_len" => "response_body_length" }
#content_type
}
} else {
grok {
match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<trans_depth>(.*?))\t(?<method>(.*?))\t(?<uri>(.*?))\t(?<date>(.*?))\t(?<request_from>(.*?))\t(?<request_to>(.*?))\t(?<response_from>(.*?))\t(?<response_to>(.*?))\t(?<reply_to>(.*?))\t(?<call_id>(.*?))\t(?<seq>(.*?))\t(?<subject>(.*?))\t(?<request_path>(.*?))\t(?<response_path>(.*?))\t(?<user_agent>(.*?))\t(?<status_code>(.*?))\t(?<status_msg>(.*?))\t(?<warning>(.*?))\t(?<request_body_length>(.*?))\t(?<response_body_length>(.*?))\t(?<content_type>(.*?))$" ]
}
}

mutate {
add_tag => [ "conf_file_1126"]
}
}
}
# Parse using csv filter
#filter {
# if [type] == "bro_sip" {
# csv {
# columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","trans_depth","method","uri","date","request_from","request_to","response_from","response_to","reply_to","call_id","seq","subject","request_path","response_path","user_agent","status_code","status_msg","warning","request_body_len","response_body_len","content_type"]
# separator => "	"
# }
# }
#}
@@ -0,0 +1,73 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
#
# This conf file is based on accepting logs for radius.log from Bro systems
#
filter {
if [type] == "bro_radius" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#username
#mac
#framed_addr
#tunnel_client
#connect_info
rename => { "reply_msg" => "reply_message" }
#result
#ttl
#logged
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","username","mac","framed_addr","tunnel_client","connect_info","reply_message","result","ttl","logged"]
separator => "	"
}
if [tunnel_client] == "-" {
mutate {
remove_field => [ "tunnel_client" ]
}
}

}
# Remove the ttl and framed_addr fields
if [ttl] {
mutate {
remove_field => [ "ttl" ]
}
}
if [framed_addr] {
mutate {
remove_field => [ "framed_addr" ]
}
}
}
}

# Parse using grok
#filter {
# if [type] == "bro_radius" {
# grok {
# match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<username>(.*?))\t(?<mac>(.*?))\t(?<tunnel_client>(.*?))\t(?<logged>(.*?))\t(?<connect_info>(.*?))$" ]
# }
# mutate {
# #add_tag => [ "conf_file_1127"]
# }
# }
#}
46
salt/logstash/pipelines/config/1128_preprocess_bro_pe.conf
Normal file
46
salt/logstash/pipelines/config/1128_preprocess_bro_pe.conf
Normal file
@@ -0,0 +1,46 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
#
# This conf file is based on accepting logs for pe.log from Bro systems
#
filter {
if [type] == "bro_pe" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
rename => { "id" => "fuid" }
#machine
#compile_ts
#os
#subsystem
#is_exe
#is_64bit
#uses_aslr
#uses_dep
#uses_code_integrity
#uses_seh
#has_import_table
#has_export_table
#has_cert_table
#has_debug_data
#section_names
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","fuid","machine","compile_ts","os","subsystem","is_exe","is_64bit","uses_aslr","uses_dep","uses_code_integrity","uses_seh","has_import_table","has_export_table","has_cert_table","has_debug_data","section_names"]
separator => "	"
}
}
}
}
65
salt/logstash/pipelines/config/1129_preprocess_bro_rfb.conf
Normal file
65
salt/logstash/pipelines/config/1129_preprocess_bro_rfb.conf
Normal file
@@ -0,0 +1,65 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for rfb.log from Bro systems
#
# Parse using csv filter
filter {
if [type] == "bro_rfb" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#client_major_version
#client_minor_version
#server_major_version
#server_minor_version
#authentication_method
#auth
#share_flag
#desktop_name
#width
#height
}

mutate {
convert => { "auth" => "string" }
convert => { "share_flag" => "string" }
}

} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","client_major_version","client_minor_version","server_major_version","server_minor_version","authentication_method","auth","share_flag","desktop_name","width","height"]
separator => "	"
}
}
}
}

# Parse using grok
#filter {
# if [type] == "bro_rfb" {
# grok {
# match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<client_major_version>(.*?))\t(?<client_minor_version>(.*?))\t(?<server_major_version>(.*?))\t(?<server_minor_version>(.*?))\t(?<authentication_method>(.*?))\t(?<auth>(.*?))\t(?<share_flag>(.*?))\t(?<desktop_name>(.*?))\t(?<width>(.*?))\t(?<height>(.*?))$" ]
# }
# mutate {
# #add_tag => [ "conf_file_1129"]
# }
# }
#}
51
salt/logstash/pipelines/config/1130_preprocess_bro_dnp3.conf
Normal file
51
salt/logstash/pipelines/config/1130_preprocess_bro_dnp3.conf
Normal file
@@ -0,0 +1,51 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for dnp3.log from Bro systems
#
filter {
if [type] == "bro_dnp3" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#fc_request
#fc_reply
#iin
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fc_request","fc_reply","iin"]
separator => "	"
}
}
}
}

# Parse using grok
#filter {
# if [type] == "bro_dnp3" {
# grok {
# match => [ "message", "(?<timestamp>(.*?))\t(?<uid>(.*?))\t(?<source_ip>(.*?))\t(?<source_port>(.*?))\t(?<destination_ip>(.*?))\t(?<destination_port>(.*?))\t(?<fc_request>(.*?))\t(?<fc_reply>(.*?))\t(?<iin>(.*?))$" ]
# }
# mutate {
# #add_tag => [ "conf_file_1130"]
# }
# }
#}
@@ -0,0 +1,46 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for smb_files.log from Bro systems
#
filter {
if [type] == "bro_smb_files" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#fuid
#action
#path
#name
#size
#prev_name
rename => { "times.modified" => "times_modified" }
rename => { "times.accessed" => "times_accessed" }
rename => { "times.created" => "times_created" }
rename => { "times.changed" => "times_changed" }
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","fuid","action","path","name","size","prev_name","times_modified","times_accessed","times_created","times_changed"]
separator => "	"
}
}
}
}
@@ -0,0 +1,40 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for smb_mapping.log from Bro systems
#
filter {
if [type] == "bro_smb_mapping" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#path
#service
#native_file_system
#share_type
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","path","service","native_file_system","share_type"]
separator => "	"
}
}
}
}
50
salt/logstash/pipelines/config/1133_preprocess_bro_ntlm.conf
Normal file
50
salt/logstash/pipelines/config/1133_preprocess_bro_ntlm.conf
Normal file
@@ -0,0 +1,50 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks and Wes Lambert
# Last Update: 1/2/2019
#
# This conf file is based on accepting logs for ntlm.log from Bro systems
#
filter {
if [type] == "bro_ntlm" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#hostname
rename => { "domainname" => "domain_name" }
rename => { "success" => "ntlm_success" }
#status
}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => [ "timestamp", "uid", "source_ip", "source_port", "destination_ip", "destination_port", "username", "hostname", "domain_name", "server_nb_computer_name", "server_dns_computer_name", "server_tree_name", "ntlm_success"]
separator => "	"
}
ruby {
code =>"
hash = event.to_hash.each do |key,value|
if value == '-'
event.remove(key)
end
end"
}
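# Illustrative note (not part of the original config): the ruby block above strips any field
# whose value is "-", so a TSV record with hostname "-" and domain_name "-" keeps neither field.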

}
}
}
@@ -0,0 +1,54 @@
# Author: Wes Lambert
#
# Adapted from existing filters provided by Justin Henderson
#
# Updated by: Doug Burks
# Last Update: 2/7/2018
#
# This conf file is based on accepting logs for dce_rpc.log from Bro systems
#
filter {
if [type] == "bro_dce_rpc" {
# If message looks like json, try to parse it as such. Otherwise, fall back to csv or grok.
if [message] =~ /^{.*}$/ {
json {
source => "message"
}

mutate {
rename => { "ts" => "timestamp" }
#uid
rename => { "id.orig_h" => "source_ip" }
rename => { "id.orig_p" => "source_port" }
rename => { "id.resp_h" => "destination_ip" }
rename => { "id.resp_p" => "destination_port" }
#rtt
#named_pipe
#endpoint
#operation
}

#mutate {
#convert => { "rtt" => "float" }
#}
} else {
mutate {
gsub => [ "message", "[\"']", "" ]
}
csv {
columns => ["timestamp","uid","source_ip","source_port","destination_ip","destination_port","rtt","named_pipe","endpoint","operation"]
separator => "	"
}

if [rtt] == "-" {
mutate {
remove_field => [ "rtt" ]
}
}

#mutate {
#convert => [ "rtt", "float" ]
#}
}
}
}
228
salt/logstash/pipelines/config/6000_bro.conf
Normal file
228
salt/logstash/pipelines/config/6000_bro.conf
Normal file
@@ -0,0 +1,228 @@
# Original Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Updated by: Doug Burks
# Last Update: 2/10/2018
#
filter {
if "bro" in [tags] {

# Bro logs have a high quality timestamp, so let's copy that to @timestamp.
# Before we do, let's copy the existing logstash @timestamp to timestamp.
mutate {
add_field => { "logstash_timestamp" => "%{@timestamp}" }
}
mutate {
convert => { "logstash_timestamp" => "string" }
}
mutate {
convert => { "timestamp" => "string" }
}
# New Bro JSON logs use ISO8601 timestamps.
# Old Bro TSV logs use UNIX timestamps.
date {
match => [ "timestamp", "ISO8601", "UNIX" ]
}
mutate {
rename => { "logstash_timestamp" => "timestamp" }
}
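# Illustrative examples (assumed values) of the two timestamp formats the date filter above accepts:
#   JSON logs:  ts => "2018-02-10T14:30:00.123456Z"  (ISO8601)
#   TSV logs:   ts => "1518273000.123456"            (UNIX epoch seconds)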

if [duration] == "-" {
mutate {
replace => [ "duration", "0" ]
}
}
if [original_bytes] == "-" {
mutate {
replace => [ "original_bytes", "0" ]
}
}
# If MissedBytes is unspecified set it to zero so it is an integer
if [missed_bytes] == "-" {
mutate {
replace => [ "missed_bytes", "0" ]
}
}
# If OriginalIPBytes is unspecified set it to zero so it is an integer
if [original_ip_bytes] == "-" {
mutate {
replace => [ "original_ip_bytes", "0" ]
}
}
# If RespondBytes is unspecified set it to zero so it is an integer
if [respond_bytes] == "-" {
mutate {
replace => [ "respond_bytes", "0" ]
}
}
# If RespondIPBytes is unspecified set it to zero so it is an integer
if [respond_ip_bytes] == "-" {
mutate {
replace => [ "respond_ip_bytes", "0" ]
}
}
if [request_body_length] == "-" {
mutate {
replace => [ "request_body_length", "0" ]
}
}
if [response_body_length] == "-" {
mutate {
replace => [ "response_body_length", "0" ]
}
}
if [source_port] == "-" {
mutate {
remove_field => ["source_port"]
}
}
if [destination_port] == "-" {
mutate {
remove_field => ["destination_port"]
}
}
if [virtual_host] == "-" {
mutate {
remove_field => ["virtual_host"]
}
}
if [x_originating_ip] == "-" {
mutate {
remove_field => ["x_originating_ip"]
}
}
if [basic_constraints_path_length] == "-" {
mutate {
remove_field => ["basic_constraints_path_length"]
}
}
if [data_channel_source_ip] == "-" {
mutate {
remove_field => ["data_channel_source_ip"]
}
}
if [data_channel_destination_ip] == "-" {
mutate {
remove_field => ["data_channel_destination_ip"]
}
}
if [desktop_width] == "-" {
mutate {
remove_field => ["desktop_width"]
}
}
if [desktop_height] == "-" {
mutate {
remove_field => ["desktop_height"]
}
}
if [height] == "-" {
mutate {
remove_field => ["height"]
}
}

# I renamed conn_uids to uid so that it is easy to pivot to all things tied to a connection
mutate {
rename => [ "connection_uids", "uid" ]
}
# If total_bytes is set to "-" change it to 0 so it is an integer
if [total_bytes] == "-" {
mutate {
replace => [ "total_bytes", "0" ]
}
}
# If seen_bytes is set to "-" change it to 0 so it is an integer
if [seen_bytes] == "-" {
mutate {
replace => [ "seen_bytes", "0" ]
}
}
# If missing_bytes is set to "-" change it to 0 so it is an integer
if [missing_bytes] == "-" {
mutate {
replace => [ "missing_bytes", "0" ]
}
}
# If overflow_bytes is set to "-" change it to 0 so it is an integer
if [overflow_bytes] == "-" {
mutate {
replace => [ "overflow_bytes", "0" ]
}
}
if [dcc_file_size] == "-" {
mutate {
replace => [ "dcc_file_size", "0" ]
}
}
if [authentication_attempts] == "-" {
mutate {
replace => [ "authentication_attempts", "0" ]
}
}
if [file_size] == "-" {
mutate {
replace => [ "file_size", "0" ]
}
}
if [original_ip_bytes] == "-" {
mutate {
replace => [ "original_ip_bytes", "0" ]
}
}

# I recommend changing the field types below to integers or floats so searches can do greater-than or less-than comparisons
# and also so math functions can be run against them
mutate {
convert => [ "bound_port", "integer" ]
convert => [ "data_channel_destination_port", "integer" ]
convert => [ "destination_port", "integer" ]
convert => [ "depth", "integer" ]
#convert => [ "duration", "float" ]
convert => [ "info_code", "integer" ]
convert => [ "missed_bytes", "integer" ]
convert => [ "missing_bytes", "integer" ]
convert => [ "n", "integer" ]
convert => [ "original_bytes", "integer" ]
convert => [ "original_packets", "integer" ]
convert => [ "original_ip_bytes", "integer" ]
convert => [ "overflow_bytes", "integer" ]
convert => [ "p", "integer" ]
convert => [ "query_class", "integer" ]
convert => [ "query_type", "integer" ]
convert => [ "rcode", "integer" ]
convert => [ "request_body_length", "integer" ]
convert => [ "request_port", "integer" ]
convert => [ "respond_bytes", "integer" ]
convert => [ "respond_packets", "integer" ]
convert => [ "respond_ip_bytes", "integer" ]
convert => [ "response_body_length", "integer" ]
convert => [ "seen_bytes", "integer" ]
convert => [ "source_port", "integer" ]
convert => [ "status_code", "integer" ]
#convert => [ "suppress_for", "float" ]
convert => [ "total_bytes", "integer" ]
convert => [ "trans_depth", "integer" ]
convert => [ "transaction_id", "integer" ]
# convert the following boolean to text for now
convert => [ "local_respond", "string" ]
convert => [ "tc", "string" ]
convert => [ "is_orig", "string" ]
convert => [ "local_orig", "string" ]
lowercase => [ "query" ]
#remove_field => [ "timestamp" ]
}

# Combine OriginalBytes and RespondBytes and save the value to total_bytes
if [original_bytes] {
if [respond_bytes] {
ruby {
code => "event.set('total_bytes', event.get('original_bytes') + event.get('respond_bytes'))"
}
}
}
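# Illustrative example (assumed values): original_bytes = 1200 and respond_bytes = 800
# yield total_bytes = 2000 via the ruby filter above.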
mutate {
#add_tag => [ "conf_file_6000"]
}
}
}
16
salt/logstash/pipelines/config/6001_bro_import.conf
Normal file
16
salt/logstash/pipelines/config/6001_bro_import.conf
Normal file
@@ -0,0 +1,16 @@
# Updated by: Doug Burks
# Last Update: 2/10/2018
#
filter {
if "import" in [tags] and "bro" in [tags] {

# we're setting timestamp in 6000 now
#date {
# match => [ "timestamp", "UNIX" ]
#}

mutate {
#add_tag => [ "conf_file_6001"]
}
}
}
@@ -0,0 +1,17 @@
# Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Email: justin@hasecuritysolution.com
# Last Update: 12/9/2016

filter {
if "bro" in [tags] {
if "_grokparsefailure" not in [tags] and "_csvparsefailure" not in [tags] and "_jsonparsefailure" not in [tags] {
#mutate {
# remove_field => [ "message" ]
#}
}
mutate {
#add_tag => [ "conf_file_8000"]
}
}
}
47
salt/logstash/pipelines/config/8006_postprocess_dns.conf
Normal file
47
salt/logstash/pipelines/config/8006_postprocess_dns.conf
Normal file
@@ -0,0 +1,47 @@
# Original Author: Justin Henderson
# SANS Instructor and author of SANS SEC555: SIEM and Tactical Analytics
# Updated by: Doug Burks
# Last Update: 5/13/2017

filter {
if [type] == "bro_dns" or "dns" in [tags] {
# Used for whois lookups - can create log loop
if [query] =~ "^whois\." {
drop { }
}
# REPLACE test.int with your internal domain
if [query] and [query] !~ "\.test\.int$" {
mutate {
lowercase => [ "query" ]
}
if [query_type_name] != "NB" and [query_type_name] != "TKEY" and [query_type_name] != "NBSTAT" and [query_type_name] != "PTR" {
tld {
source => "query"
}
ruby {
code => "event.set('query_length', event.get('query').length)"
}
mutate {
rename => { "[SubLog][sessionid]" => "sub_session_id" }
rename => { "[tld][domain]" => "highest_registered_domain" }
rename => { "[tld][trd]" => "subdomain" }
rename => { "[tld][tld]" => "top_level_domain" }
rename => { "[tld][sld]" => "parent_domain" }
}
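# Illustrative example (assuming public-suffix handling by the tld filter plugin): for
# query "www.example.co.uk", subdomain => "www", parent_domain => "example",
# top_level_domain => "co.uk", highest_registered_domain => "example.co.uk".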
if [parent_domain] {
ruby {
code => "event.set('parent_domain_length', event.get('parent_domain').length)"
}
}
if [subdomain] {
ruby {
code => "event.set('subdomain_length', event.get('subdomain').length)"
}
}
}
}
mutate {
#add_tag => [ "conf_file_8006"]
}
}
}
@@ -1,6 +1,5 @@
{%- if grains['role'] == 'so-eval' -%}
{%- set ES = salt['pillar.get']('master:mainip', '') -%}
{%- set NAME = grains.host -%}
{%- else %}
{%- set ES = salt['pillar.get']('node:mainip', '') -%}
{%- endif %}
@@ -13,7 +12,7 @@
filter {
if "bro" in [tags] and "test_data" not in [tags] and "import" not in [tags] {
mutate {
add_field => { "sensor_name" => "{{ NAME }}" }
##add_tag => [ "conf_file_9000"]
}
}
}

@@ -3,24 +3,11 @@
{%- else %}
{%- set ES = salt['pillar.get']('node:mainip', '') -%}
{%- endif %}
# Author: Security Onion Solutions
# Last Update: 2/3/2020
# Output to ES for osquery tagged logs - EVAL install
# Author: Josh Brower
# Last Update: 12/29/2018
# Output to ES for osquery tagged logs

filter {
if "osquery" in [tags] {
mutate {
rename => { "host" => "beat_host" }
remove_tag => ["beat"]
}
json {
source => "message"
target => "osquery"
}
}
}

output {
if "osquery" in [tags] {
elasticsearch {
@@ -29,4 +16,4 @@ output {
template => "/logstash-template.json"
}
}
}
}
@@ -1,6 +1,5 @@
{%- if grains['role'] == 'so-eval' -%}
{%- set ES = salt['pillar.get']('master:mainip', '') -%}
{%- set NAME = grains.host -%}
{%- else %}
{%- set ES = salt['pillar.get']('node:mainip', '') -%}
{%- endif %}
@@ -12,7 +11,7 @@
filter {
if [event_type] == "suricata" and "test_data" not in [tags] {
mutate {
add_field => { "sensor_name" => "{{ NAME }}" }
##add_tag => [ "conf_file_9400"]
}
}
}

160
salt/logstash/pipelines/config/9997_output_helix.conf.jinja
Normal file
160
salt/logstash/pipelines/config/9997_output_helix.conf.jinja
Normal file
@@ -0,0 +1,160 @@
{% set HELIX_API_KEY = salt['pillar.get']('fireeye:helix:api_key', '') %}
{% set UNIQUEID = salt['pillar.get']('sensor:uniqueid', '') %}
{% set CBNAME = grains.host %}

filter {
if [type] =~ /^bro_conn|bro_dns|bro_http|bro_files|bro_ssl|bro_dhcp|bro_x509|suricata$/ {
grok {
match => [
"source_ip", "^%{IPV4:srcipv4}$",
"source_ip", "(?<srcipv6>^([0-9A-Fa-f]{0,4}:){2,7}([0-9A-Fa-f]{1,4}$|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4})$)"
]
}
grok {
match => [
"destination_ip", "(?<dstipv6>^([0-9A-Fa-f]{0,4}:){2,7}([0-9A-Fa-f]{1,4}$|((25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)(\.|$)){4})$)",
"destination_ip", "^%{IPV4:dstipv4}$"
]
}
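# Illustrative note (not part of the original config): a source_ip of "192.0.2.10" populates
# srcipv4, while "2001:db8::1" populates srcipv6; destination_ip is classified the same way
# by the second grok block.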

#geoip {
# source => "[source_ip]"
# target => "source_geo"
#}
#geoip {
# source => "[destination_ip]"
# target => "destination_geo"
#}
mutate {
rename => { "[beat_host][name]" => "sensor" }
copy => { "sensor" => "rawmsghostname" }
rename => { "message" => "rawmsg" }
copy => { "type" => "class" }
copy => { "class" => "program"}
rename => { "source_port" => "srcport" }
rename => { "destination_port" => "dstport" }
rename => { "[log][file][path]" => "filepath" }
add_field => { "meta_cbid" => "{{ UNIQUEID }}" }
add_field => { "meta_cbname" => "{{ CBNAME }}" }
remove_field => ["source_ip", "destination_ip", "syslog-host_from"]
remove_field => ["beat_host", "timestamp", "type", "log", "@version", "@timestamp"]
remove_field => ["sensorname", "sensor_name", "service", "source", "tags", "syslog-host"]
remove_field => ["sensor_name", "source_ips", "ips", "destination_ips", "syslog-priority", "syslog-file_name", "syslog-facility"]
}
if "bro_conn" in [class] {
mutate {
#add_field => { "metaclass" => "connection" }
rename => { "original_bytes" => "sentbytes" }
rename => { "respond_bytes" => "rcvdbytes" }
rename => { "connection_state" => "connstate" }
rename => { "uid" => "connectionid" }
rename => { "respond_packets" => "rcvdpackets" }
rename => { "original_packets" => "sentpackets" }
rename => { "respond_ip_bytes" => "rcvdipbytes" }
rename => { "original_ip_bytes" => "sentipbytes" }
rename => { "local_respond" => "local_resp" }
rename => { "local_orig" => "localorig" }
rename => { "missed_bytes" => "missingbytes" }
rename => { "connection_state_description" => "description" }
}
}
if "bro_dns" in [class] {
mutate{
#add_field => { "metaclass" => "dns" }
rename => { "answers" => "answer" }
rename => { "query" => "domain" }
rename => { "query_class" => "queryclass" }
rename => { "query_class_name" => "queryclassname" }
rename => { "query_type" => "querytype" }
rename => { "query_type_name" => "querytypename" }
rename => { "ra" => "recursionavailable" }
rename => { "rd" => "recursiondesired" }
rename => { "uid" => "connectionid" }
rename => { "ttls" => "ttl" }
rename => { "transaction_id" => "transactionid" }
}
}
if "bro_dhcp" in [class] {
mutate{
#add_field => { "metaclass" => "dhcp" }
rename => { "message_types" => "direction" }
rename => { "uid" => "connectionid" }
rename => { "lease_time" => "duration" }
}
}
if "bro_files" in [class] {
mutate{
#add_field => { "metaclass" => "files" }
rename => { "missing_bytes" => "missingbytes" }
rename => { "seen_bytes" => "seenbytes" }
rename => { "overflow_bytes" => "overflowbytes" }
rename => { "fuid" => "fileid" }
rename => { "conn_uids" => "connectionid" }
rename => { "is_orig" => "isorig" }
rename => { "timed_out" => "timedout" }
rename => { "local_orig" => "localorig" }
rename => { "file_ip" => "tx_host" }
}
}
if "bro_http" in [class] {
mutate{
#add_field => { "metaclass" => "http" }
rename => { "virtual_host" => "hostname" }
rename => { "status_code" => "statuscode" }
rename => { "status_message" => "statusmsg" }
rename => { "resp_mime_types" => "rcvdmimetype" }
rename => { "resp_fuids" => "rcvdfileid" }
rename => { "response_body_len" => "rcvdbodybytes" }
rename => { "request_body_len" => "sentbodybytes" }
rename => { "uid" => "connectionid" }
rename => { "ts"=> "eventtime" }
rename => { "@timestamp"=> "eventtime" }
rename => { "trans_depth" => "depth" }
rename => { "request_body_length" => "sentbodybytes" }
rename => { "response_body_length" => "rcvdbodybytes" }
}
}
if "bro_ssl" in [class] {
mutate{
#add_field => { "metaclass" => "ssl" }
rename => { "status_code" => "statuscode" }
rename => { "status_message" => "statusmsg" }
rename => { "resp_mime_types" => "rcvdmimetype" }
rename => { "resp_fuids" => "rcvdfileid" }
rename => { "response_body_len" => "rcvdbodybytes" }
rename => { "request_body_len" => "sentbodybytes" }
rename => { "uid" => "connectionid" }
}
}
if "bro_weird" in [class] {
mutate{
#add_field => { "metaclass" => "weird" }
rename => { "name" => "eventname" }
}
}
if "bro_x509" in [class] {
mutate{
#add_field => { "metaclass" => "x509" }
rename => { "certificate_common_name" => "certname" }
rename => { "certificate_subject" => "certsubject" }
rename => { "issuer_common_name" => "issuer" }
rename => { "certificate_issuer" => "issuersubject" }
rename => { "certificate_not_valid_before" => "issuetime" }
rename => { "certificate_key_type" => "cert_type" }
}
}
}
}

output {
if [class] =~ /^bro_conn|bro_dns|bro_http|bro_files|bro_ssl|bro_dhcp|bro_x509|suricata$/ {
http {
url => "https://helix-integrations.cloud.aws.apps.fireeye.com/api/upload"
http_method => post
http_compression => true
socket_timeout => 60
headers => ["Authorization","{{ HELIX_API_KEY }}"]
format => json_batch
}
}
}
26
salt/logstash/pipelines/config/9999_output_redis.conf.jinja
Normal file
26
salt/logstash/pipelines/config/9999_output_redis.conf.jinja
Normal file
@@ -0,0 +1,26 @@
{%- if salt['grains.get']('role') == 'so-master' %}
{% set master = salt['pillar.get']('static:masterip', '') %}
{%- set nodetype = 'master' %}
{% elif grains.role == 'so-heavynode' %}
{% set master = salt['pillar.get']('node:mainip', '') %}
{%- set nodetype = salt['pillar.get']('node:node_type', 'search') %}
{%- else %}
{%- set nodetype = salt['pillar.get']('node:node_type', 'storage') %}
{% set master = salt['pillar.get']('static:masterip', '') %}
{%- endif %}

output {
redis {
host => '{{ master }}'
data_type => 'list'
{%- if nodetype == 'parser' %}
key => 'logstash:parsed'
{%- else %}
key => 'logstash:unparsed'
{%- endif %}
congestion_interval => 1
congestion_threshold => 50000000
# batch_events => 500
}
}