Merge remote-tracking branch 'origin/reyesj2/kafka' into kaffytaffy

m0duspwnens
2024-04-09 11:13:02 -04:00
4 changed files with 95 additions and 49 deletions

View File

@@ -78,8 +78,6 @@ so-logstash:
{% if GLOBALS.role in ['so-manager', 'so-managersearch', 'so-standalone', 'so-import', 'so-heavynode', 'so-searchnode', 'so-kafkanode' ] %}
- /opt/so/conf/ca/cacerts:/etc/pki/ca-trust/extracted/java/cacerts:ro
- /opt/so/conf/ca/tls-ca-bundle.pem:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem:ro
{% endif %}
{% if GLOBALS.role in ['so-kafkanode'] %}
- /etc/pki/kafka-logstash.p12:/usr/share/logstash/kafka-logstash.p12:ro
{% endif %}
{% if GLOBALS.role == 'so-eval' %}

View File

@@ -1,26 +1,37 @@
{% set kafka_brokers = salt['pillar.get']('logstash:nodes:kafkanode', {}) %}
{% set broker_ips = [] %}
{% for node, node_data in kafka_brokers.items() %}
{% do broker_ips.append(node_data['ip'] + ":9092") %}
{% endfor %}
{% set bootstrap_servers = "','".join(broker_ips) %}
# Run on searchnodes to ingest Kafka topic(s); group_id allows load balancing of event ingest across all searchnodes
input {
kafka {
codec => json
#Can ingest multiple topics. Set to a value from SOC UI?
topics => ['logstash-topic',]
group_id => 'searchnodes'
security_protocol => 'SSL'
bootstrap_servers => {{ bootstrap_servers }}
ssl_keystore_location => '/usr/share/logstash/kafka-logstash.p12'
ssl_keystore_password => ''
ssl_keystore_type => 'PKCS12'
ssl_truststore_location => '/etc/pki/ca-trust/extracted/java/cacerts'
# Set password as a pillar to avoid bad optics? This is the default truststore for the grid
ssl_truststore_password => 'changeit'
}
{% set kafka_brokers = salt['pillar.get']('logstash:nodes:kafkanode', {}) %}
{% set kafka_on_mngr = salt['pillar.get']('logstash:nodes:manager', {}) %}
{% set broker_ips = [] %}
{% for node, node_data in kafka_brokers.items() %}
{% do broker_ips.append(node_data['ip'] + ":9092") %}
{% endfor %}
{# For testing kafka stuff from manager not dedicated kafkanodes #}
{% for node, node_data in kafka_on_mngr.items() %}
{% do broker_ips.append(node_data['ip'] + ":9092") %}
{% endfor %}
{% set bootstrap_servers = "','".join(broker_ips) %}
input {
kafka {
codec => json
topics => ['default-logs', 'kratos-logs', 'soc-logs', 'strelka-logs', 'suricata-logs', 'zeek-logs']
group_id => 'searchnodes'
client_id => '{{ GLOBALS.hostname }}'
security_protocol => 'SSL'
bootstrap_servers => '{{ bootstrap_servers }}'
ssl_keystore_location => '/usr/share/logstash/kafka-logstash.p12'
ssl_keystore_password => 'changeit'
ssl_keystore_type => 'PKCS12'
ssl_truststore_location => '/etc/pki/ca-trust/extracted/java/cacerts'
ssl_truststore_password => 'changeit'
decorate_events => true
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}", "kafka" ]
}
}
filter {
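# decorate_events stores Kafka topic/partition/offset under @metadata, which Logstash drops from output events; rename it to "metadata" so it stays with the event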
if ![metadata] {
mutate {
rename => { "@metadata" => "metadata" }
}
}
}
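
For illustration only (hypothetical broker IPs, not taken from this commit): with two kafkanode entries at 10.0.0.11 and 10.0.0.12, broker_ips becomes ['10.0.0.11:9092', '10.0.0.12:9092'], and the "','".join(...) above renders the quoted setting as:

bootstrap_servers => '10.0.0.11:9092','10.0.0.12:9092'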

View File

@@ -1,22 +0,0 @@
{% set kafka_brokers = salt['pillar.get']('logstash:nodes:kafkanode', {}) %}
{% set broker_ips = [] %}
{% for node, node_data in kafka_brokers.items() %}
{% do broker_ips.append(node_data['ip'] + ":9092") %}
{% endfor %}
{% set bootstrap_servers = "','".join(broker_ips) %}
# Run on the Kafka broker; Logstash writes to topic 'logstash-topic'
output {
kafka {
codec => json
topic_id => 'logstash-topic'
bootstrap_servers => '{{ bootstrap_servers }}'
security_protocol => 'SSL'
ssl_keystore_location => '/usr/share/logstash/kafka-logstash.p12'
ssl_keystore_password => ''
ssl_keystore_type => 'PKCS12'
ssl_truststore_location => '/etc/pki/ca-trust/extracted/java/cacerts'
ssl_truststore_password => 'changeit'
}
}

View File

@@ -736,6 +736,40 @@ elasticfleet_kafka_crt:
- onchanges:
- x509: elasticfleet_kafka_key
kafka_logstash_key:
x509.private_key_managed:
- name: /etc/pki/kafka-logstash.key
- keysize: 4096
- backup: True
- new: True
{% if salt['file.file_exists']('/etc/pki/kafka-logstash.key') -%}
- prereq:
- x509: /etc/pki/kafka-logstash.crt
{%- endif %}
- retry:
attempts: 5
interval: 30
kafka_logstash_crt:
x509.certificate_managed:
- name: /etc/pki/kafka-logstash.crt
- ca_server: {{ ca_server }}
- subjectAltName: DNS:{{ GLOBALS.hostname }}, IP:{{ GLOBALS.node_ip }}
- signing_policy: kafka
- private_key: /etc/pki/kafka-logstash.key
- CN: {{ GLOBALS.hostname }}
- days_remaining: 0
- days_valid: 820
- backup: True
- timeout: 30
- retry:
attempts: 5
interval: 30
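# Bundle the managed key and certificate into the PKCS12 keystore that the docker-compose template mounts into the Logstash container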
cmd.run:
- name: "/usr/bin/openssl pkcs12 -inkey /etc/pki/kafka-logstash.key -in /etc/pki/kafka-logstash.crt -export -out /etc/pki/kafka-logstash.p12 -nodes -passout pass:changeit"
- onchanges:
- x509: /etc/pki/kafka-logstash.key
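# Sanity check (sketch, assuming the 'changeit' passphrase set above): inspect the generated keystore with
#   openssl pkcs12 -info -in /etc/pki/kafka-logstash.p12 -passin pass:changeit -nokeys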
{% if grains['role'] in ['so-manager'] %}
kafka_client_key:
x509.private_key_managed:
@@ -783,6 +817,7 @@ kafka_client_crt_perms:
- user: 960
- group: 939
{% endif %}
kafka_key_perms:
file.managed:
- replace: False
@@ -799,6 +834,30 @@ kafka_crt_perms:
- user: 960
- group: 939
kafka_logstash_key_perms:
file.managed:
- replace: False
- name: /etc/pki/kafka-logstash.key
- mode: 640
- user: 960
- group: 939
kafka_logstash_crt_perms:
file.managed:
- replace: False
- name: /etc/pki/kafka-logstash.crt
- mode: 640
- user: 960
- group: 939
kafka_logstash_pkcs12_perms:
file.managed:
- replace: False
- name: /etc/pki/kafka-logstash.p12
- mode: 640
- user: 960
- group: 931
kafka_pkcs8_perms:
file.managed:
- replace: False