mirror of https://github.com/Security-Onion-Solutions/securityonion.git
add ingest latency metrics
@@ -177,12 +177,84 @@
         "description": "Extract IPs from Elastic Agent events (host.ip) and adds them to related.ip"
       }
     },
+    {
+      "script": {
+        "description": "Snapshot event.ingested into _tmp.event_ingested_pre_fleet before .fleet_final_pipeline-1 overwrites it with ES ingest time",
+        "lang": "painless",
+        "if": "ctx.event?.ingested != null && ctx.event?.created == null",
+        "ignore_failure": true,
+        "source": "ctx.putIfAbsent('_tmp', [:]); ctx._tmp.event_ingested_pre_fleet = ctx.event.ingested;"
+      }
+    },
     {
       "pipeline": {
         "name": ".fleet_final_pipeline-1",
         "ignore_missing_pipeline": true
       }
     },
+    {
+      "script": {
+        "description": "Calculate time from Elastic Agent to Logstash.",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_agent != null",
+        "ignore_failure": true,
+        "source": "ZonedDateTime start = ctx._tmp.event_ingested_pre_fleet != null ? ZonedDateTime.parse(ctx._tmp.event_ingested_pre_fleet) : ZonedDateTime.parse(ctx['@timestamp']); ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_elasticagent_to_logstash = ChronoUnit.SECONDS.between(start, ZonedDateTime.parse(ctx._tmp.logstash_from_agent));"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time from Logstash to Redis",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_agent != null && ctx._tmp?.logstash_to_redis != null",
+        "ignore_failure": true,
+        "source": "ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_logstash_to_redis = ChronoUnit.SECONDS.between(ZonedDateTime.parse(ctx._tmp.logstash_from_agent), ZonedDateTime.parse(ctx._tmp.logstash_to_redis));"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time message spends in redis queue (logstash delay in pulling event).",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_to_redis != null && ctx._tmp?.logstash_from_redis != null",
+        "ignore_failure": true,
+        "source": "ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_redis_to_logstash = ChronoUnit.SECONDS.between(ZonedDateTime.parse(ctx._tmp.logstash_to_redis), ZonedDateTime.parse(ctx._tmp.logstash_from_redis));"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time from Logstash to Elasticsearch (after read from Redis).",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_redis != null",
+        "ignore_failure": true,
+        "source": "ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_logstash_to_elasticsearch = ChronoUnit.SECONDS.between(ZonedDateTime.parse(ctx._tmp.logstash_from_redis), metadata().now);"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time from Elastic Agent to Kafka.",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_kafka != null && ctx._tmp?.logstash_from_agent == null",
+        "ignore_failure": true,
+        "source": "ZonedDateTime start = ctx._tmp.event_ingested_pre_fleet != null ? ZonedDateTime.parse(ctx._tmp.event_ingested_pre_fleet) : ZonedDateTime.parse(ctx['@timestamp']); ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_elasticagent_to_kafka = ChronoUnit.SECONDS.between(start, ZonedDateTime.parse(ctx._tmp.logstash_from_kafka));"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time message spends in Kafka queue (logstash delay in pulling event).",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_kafka != null && ctx.metadata?.kafka?.timestamp != null && ctx._tmp?.logstash_from_agent == null",
+        "ignore_failure": true,
+        "source": "ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_kafka_queue = ChronoUnit.SECONDS.between(ZonedDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(ctx.metadata.kafka.timestamp.toString())), ZoneId.of('UTC')), ZonedDateTime.parse(ctx._tmp.logstash_from_kafka));"
+      }
+    },
+    {
+      "script": {
+        "description": "Calculate time from Logstash to Elasticsearch (after read from Kafka).",
+        "lang": "painless",
+        "if": "ctx._tmp?.logstash_from_kafka != null && ctx._tmp?.logstash_from_agent == null",
+        "ignore_failure": true,
+        "source": "ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_kafka_to_elasticsearch = ChronoUnit.SECONDS.between(ZonedDateTime.parse(ctx._tmp.logstash_from_kafka), metadata().now);"
+      }
+    },
     {
       "remove": {
         "field": "event.agent_id_status",
@@ -202,11 +274,12 @@
         "event.dataset_temp",
         "dataset_tag_temp",
         "module_temp",
-        "datastream_dataset_temp"
+        "datastream_dataset_temp",
+        "_tmp"
       ],
       "ignore_missing": true,
       "ignore_failure": true
     }
   }
 ]
 }
 }
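These processors can be sanity-checked before deployment with Elasticsearch's pipeline simulation API. A minimal sketch exercising a trimmed-down version of the agent-to-Logstash script above (the timestamps and _tmp value are illustrative, not taken from this commit):

POST _ingest/pipeline/_simulate
{
  "pipeline": {
    "processors": [
      {
        "script": {
          "lang": "painless",
          "if": "ctx._tmp?.logstash_from_agent != null",
          "ignore_failure": true,
          "source": "ZonedDateTime start = ZonedDateTime.parse(ctx.event.ingested); ctx.event.putIfAbsent('ingestion', [:]); ctx.event.ingestion.latency_elasticagent_to_logstash = ChronoUnit.SECONDS.between(start, ZonedDateTime.parse(ctx._tmp.logstash_from_agent));"
        }
      }
    ]
  },
  "docs": [
    {
      "_source": {
        "event": { "ingested": "2024-01-01T00:00:00.000Z" },
        "_tmp": { "logstash_from_agent": "2024-01-01T00:00:07.500Z" }
      }
    }
  ]
}

The simulated document should come back with event.ingestion.latency_elasticagent_to_logstash set to 7.
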
@@ -26,12 +26,12 @@ logstash:
   manager:
     - so/0011_input_endgame.conf
     - so/0012_input_elastic_agent.conf.jinja
-    - so/0013_input_lumberjack_fleet.conf
+    - so/0013_input_lumberjack_fleet.conf.jinja
     - so/9999_output_redis.conf.jinja
   receiver:
     - so/0011_input_endgame.conf
     - so/0012_input_elastic_agent.conf.jinja
-    - so/0013_input_lumberjack_fleet.conf
+    - so/0013_input_lumberjack_fleet.conf.jinja
     - so/9999_output_redis.conf.jinja
   search:
     - so/0900_input_redis.conf.jinja
@@ -69,4 +69,5 @@ logstash:
   pipeline_x_batch_x_size: 125
   pipeline_x_ecs_compatibility: disabled
   dmz_nodes: []
+  latency_metrics: False

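Because latency_metrics defaults to False here, every Jinja-guarded ruby stamp in the pipeline configs below renders only when the option is switched on. A sketch of enabling it through the logstash pillar (the enclosing pillar file and exact nesting are assumptions inferred from the defaults above):

logstash:
  latency_metrics: True
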
@@ -1,3 +1,4 @@
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
 input {
   elastic_agent {
     port => 5055
@@ -11,10 +12,15 @@ input {
   }
 }
 filter {
-  if ![metadata] {
-    mutate {
-      rename => {"@metadata" => "metadata"}
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+  ruby {
+    code => "event.set('[_tmp][logstash_from_agent]', Time.now().utc.iso8601(3));"
+  }
+{% endif %}
+  if ![metadata] {
+    mutate {
+      rename => {"@metadata" => "metadata"}
     }
   }
 }

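When rendered with latency_metrics enabled, the ruby block stamps the Logstash receive time into the event as an ISO8601 UTC string with millisecond precision, so an event leaving this input carries something like the following (value illustrative) until the final remove processor in the ingest pipeline drops _tmp:

{
  "_tmp": {
    "logstash_from_agent": "2024-01-01T00:00:07.500Z"
  }
}
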
@@ -1,23 +0,0 @@
-input {
-  elastic_agent {
-    port => 5056
-    tags => [ "elastic-agent", "fleet-lumberjack-input" ]
-    ssl_enabled => true
-    ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
-    ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
-    ecs_compatibility => v8
-    id => "fleet-lumberjack-in"
-    codec => "json"
-  }
-}
-
-
-filter {
-  if ![metadata] {
-    mutate {
-      rename => {"@metadata" => "metadata"}
-    }
-  }
-}
-
-
@@ -0,0 +1,26 @@
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
+input {
+  elastic_agent {
+    port => 5056
+    tags => [ "elastic-agent", "fleet-lumberjack-input" ]
+    ssl_enabled => true
+    ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
+    ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
+    ecs_compatibility => v8
+    id => "fleet-lumberjack-in"
+    codec => "json"
+  }
+}
+
+filter {
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+  ruby {
+    code => "event.set('[_tmp][logstash_from_fleet]', Time.now().utc.iso8601(3));"
+  }
+{% endif %}
+  if ![metadata] {
+    mutate {
+      rename => {"@metadata" => "metadata"}
+    }
+  }
+}

@@ -1,3 +1,4 @@
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
 {%- set kafka_password = salt['pillar.get']('kafka:config:password') %}
 {%- set kafka_trustpass = salt['pillar.get']('kafka:config:trustpass') %}
 {%- set kafka_brokers = salt['pillar.get']('kafka:nodes', {}) %}
@@ -30,6 +31,11 @@ input {
   }
 }
 filter {
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+  ruby {
+    code => "event.set('[_tmp][logstash_from_kafka]', Time.now().utc.iso8601(3));"
+  }
+{% endif %}
   if ![metadata] {
     mutate {
       rename => { "@metadata" => "metadata" }

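The kafka-queue latency script in the ingest pipeline above reads ctx.metadata.kafka.timestamp (the @metadata tree survives into the event because the mutate filter here renames it to metadata). The Logstash kafka input only attaches that field when event decoration is enabled; that part of the input block sits outside this hunk, so the following is an assumption about how the broker timestamp (epoch milliseconds) gets populated:

input {
  kafka {
    # ...existing broker/SSL settings...
    decorate_events => "basic"  # adds [@metadata][kafka][timestamp] among other fields
  }
}
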
@@ -1,4 +1,4 @@
-{%- from 'logstash/map.jinja' import LOGSTASH_REDIS_NODES with context %}
+{%- from 'logstash/map.jinja' import LOGSTASH_REDIS_NODES, LOGSTASH_MERGED %}
 {%- set REDIS_PASS = salt['pillar.get']('redis:config:requirepass') %}
 
 {%- for index in range(LOGSTASH_REDIS_NODES|length) %}
@@ -18,3 +18,10 @@ input {
   }
 {% endfor %}
 {% endfor -%}
+filter {
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+  ruby {
+    code => "event.set('[_tmp][logstash_from_redis]', Time.now().utc.iso8601(3));"
+  }
+{% endif %}
+}

@@ -1,3 +1,11 @@
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+filter {
+  ruby {
+    code => "event.set('[_tmp][logstash_to_elasticsearch]', Time.now().utc.iso8601(3));"
+  }
+}
+{% endif %}
 output {
   if "elastic-agent" in [tags] and "so-ip-mappings" in [tags] {
     elasticsearch {

@@ -13,13 +13,20 @@ filter {
     add_tag => "fleet-lumberjack-{{ GLOBALS.hostname }}"
   }
 }
-
-output {
-  lumberjack {
-    codec => json
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+filter {
+  ruby {
+    code => "event.set('[_tmp][fleet_to_logstash]', Time.now().utc.iso8601(3));"
+  }
+}
+{% endif %}
+output {
+  lumberjack {
+    codec => json
     hosts => {{ FAILOVER_LOGSTASH_NODES }}
     ssl_certificate => "/usr/share/filebeat/ca.crt"
-    port => 5056
+    port => 5056
     id => "fleet-lumberjack-{{ GLOBALS.hostname }}"
   }
 }

@@ -1,10 +1,17 @@
+{%- from 'logstash/map.jinja' import LOGSTASH_MERGED %}
 {%- if grains.role in ['so-heavynode', 'so-receiver'] %}
 {%- set HOST = GLOBALS.hostname %}
 {%- else %}
 {%- set HOST = GLOBALS.manager %}
 {%- endif %}
 {%- set REDIS_PASS = salt['pillar.get']('redis:config:requirepass') %}
 
+{% if LOGSTASH_MERGED.get('latency_metrics', False) %}
+filter {
+  ruby {
+    code => "event.set('[_tmp][logstash_to_redis]', Time.now().utc.iso8601(3));"
+  }
+}
+{% endif %}
 output {
   redis {
     host => '{{ HOST }}'

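The stamps written here and in the Redis input bracket the queue: logstash_to_redis is set just before the event is pushed, and logstash_from_redis when a search-node pipeline pulls it back out. A worked example of the resulting script arithmetic (values illustrative; ChronoUnit.SECONDS.between truncates to whole seconds):

logstash_to_redis   = 2024-01-01T00:00:07.600Z
logstash_from_redis = 2024-01-01T00:00:09.100Z
latency_redis_to_logstash = SECONDS.between(to_redis, from_redis) = 1
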
@@ -86,3 +86,8 @@ logstash:
       multiline: True
      advanced: True
       forcedType: "[]string"
+    latency_metrics:
+      description: Enable latency metrics within events processed by logstash. Useful for pinpointing log ingest delay.
+      forcedType: bool
+      global: False
+      advanced: True

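Once enabled end to end, the computed values land under event.ingestion.* as whole-second integers and can be aggregated per hop to find where ingest is slow. A sketch of such a query (the logs-* index pattern is an assumption; the field names come from the scripts above):

GET logs-*/_search
{
  "size": 0,
  "aggs": {
    "agent_to_logstash": { "avg": { "field": "event.ingestion.latency_elasticagent_to_logstash" } },
    "logstash_to_redis": { "avg": { "field": "event.ingestion.latency_logstash_to_redis" } },
    "redis_queue": { "avg": { "field": "event.ingestion.latency_redis_to_logstash" } },
    "logstash_to_es": { "avg": { "field": "event.ingestion.latency_logstash_to_elasticsearch" } }
  }
}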