Merge pull request #9650 from Security-Onion-Solutions/fix/elastic_agent_add_import_mode

Elastic Agent - Import Mode
weslambert
2023-01-26 11:33:20 -05:00
committed by GitHub
8 changed files with 35 additions and 10 deletions

View File

@@ -107,7 +107,8 @@
 'zeek',
 'schedule',
 'tcpreplay',
-'docker_clean'
+'docker_clean',
+'elastic-fleet'
 ],
 'so-manager': [
 'salt.master',

View File

@@ -66,10 +66,10 @@ curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X POS
 echo
 # RITA Logs
-echo
-echo "Setting up RITA package policy..."
-curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X POST "localhost:5601/api/fleet/package_policies" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d'{ "policy_id": "so-grid-nodes", "package": { "name": "log", "version": "1.1.0" }, "id": "rita-logs", "name": "rita-logs", "description": "RITA Beacon logs", "namespace": "so", "inputs": { "logs-logfile": { "enabled": true, "streams": { "log.log": { "enabled": true, "vars": { "paths": [ "/nsm/rita/beacons.csv", "/nsm/rita/long-connections.csv", "/nsm/rita/short-connections.csv", "/nsm/rita/exploded-dns.csv" ], "data_stream.dataset": "rita", "tags": [], "processors": "- add_fields:\n target: event\n fields:\n category: network\n module: rita\n- if:\n log.file.path: beacons.csv\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.beacon\n- if:\n regexp:\n log.file.path: \"*connections.csv\"\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.connection\n- if:\n log.file.path: \"exploded-dns.csv\"\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.dns" }}}}}}'
-echo
+#echo
+#echo "Setting up RITA package policy..."
+#curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X POST "localhost:5601/api/fleet/package_policies" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d'{ "policy_id": "so-grid-nodes", "package": { "name": "log", "version": "1.1.0" }, "id": "rita-logs", "name": "rita-logs", "description": "RITA Beacon logs", "namespace": "so", "inputs": { "logs-logfile": { "enabled": true, "streams": { "log.log": { "enabled": true, "vars": { "paths": [ "/nsm/rita/beacons.csv", "/nsm/rita/long-connections.csv", "/nsm/rita/short-connections.csv", "/nsm/rita/exploded-dns.csv" ], "data_stream.dataset": "rita", "tags": [], "processors": "- add_fields:\n target: event\n fields:\n category: network\n module: rita\n- if:\n log.file.path: beacons.csv\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.beacon\n- if:\n regexp:\n log.file.path: \"*connections.csv\"\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.connection\n- if:\n log.file.path: \"exploded-dns.csv\"\n then: \n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: rita.dns" }}}}}}'
+#echo
 # Elasticsearch logs
 echo
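
Note: with the RITA package policy creation commented out, new installs no longer register a rita-logs policy. To confirm on a running grid, the installed Fleet package policies can be listed; a minimal sketch, assuming the Kibana Fleet API's GET /api/fleet/package_policies endpoint and jq are available on the manager (neither is introduced by this PR):

    # rita-logs should no longer appear among the Fleet package policies on new installs
    curl -K /opt/so/conf/elasticsearch/curl.config -L \
      "localhost:5601/api/fleet/package_policies" -H 'kbn-xsrf: true' | jq '.items[].id'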

View File

@@ -19,10 +19,22 @@ printf "\n"
 curl -K /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/settings" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d '{"fleet_server_hosts":["https://{{ GLOBALS.manager_ip }}:8220"]}'
 printf "\n\n"
-# Create Logstash Output payload
+# Configure certificates
 mkdir -p /opt/so/conf/elastic-fleet/certs
 cp /etc/ssl/certs/intca.crt /opt/so/conf/elastic-fleet/certs
 cp /etc/pki/elasticfleet* /opt/so/conf/elastic-fleet/certs
+{% if grains.role == 'so-import' %}
+# Add SO-Manager Elasticsearch Ouput
+ESCACRT=$(openssl x509 -in /opt/so/conf/elastic-fleet/certs/intca.crt)
+JSON_STRING=$( jq -n \
+  --arg ESCACRT "$ESCACRT" \
+  '{"name":"so-manager_elasticsearch2","id":"so-manager_elasticsearch2","type":"elasticsearch","hosts":["https://{{ GLOBALS.manager_ip }}:9200"],"is_default":true,"is_default_monitoring":true,"config_yaml":"","ssl":{"certificate_authorities": [$ESCACRT]}}' )
+curl -K /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"
+printf "\n\n"
+{% else %}
+# Create Logstash Output payload
 LOGSTASHCRT=$(openssl x509 -in /opt/so/conf/elastic-fleet/certs/elasticfleet.crt)
 LOGSTASHKEY=$(openssl rsa -in /opt/so/conf/elastic-fleet/certs/elasticfleet.key)
 LOGSTASHCA=$(openssl x509 -in /opt/so/conf/elastic-fleet/certs/intca.crt)
@@ -36,6 +48,7 @@ JSON_STRING=$( jq -n \
 # Add SO-Manager Logstash Ouput
 curl -K /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING"
 printf "\n\n"
+{%- endif %}
 # Add Elastic Fleet Integrations
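
Note: after this script runs, the quickest way to confirm which output the agents will ship to is to list the registered Fleet outputs. A minimal check, assuming the Kibana Fleet API's GET /api/fleet/outputs endpoint and jq are available on the manager (neither is added by this PR):

    # List Fleet outputs and show which one is the default
    curl -K /opt/so/conf/elasticsearch/curl.config -L \
      "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' | jq '.items[] | {id, type, is_default}'

On an import-mode install the default should be the so-manager_elasticsearch2 output created in the jinja branch above; on other installs it should be the Logstash output from the else branch.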

View File

@@ -37,6 +37,8 @@ container_list() {
"so-suricata" "so-suricata"
"so-telegraf" "so-telegraf"
"so-zeek" "so-zeek"
"so-elastic-agent"
"so-elastic-agent-builder"
) )
elif [ $MANAGERCHECK != 'so-helix' ]; then elif [ $MANAGERCHECK != 'so-helix' ]; then
TRUSTED_CONTAINERS=( TRUSTED_CONTAINERS=(
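
Adding the two images to TRUSTED_CONTAINERS lets the image tooling treat them like the rest of the stack's containers. A quick local sanity check once the images have been pulled, assuming Docker is the container runtime in use:

    # Confirm the Elastic Agent images are present locally
    docker images | grep -E 'so-elastic-agent(-builder)?'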

View File

@@ -2677,6 +2677,9 @@ elasticsearch:
 delete: 365
 index_sorting: False
 index_template:
+data_stream:
+hidden: false
+allow_custom_routing: false
 index_patterns:
 - logs-kratos-so*
 template:
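
These two pillar defaults populate the data_stream block of the index template generated for the logs-kratos-so* pattern. Once Elasticsearch has the template, the effect can be inspected directly; a sketch, assuming the same curl.config works against the Elasticsearch API on port 9200 and that jq is installed:

    # Show the data_stream settings of whichever installed template matches logs-kratos-so*
    curl -K /opt/so/conf/elasticsearch/curl.config -L "localhost:9200/_index_template" \
      | jq '.index_templates[]
            | select(any(.index_template.index_patterns[]; startswith("logs-kratos-so")))
            | {name, data_stream: .index_template.data_stream}'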

View File

@@ -436,6 +436,7 @@ role:
 - {{ portgroups.influxdb }}
 - {{ portgroups.elasticsearch_rest }}
 - {{ portgroups.elasticsearch_node }}
+- {{ portgroups.elastic_agent_control }}
 sensors:
 portgroups:
 - {{ portgroups.beats_5044 }}
@@ -453,6 +454,10 @@ role:
 elasticsearch_rest:
 portgroups:
 - {{ portgroups.elasticsearch_rest }}
+elastic_agent_endpoint:
+portgroups:
+- {{ portgroups.elastic_agent_control }}
+- {{ portgroups.elastic_agent_data }}
 analyst:
 portgroups:
 - {{ portgroups.nginx }}
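
The new elastic_agent_endpoint entry ties the agent control and data portgroups to a dedicated firewall role. Once the firewall state has been applied on the manager, a quick check that the control port is open; a sketch, assuming the elastic_agent_control portgroup maps to TCP/8220, the Fleet server port used elsewhere in this PR:

    # Look for the Fleet server control port in the applied firewall rules
    sudo iptables -nL | grep 8220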

View File

@@ -344,6 +344,7 @@ base:
 - zeek
 - schedule
 - docker_clean
+- elastic-fleet
 '*_receiver and G@saltversion:{{saltversion}}':
 - match: compound
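
With elastic-fleet added to the top file (and to the per-role state lists in the first hunk of this PR), the state should now land in the manager's highstate. A quick way to confirm after the change is applied, assuming salt-call is run on the manager as usual:

    # Verify the elastic-fleet state is part of the node's top data
    sudo salt-call state.show_top | grep elastic-fleet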

View File

@@ -603,9 +603,9 @@ if ! [[ -f $install_opt_file ]]; then
 add_web_user
 info "Restarting SOC to pick up initial user"
 logCmd "so-soc-restart"
+title "Setting up Elastic Fleet"
+logCmd "so-elastic-fleet-setup"
 if [[ ! $is_import ]]; then
-title "Setting up Elastic Fleet"
-logCmd "so-elastic-fleet-setup"
 title "Setting up Playbook"
 logCmd "so-playbook-reset"
 fi
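
The effect of this last hunk is easier to see as the resulting code; reconstructed from the diff above, with the surrounding setup steps elided:

    # Elastic Fleet is now set up on every install, including import mode
    title "Setting up Elastic Fleet"
    logCmd "so-elastic-fleet-setup"
    # Playbook setup is still skipped for import installs
    if [[ ! $is_import ]]; then
      title "Setting up Playbook"
      logCmd "so-playbook-reset"
    fi

Together with the jinja branch added to the Fleet setup script, this is what enables Elastic Agent on import-mode installs: the Fleet setup always runs, and on so-import nodes the default output points at Elasticsearch directly instead of Logstash.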