mirror of https://github.com/Security-Onion-Solutions/securityonion.git
synced 2025-12-06 17:22:49 +01:00
use elastic map file
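The map added by this commit keys everything off three pillar values: elasticsearch:auth:enabled selects between the plain and authenticated curl command, while elasticsearch:auth:user and elasticsearch:auth:pass supply the credentials. A minimal sketch of the pillar layout those lookups assume (the key names come from the pillar.get calls in the diff below; the values shown are placeholders, not defaults from the repo):

elasticsearch:
  auth:
    enabled: True
    user: elastic-user        # placeholder value
    pass: elastic-password    # placeholder value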
@@ -2,8 +2,7 @@
 {% if sls in allowed_states %}

 {% set role = grains.id.split('_') | last %}
-{% set ELASTICUSER = salt['pillar.get']('elasticsearch:auth:user', '' ) %}
-{% set ELASTICPASS = salt['pillar.get']('elasticsearch:auth:pass', '' ) %}
+{% from 'elasticsearch/auth.map.jinja' import ELASTICAUTH with context %}

 # Remove variables.txt from /tmp - This is temp
 rmvariablesfile:
@@ -181,11 +180,7 @@ utilsyncscripts:
     - template: jinja
     - source: salt://common/tools/sbin
     - defaults:
-        ELASTICCURL: "curl"
-{% if salt['pillar.get']('elasticsearch:auth_enabled', False) %}
-    - context:
-        ELASTICCURL: "curl --user {{ELASTICUSER}}:{{ELASTICPASS}}"
-{% endif %}
+        ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}


 {% if role in ['eval', 'standalone', 'sensor', 'heavynode'] %}

@@ -30,7 +30,7 @@ echo -n "Waiting for ElasticSearch..."
 COUNT=0
 ELASTICSEARCH_CONNECTED="no"
 while [[ "$COUNT" -le 240 ]]; do
-  curl -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
+  {{ ELASTICCURL }} -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
   if [ $? -eq 0 ]; then
     ELASTICSEARCH_CONNECTED="yes"
     echo "connected!"
@@ -51,7 +51,7 @@ cd ${ELASTICSEARCH_TEMPLATES}


 echo "Loading templates..."
-for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; curl -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_template/so-$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
+for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_template/so-$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
 echo

 cd - >/dev/null

@@ -34,7 +34,7 @@ overlimit() {

 closedindices() {

-  INDICES=$(curl -s -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/_cat/indices?h=index\&expand_wildcards=closed 2> /dev/null)
+  INDICES=$({{ ELASTICCURL }} -s -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/_cat/indices?h=index\&expand_wildcards=closed 2> /dev/null)
   [ $? -eq 1 ] && return false
   echo ${INDICES} | grep -q -E "(logstash-|so-)"
 }
@@ -49,10 +49,10 @@ while overlimit && closedindices; do
   # First, get the list of closed indices using _cat/indices?h=index\&expand_wildcards=closed.
   # Then, sort by date by telling sort to use hyphen as delimiter and then sort on the third field.
   # Finally, select the first entry in that sorted list.
-  OLDEST_INDEX=$(curl -s -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/_cat/indices?h=index\&expand_wildcards=closed | grep -E "(logstash-|so-)" | sort -t- -k3 | head -1)
+  OLDEST_INDEX=$({{ ELASTICCURL }} -s -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/_cat/indices?h=index\&expand_wildcards=closed | grep -E "(logstash-|so-)" | sort -t- -k3 | head -1)

   # Now that we've determined OLDEST_INDEX, ask Elasticsearch to delete it.
-  curl -XDELETE -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/${OLDEST_INDEX}
+  {{ ELASTICCURL }} -XDELETE -k https://{{ELASTICSEARCH_HOST}}:{{ELASTICSEARCH_PORT}}/${OLDEST_INDEX}

   # Finally, write a log entry that says we deleted it.
   echo "$(date) - Used disk space exceeds LOG_SIZE_LIMIT ({{LOG_SIZE_LIMIT}} GB) - Index ${OLDEST_INDEX} deleted ..." >> ${LOG}

@@ -5,6 +5,7 @@
 {% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
 {% set MANAGER = salt['grains.get']('master') %}
 {% if grains['role'] in ['so-eval', 'so-node', 'so-managersearch', 'so-heavynode', 'so-standalone'] %}
+{% from 'elasticsearch/auth.map.jinja' import ELASTICAUTH with context %}
 # Curator
 # Create the group
 curatorgroup:
@@ -66,6 +67,8 @@ curcloseddeldel:
     - group: 939
     - mode: 755
     - template: jinja
+    - defaults:
+        ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}

 curclose:
   file.managed:

salt/elasticsearch/auth.map.jinja — new file (7 lines)
@@ -0,0 +1,7 @@
+{% set ELASTICAUTH = salt['pillar.filter_by']({
+    True: {
+        'user': salt['pillar.get']('elasticsearch:auth:user'),
+        'pass': salt['pillar.get']('elasticsearch:auth:pass'),
+        'elasticcurl':'curl --user {{ELASTICAUTH.user}}:{{ELASTICAUTH.pass}}'},
+    False: {'elasticcurl': 'curl'},
+}, pillar='elasticsearch:auth:enabled') %}
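Every state touched by this commit consumes the map the same way: import ELASTICAUTH, then hand ELASTICAUTH.elasticcurl to a jinja-templated script as ELASTICCURL, which the script uses wherever it previously called curl directly. A minimal sketch of that consumer pattern, with a made-up state ID and made-up file paths (only the import line and the defaults key mirror the diff):

{% from 'elasticsearch/auth.map.jinja' import ELASTICAUTH with context %}

example_script:                                  # hypothetical state ID
  file.managed:
    - name: /usr/sbin/so-example                 # hypothetical destination
    - source: salt://example/files/so-example    # hypothetical source
    - mode: 755
    - template: jinja
    - defaults:
        ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}

With this in place the per-state {% if salt['pillar.get']('elasticsearch:auth_enabled', False) %} / - context: blocks are no longer needed; the auth decision lives in the one map. Note that the map switches on elasticsearch:auth:enabled, whereas the removed per-state checks used elasticsearch:auth_enabled.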
@@ -27,7 +27,7 @@ echo -n "Waiting for ElasticSearch..."
 COUNT=0
 ELASTICSEARCH_CONNECTED="no"
 while [[ "$COUNT" -le 240 ]]; do
-  curl ${ELASTICSEARCH_AUTH} -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
+  {{ ELASTICCURL }} -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
   if [ $? -eq 0 ]; then
     ELASTICSEARCH_CONNECTED="yes"
     echo "connected!"
@@ -47,7 +47,7 @@ fi
 cd ${ELASTICSEARCH_INGEST_PIPELINES}

 echo "Loading pipelines..."
-for i in *; do echo $i; RESPONSE=$(curl ${ELASTICSEARCH_AUTH} -k -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_ingest/pipeline/$i -H 'Content-Type: application/json' -d@$i 2>/dev/null); echo $RESPONSE; if [[ "$RESPONSE" == *"error"* ]]; then RETURN_CODE=1; fi; done
+for i in *; do echo $i; RESPONSE=$({{ ELASTICCURL }} -k -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_ingest/pipeline/$i -H 'Content-Type: application/json' -d@$i 2>/dev/null); echo $RESPONSE; if [[ "$RESPONSE" == *"error"* ]]; then RETURN_CODE=1; fi; done
 echo

 cd - >/dev/null

@@ -1,8 +1,7 @@
 {% from 'allowed_states.map.jinja' import allowed_states %}

 {% if sls in allowed_states %}
-{% set ELASTICUSER = salt['pillar.get']('elasticsearch:auth:user', '' ) %}
-{% set ELASTICPASS = salt['pillar.get']('elasticsearch:auth:pass', '' ) %}
+{% from 'elasticsearch/auth.map.jinja' import ELASTICAUTH with context %}

 # This state is for checking things
 {% if grains['role'] in ['so-manager', 'so-managersearch', 'so-standalone'] %}
@@ -15,11 +14,7 @@ crossclusterson:
     - source: salt://utility/bin/crossthestreams
     - template: jinja
     - defaults:
-        ELASTICCURL: "curl"
-{% if salt['pillar.get']('elasticsearch:auth_enabled', False) %}
-    - context:
-        ELASTICCURL: "curl --user {{ELASTICUSER}}:{{ELASTICPASS}}"
-{% endif %}
+        ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}

 {% endif %}
 {% if grains['role'] in ['so-eval', 'so-import'] %}
@@ -31,11 +26,7 @@ fixsearch:
     - source: salt://utility/bin/eval
     - template: jinja
     - defaults:
-        ELASTICCURL: "curl"
-{% if salt['pillar.get']('elasticsearch:auth_enabled', False) %}
-    - context:
-        ELASTICCURL: "curl --user {{ELASTICUSER}}:{{ELASTICPASS}}"
-{% endif %}
+        ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}
 {% endif %}

 {% else %}