diff --git a/salt/common/tools/sbin_jinja/so-import-evtx b/salt/common/tools/sbin_jinja/so-import-evtx index 59a13612c..d12f34593 100755 --- a/salt/common/tools/sbin_jinja/so-import-evtx +++ b/salt/common/tools/sbin_jinja/so-import-evtx @@ -80,8 +80,8 @@ function evtx2es() { -e "SHIFTTS=$SHIFTDATE" \ -v "$EVTX:/tmp/data.evtx" \ -v "/nsm/import/$HASH/evtx/:/tmp/evtx/" \ - -v "/nsm/import/evtx-end_newest:/tmp/newest" \ - -v "/nsm/import/evtx-start_oldest:/tmp/oldest" \ + -v "/nsm/import/$HASH/evtx-end_newest:/tmp/newest" \ + -v "/nsm/import/$HASH/evtx-start_oldest:/tmp/oldest" \ --entrypoint "/evtx_calc_timestamps.sh" \ {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-pcaptools:{{ VERSION }} >> $LOG_FILE 2>&1 } @@ -111,12 +111,6 @@ INVALID_EVTXS_COUNT=0 VALID_EVTXS_COUNT=0 SKIPPED_EVTXS_COUNT=0 -touch /nsm/import/evtx-start_oldest -touch /nsm/import/evtx-end_newest - -echo $START_OLDEST > /nsm/import/evtx-start_oldest -echo $END_NEWEST > /nsm/import/evtx-end_newest - # paths must be quoted in case they include spaces for EVTX in $INPUT_FILES; do EVTX=$(/usr/bin/realpath "$EVTX") @@ -141,8 +135,15 @@ for EVTX in $INPUT_FILES; do status "- this EVTX has already been imported; skipping" SKIPPED_EVTXS_COUNT=$((SKIPPED_EVTXS_COUNT + 1)) else + # create EVTX directory EVTX_DIR=$HASH_DIR/evtx mkdir -p $EVTX_DIR + # create import timestamp files + for i in evtx-start_oldest evtx-end_newest; do + if ! [ -f "/nsm/import/$HASH/$i" ]; then + touch /nsm/import/$HASH/$i + fi + done # import evtx and write them to import ingest pipeline status "- importing logs to Elasticsearch..." 
@@ -154,28 +155,37 @@ for EVTX in $INPUT_FILES; do VALID_EVTXS_COUNT=$((VALID_EVTXS_COUNT + 1)) fi - # compare $START to $START_OLDEST - START=$(cat /nsm/import/evtx-start_oldest) - START_COMPARE=$(date -d $START +%s) - START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s) - if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then - START_OLDEST=$START - fi - - # compare $ENDNEXT to $END_NEWEST - END=$(cat /nsm/import/evtx-end_newest) - ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"` - ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s) - END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s) - if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then - END_NEWEST=$ENDNEXT - fi - cp -f "${EVTX}" "${EVTX_DIR}"/data.evtx chmod 644 "${EVTX_DIR}"/data.evtx fi # end of valid evtx + # determine start and end and make sure they aren't reversed + START=$(cat /nsm/import/$HASH/evtx-start_oldest) + END=$(cat /nsm/import/$HASH/evtx-end_newest) + START_EPOCH=`date -d "$START" +"%s"` + END_EPOCH=`date -d "$END" +"%s"` + if [ "$START_EPOCH" -gt "$END_EPOCH" ]; then + TEMP=$START + START=$END + END=$TEMP + fi + + # compare $START to $START_OLDEST + START_COMPARE=$(date -d $START +%s) + START_OLDEST_COMPARE=$(date -d $START_OLDEST +%s) + if [ $START_COMPARE -lt $START_OLDEST_COMPARE ]; then + START_OLDEST=$START + fi + + # compare $ENDNEXT to $END_NEWEST + ENDNEXT=`date +%Y-%m-%d --date="$END 1 day"` + ENDNEXT_COMPARE=$(date -d $ENDNEXT +%s) + END_NEWEST_COMPARE=$(date -d $END_NEWEST +%s) + if [ $ENDNEXT_COMPARE -gt $END_NEWEST_COMPARE ]; then + END_NEWEST=$ENDNEXT + fi + status done # end of for-loop processing evtx files diff --git a/salt/elasticsearch/defaults.yaml b/salt/elasticsearch/defaults.yaml index cc2f5e1cd..91e5191f6 100644 --- a/salt/elasticsearch/defaults.yaml +++ b/salt/elasticsearch/defaults.yaml @@ -3689,6 +3689,7 @@ elasticsearch: refresh_interval: 30s number_of_shards: 1 number_of_replicas: 0 + final_pipeline: ".fleet_final_pipeline-1" composed_of: - agent-mappings - dtc-agent-mappings diff --git 
a/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1 b/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1 index 688000fb7..52b6bae7a 100644 --- a/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1 +++ b/salt/elasticsearch/files/ingest/.fleet_final_pipeline-1 @@ -80,6 +80,7 @@ { "set": { "if": "ctx.network?.type == 'ipv6'", "override": true, "field": "destination.ipv6", "value": "true" } }, { "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.dataset", "value": "import" } }, { "set": { "if": "ctx.tags.0 == 'import'", "override": true, "field": "data_stream.namespace", "value": "so" } }, + { "date": { "if": "ctx.event?.module == 'system'", "field": "event.created", "target_field": "@timestamp", "formats": ["yyyy-MM-dd'T'HH:mm:ss.SSSSSS'Z'"] } }, { "community_id":{ "if": "ctx.event?.dataset == 'endpoint.events.network'", "ignore_failure":true } }, { "remove": { "field": [ "message2", "type", "fields", "category", "module", "dataset", "event.dataset_temp", "dataset_tag_temp", "module_temp" ], "ignore_missing": true, "ignore_failure": true } } ],