Compare commits


25 Commits

Author SHA1 Message Date
Jorge Reyes
4630eaef43 Update version to 2.4.0-foxtrot 2025-12-16 16:05:24 -06:00
coreyogburn
1d63269883 Merge pull request #15323 from Security-Onion-Solutions/cogburn/non-advanced-apiurl
Un-Advanced Assistant ApiUrl
2025-12-16 12:08:14 -07:00
Corey Ogburn
dd8027480b Un-Advanced Assistant ApiUrl 2025-12-16 12:02:01 -07:00
Mike Reeves
c45bd77e44 Merge pull request #15320 from Security-Onion-Solutions/TOoSmOotH-patch-1
Update VERSION
2025-12-16 11:25:35 -05:00
Mike Reeves
032e0abd61 Update 2-4.yml 2025-12-16 11:23:53 -05:00
Mike Reeves
8509d1e454 Update VERSION 2025-12-16 11:23:12 -05:00
Mike Reeves
ddd6935e50 Merge pull request #15318 from Security-Onion-Solutions/2.4.200
2.4.200
2025-12-16 09:15:32 -05:00
Mike Reeves
5588a56b24 2.4.200 2025-12-16 09:07:29 -05:00
Mike Reeves
12aed6e280 Merge pull request #15311 from Security-Onion-Solutions/TOoSmOotH-patch-5
Update so-minion
2025-12-15 12:07:37 -05:00
Mike Reeves
b2a469e08c Update so-minion 2025-12-15 11:56:23 -05:00
Jason Ertel
285b0e4af9 Merge pull request #15308 from Security-Onion-Solutions/idstools-refactor
Add trailing nl if it doesnt already exist
2025-12-14 15:35:24 -05:00
DefensiveDepth
f9edfd6391 Add trailing nl if it doesnt already exist 2025-12-14 12:03:44 -05:00
Josh Patterson
f6301bc3e5 Merge pull request #15304 from Security-Onion-Solutions/ggjorge
fix cleaning repos on remote nodes if airgap
2025-12-12 14:22:21 -05:00
Josh Patterson
6c5c176b7d fix cleaning repos on remote nodes if airgap 2025-12-12 14:18:54 -05:00
Josh Brower
c6d52b5eb1 Merge pull request #15303 from Security-Onion-Solutions/idstools-refactor
Add Airgap check
2025-12-12 09:59:19 -05:00
DefensiveDepth
7cac528389 Add Airgap check 2025-12-12 09:52:01 -05:00
Josh Brower
6fe817ca4a Merge pull request #15301 from Security-Onion-Solutions/idstools-refactor
Rework backup
2025-12-11 13:57:25 -05:00
DefensiveDepth
cb9a6fac25 Update tests for rework 2025-12-11 12:14:37 -05:00
DefensiveDepth
a945768251 Refactor backup 2025-12-11 11:15:30 -05:00
Mike Reeves
c6646e3821 Merge pull request #15289 from Security-Onion-Solutions/TOoSmOotH-patch-3
Update Assistant Models
2025-12-10 17:22:13 -05:00
Mike Reeves
99dc72cece Merge branch '2.4/dev' into TOoSmOotH-patch-3 2025-12-10 17:19:32 -05:00
Mike Reeves
94694d394e Add origin field to model training configuration 2025-12-08 16:36:09 -05:00
Mike Reeves
03dd746601 Add origin field to model configurations 2025-12-08 16:34:19 -05:00
Mike Reeves
eec3373ae7 Update display name for Claude Sonnet 4 2025-12-08 16:30:50 -05:00
Mike Reeves
db45ce07ed Modify model display names and remove GPT-OSS 120B
Updated display names for models and removed GPT-OSS 120B.
2025-12-08 16:26:45 -05:00
39 changed files with 297 additions and 562 deletions

View File

@@ -33,6 +33,7 @@ body:
- 2.4.180
- 2.4.190
- 2.4.200
- 2.4.210
- Other (please provide detail below)
validations:
required: true

View File

@@ -1,17 +1,17 @@
### 2.4.190-20251024 ISO image released on 2025/10/24
### 2.4.200-20251216 ISO image released on 2025/12/16
### Download and Verify
2.4.190-20251024 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
2.4.200-20251216 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
MD5: 25358481FB876226499C011FC0710358
SHA1: 0B26173C0CE136F2CA40A15046D1DFB78BCA1165
SHA256: 4FD9F62EDA672408828B3C0C446FE5EA9FF3C4EE8488A7AB1101544A3C487872
MD5: 07B38499952D1F2FD7B5AF10096D0043
SHA1: 7F3A26839CA3CAEC2D90BB73D229D55E04C7D370
SHA256: 8D3AC735873A2EA8527E16A6A08C34BD5018CBC0925AC4096E15A0C99F591D5F
Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
@@ -25,22 +25,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.
Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.190-20251024.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.200-20251216.iso.sig
```
Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.190-20251024.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.200-20251216.iso
```
Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.4.190-20251024.iso.sig securityonion-2.4.190-20251024.iso
gpg --verify securityonion-2.4.200-20251216.iso.sig securityonion-2.4.200-20251216.iso
```
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Thu 23 Oct 2025 07:21:46 AM EDT using RSA key ID FE507013
gpg: Signature made Mon 15 Dec 2025 05:24:11 PM EST using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.

View File

@@ -60,7 +60,7 @@ so-elastalert:
- watch:
- file: elastaconf
- onlyif:
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 9" {# only run this state if elasticsearch is version 9 #}
- "so-elasticsearch-query / | jq -r '.version.number[0:1]' | grep -q 8" {# only run this state if elasticsearch is version 8 #}
delete_so-elastalert_so-status.disabled:
file.uncomment:
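
For reference, the updated `onlyif` gate above only lets the so-elastalert state run when Elasticsearch reports a major version of 8. A rough Python equivalent of that check follows; it is a sketch only — the endpoint, credentials, and TLS handling are placeholders, not what `so-elasticsearch-query` actually wires up.

```
# Sketch: check the Elasticsearch major version the way the onlyif line does.
# Assumptions (hypothetical): ES at https://localhost:9200, basic-auth credentials,
# certificate verification skipped for brevity.
import requests
from requests.auth import HTTPBasicAuth

def es_major_version(user: str, password: str) -> str:
    info = requests.get(
        "https://localhost:9200/",
        auth=HTTPBasicAuth(user, password),
        verify=False,
        timeout=10,
    ).json()
    # jq's '.version.number[0:1]' takes the first character of e.g. "8.18.8";
    # splitting on "." is the same idea for single-digit majors.
    return info["version"]["number"].split(".")[0]

if es_major_version("so_elastic", "changeme") == "8":
    print("Elasticsearch 8.x detected - state would run")
```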

View File

@@ -2,7 +2,7 @@
{%- raw -%}
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "import-zeek-logs",
@@ -10,31 +10,19 @@
"description": "Zeek Import logs",
"policy_id": "so-grid-nodes_general",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/import/*/zeek/logs/*.log"
],
"data_stream.dataset": "import",
"pipeline": "",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": ["({%- endraw -%}{{ ELASTICFLEETMERGED.logging.zeek.excluded | join('|') }}{%- raw -%}).log$"],
"include_files": [],
"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/zeek/logs/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"import.file\").slice(0,-4);\n event.Put(\"@metadata.pipeline\", \"zeek.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: zeek\n imported: true\n- add_tags:\n tags: \"ics\"\n when:\n regexp:\n import.file: \"^bacnet*|^bsap*|^cip*|^cotp*|^dnp3*|^ecat*|^enip*|^modbus*|^opcua*|^profinet*|^s7comm*\"",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/zeek/logs/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"import.file\").slice(0,-4);\n event.Put(\"@metadata.pipeline\", \"zeek.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: zeek\n imported: true\n- add_tags:\n tags: \"ics\"\n when:\n regexp:\n import.file: \"^bacnet*|^bsap*|^cip*|^cotp*|^dnp3*|^ecat*|^enip*|^modbus*|^opcua*|^profinet*|^s7comm*\"",
"custom": "exclude_files: [\"{%- endraw -%}{{ ELASTICFLEETMERGED.logging.zeek.excluded | join('|') }}{%- raw -%}.log$\"]\n"
}
}
}

View File

@@ -11,51 +11,36 @@
{%- endif -%}
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "kratos-logs",
"namespace": "so",
"description": "Kratos logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/kratos/kratos.log"
],
"data_stream.dataset": "kratos",
"pipeline": "kratos",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"tags": ["so-kratos"],
{%- if valid_identities -%}
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true\n- add_fields:\n target: event\n fields:\n category: iam\n module: kratos\n- if:\n has_fields:\n - identity_id\n then:{% for id, email in identities %}\n - if:\n equals:\n identity_id: \"{{ id }}\"\n then:\n - add_fields:\n target: ''\n fields:\n user.name: \"{{ email }}\"{% endfor %}",
{%- else -%}
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true\n- add_fields:\n target: event\n fields:\n category: iam\n module: kratos",
{%- endif -%}
"tags": [
"so-kratos"
],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"custom": "pipeline: kratos"
}
}
}
}
},
"force": true
}
}

View File

@@ -2,38 +2,28 @@
{%- raw -%}
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"id": "zeek-logs",
"name": "zeek-logs",
"namespace": "so",
"description": "Zeek logs",
"policy_id": "so-grid-nodes_general",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/zeek/logs/current/*.log"
],
"data_stream.dataset": "zeek",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": ["({%- endraw -%}{{ ELASTICFLEETMERGED.logging.zeek.excluded | join('|') }}{%- raw -%}).log$"],
"include_files": [],
"processors": "- dissect:\n tokenizer: \"/nsm/zeek/logs/current/%{pipeline}.log\"\n field: \"log.file.path\"\n trim_chars: \".log\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"pipeline\");\n event.Put(\"@metadata.pipeline\", \"zeek.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: zeek\n- add_tags:\n tags: \"ics\"\n when:\n regexp:\n pipeline: \"^bacnet*|^bsap*|^cip*|^cotp*|^dnp3*|^ecat*|^enip*|^modbus*|^opcua*|^profinet*|^s7comm*\"",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- dissect:\n tokenizer: \"/nsm/zeek/logs/current/%{pipeline}.log\"\n field: \"log.file.path\"\n trim_chars: \".log\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"pipeline\");\n event.Put(\"@metadata.pipeline\", \"zeek.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: zeek\n- add_tags:\n tags: \"ics\"\n when:\n regexp:\n pipeline: \"^bacnet*|^bsap*|^cip*|^cotp*|^dnp3*|^ecat*|^enip*|^modbus*|^opcua*|^profinet*|^s7comm*\"",
"custom": "exclude_files: [\"{%- endraw -%}{{ ELASTICFLEETMERGED.logging.zeek.excluded | join('|') }}{%- raw -%}.log$\"]\n"
}
}
}
@@ -41,4 +31,4 @@
},
"force": true
}
{%- endraw -%}
{%- endraw -%}

View File

@@ -5,7 +5,7 @@
"package": {
"name": "endpoint",
"title": "Elastic Defend",
"version": "9.0.2",
"version": "8.18.1",
"requires_root": true
},
"enabled": true,

View File

@@ -1,43 +1,26 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "hydra-logs",
"namespace": "so",
"description": "Hydra logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/hydra/hydra.log"
],
"data_stream.dataset": "hydra",
"pipeline": "hydra",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true\n- add_fields:\n target: event\n fields:\n category: iam\n module: hydra",
"tags": [
"so-hydra"
],
"recursive_glob": true,
"ignore_older": "72h",
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"tags": ["so-hydra"],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: iam\n module: hydra",
"custom": "pipeline: hydra"
}
}
}
@@ -45,5 +28,3 @@
},
"force": true
}

View File

@@ -1,44 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "idh-logs",
"namespace": "so",
"description": "IDH integration",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/idh/opencanary.log"
],
"data_stream.dataset": "idh",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true\n- convert:\n fields:\n - {from: \"logtype\", to: \"event.code\", type: \"string\"}\n- drop_fields:\n when:\n equals:\n event.code: \"1001\"\n fields: [\"src_host\", \"src_port\", \"dst_host\", \"dst_port\" ]\n ignore_missing: true\n- rename:\n fields:\n - from: \"src_host\"\n to: \"source.ip\"\n - from: \"src_port\"\n to: \"source.port\"\n - from: \"dst_host\"\n to: \"destination.host\"\n - from: \"dst_port\"\n to: \"destination.port\"\n ignore_missing: true\n- drop_fields:\n fields: '[\"prospector\", \"input\", \"offset\", \"beat\"]'\n- add_fields:\n target: event\n fields:\n category: host\n module: opencanary",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n add_error_key: true\n- convert:\n fields:\n - {from: \"logtype\", to: \"event.code\", type: \"string\"}\n- drop_fields:\n when:\n equals:\n event.code: \"1001\"\n fields: [\"src_host\", \"src_port\", \"dst_host\", \"dst_port\" ]\n ignore_missing: true\n- rename:\n fields:\n - from: \"src_host\"\n to: \"source.ip\"\n - from: \"src_port\"\n to: \"source.port\"\n - from: \"dst_host\"\n to: \"destination.host\"\n - from: \"dst_port\"\n to: \"destination.port\"\n ignore_missing: true\n- drop_fields:\n fields: '[\"prospector\", \"input\", \"offset\", \"beat\"]'\n- add_fields:\n target: event\n fields:\n category: host\n module: opencanary",
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,46 +1,33 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "import-evtx-logs",
"namespace": "so",
"description": "Import Windows EVTX logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"vars": {},
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/import/*/evtx/*.json"
],
"data_stream.dataset": "import",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"custom": "",
"processors": "- dissect:\n tokenizer: \"/nsm/import/%{import.id}/evtx/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n- decode_json_fields:\n fields: [\"message\"]\n target: \"\"\n- drop_fields:\n fields: [\"host\"]\n ignore_missing: true\n- add_fields:\n target: data_stream\n fields:\n type: logs\n dataset: system.security\n- add_fields:\n target: event\n fields:\n dataset: system.security\n module: system\n imported: true\n- add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.security-2.6.1\n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-Sysmon/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.sysmon_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.sysmon_operational\n module: windows\n imported: true\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.sysmon_operational-3.1.2\n- if:\n equals:\n winlog.channel: 'Application'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.application\n - add_fields:\n target: event\n fields:\n dataset: system.application\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.application-2.6.1\n- if:\n equals:\n winlog.channel: 'System'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: system.system\n - add_fields:\n target: event\n fields:\n dataset: system.system\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-system.system-2.6.1\n \n- if:\n equals:\n winlog.channel: 'Microsoft-Windows-PowerShell/Operational'\n then: \n - add_fields:\n target: data_stream\n fields:\n dataset: windows.powershell_operational\n - add_fields:\n target: event\n fields:\n dataset: windows.powershell_operational\n module: windows\n - add_fields:\n target: \"@metadata\"\n fields:\n pipeline: logs-windows.powershell_operational-3.1.2\n- add_fields:\n target: data_stream\n fields:\n dataset: import",
"tags": [
"import"
],
"recursive_glob": true,
"ignore_older": "72h",
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
]
}
}
}
}
},
"force": true
}
}

View File

@@ -1,45 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "import-suricata-logs",
"namespace": "so",
"description": "Import Suricata logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/import/*/suricata/eve*.json"
],
"data_stream.dataset": "import",
"pipeline": "suricata.common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- add_fields:\n target: event\n fields:\n category: network\n module: suricata\n imported: true\n- dissect:\n tokenizer: \"/nsm/import/%{import.id}/suricata/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"\n",
"tags": [],
"recursive_glob": true,
"ignore_older": "72h",
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- add_fields:\n target: event\n fields:\n category: network\n module: suricata\n imported: true\n- dissect:\n tokenizer: \"/nsm/import/%{import.id}/suricata/%{import.file}\"\n field: \"log.file.path\"\n target_prefix: \"\"",
"custom": "pipeline: suricata.common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,17 +1,18 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "rita-logs",
"namespace": "so",
"description": "RITA Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"vars": {},
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
@@ -19,28 +20,15 @@
"/nsm/rita/exploded-dns.csv",
"/nsm/rita/long-connections.csv"
],
"data_stream.dataset": "rita",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- dissect:\n tokenizer: \"/nsm/rita/%{pipeline}.csv\"\n field: \"log.file.path\"\n trim_chars: \".csv\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"pipeline\").split(\"-\");\n if (pl.length > 1) {\n pl = pl[1];\n }\n else {\n pl = pl[0];\n }\n event.Put(\"@metadata.pipeline\", \"rita.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: rita",
"tags": [],
"recursive_glob": true,
"exclude_files": [],
"ignore_older": "72h",
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"data_stream.dataset": "rita",
"tags": [],
"processors": "- dissect:\n tokenizer: \"/nsm/rita/%{pipeline}.csv\"\n field: \"log.file.path\"\n trim_chars: \".csv\"\n target_prefix: \"\"\n- script:\n lang: javascript\n source: >\n function process(event) {\n var pl = event.Get(\"pipeline\").split(\"-\");\n if (pl.length > 1) {\n pl = pl[1];\n }\n else {\n pl = pl[0];\n }\n event.Put(\"@metadata.pipeline\", \"rita.\" + pl);\n }\n- add_fields:\n target: event\n fields:\n category: network\n module: rita",
"custom": "exclude_lines: ['^Score', '^Source', '^Domain', '^No results']"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,41 +1,29 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "so-ip-mappings",
"namespace": "so",
"description": "IP Description mappings",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"vars": {},
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/custom-mappings/ip-descriptions.csv"
],
"data_stream.dataset": "hostnamemappings",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- decode_csv_fields:\n fields:\n message: decoded.csv\n separator: \",\"\n ignore_missing: false\n overwrite_keys: true\n trim_leading_space: true\n fail_on_error: true\n\n- extract_array:\n field: decoded.csv\n mappings:\n so.ip_address: '0'\n so.description: '1'\n\n- script:\n lang: javascript\n source: >\n function process(event) {\n var ip = event.Get('so.ip_address');\n var validIpRegex = /^((25[0-5]|2[0-4]\\d|1\\d{2}|[1-9]?\\d)\\.){3}(25[0-5]|2[0-4]\\d|1\\d{2}|[1-9]?\\d)$/\n if (!validIpRegex.test(ip)) {\n event.Cancel();\n }\n }\n- fingerprint:\n fields: [\"so.ip_address\"]\n target_field: \"@metadata._id\"\n",
"tags": [
"so-ip-mappings"
],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- decode_csv_fields:\n fields:\n message: decoded.csv\n separator: \",\"\n ignore_missing: false\n overwrite_keys: true\n trim_leading_space: true\n fail_on_error: true\n\n- extract_array:\n field: decoded.csv\n mappings:\n so.ip_address: '0'\n so.description: '1'\n\n- script:\n lang: javascript\n source: >\n function process(event) {\n var ip = event.Get('so.ip_address');\n var validIpRegex = /^((25[0-5]|2[0-4]\\d|1\\d{2}|[1-9]?\\d)\\.){3}(25[0-5]|2[0-4]\\d|1\\d{2}|[1-9]?\\d)$/\n if (!validIpRegex.test(ip)) {\n event.Cancel();\n }\n }\n- fingerprint:\n fields: [\"so.ip_address\"]\n target_field: \"@metadata._id\"\n",
"custom": ""
}
}
}
@@ -43,3 +31,5 @@
},
"force": true
}

View File

@@ -1,44 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "soc-auth-sync-logs",
"namespace": "so",
"description": "Security Onion - Elastic Auth Sync - Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/soc/sync.log"
],
"data_stream.dataset": "soc",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"tags": ["so-soc"],
"processors": "- dissect:\n tokenizer: \"%{event.action}\"\n field: \"message\"\n target_prefix: \"\"\n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: auth_sync",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,48 +1,35 @@
{
"policy_id": "so-grid-nodes_general",
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "soc-detections-logs",
"description": "Security Onion Console - Detections Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/soc/detections_runtime-status_sigma.log",
"/opt/so/log/soc/detections_runtime-status_yara.log"
],
"exclude_files": [],
"ignore_older": "72h",
"data_stream.dataset": "soc",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"soc\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: detections\n- rename:\n fields:\n - from: \"soc.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"soc.fields.status\"\n to: \"http.response.status_code\"\n - from: \"soc.fields.method\"\n to: \"http.request.method\"\n - from: \"soc.fields.path\"\n to: \"url.path\"\n - from: \"soc.message\"\n to: \"event.action\"\n - from: \"soc.level\"\n to: \"log.level\"\n ignore_missing: true",
"tags": [
"so-soc"
],
"recursive_glob": true,
"ignore_older": "72h",
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"soc\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: detections\n- rename:\n fields:\n - from: \"soc.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"soc.fields.status\"\n to: \"http.response.status_code\"\n - from: \"soc.fields.method\"\n to: \"http.request.method\"\n - from: \"soc.fields.path\"\n to: \"url.path\"\n - from: \"soc.message\"\n to: \"event.action\"\n - from: \"soc.level\"\n to: \"log.level\"\n ignore_missing: true",
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,46 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "soc-salt-relay-logs",
"namespace": "so",
"description": "Security Onion - Salt Relay - Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/soc/salt-relay.log"
],
"data_stream.dataset": "soc",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"tags": ["so-soc"],
"processors": "- dissect:\n tokenizer: \"%{soc.ts} | %{event.action}\"\n field: \"message\"\n target_prefix: \"\"\n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: salt_relay",
"tags": [
"so-soc"
],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,44 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "soc-sensoroni-logs",
"namespace": "so",
"description": "Security Onion - Sensoroni - Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/sensoroni/sensoroni.log"
],
"data_stream.dataset": "soc",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"sensoroni\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: sensoroni\n- rename:\n fields:\n - from: \"sensoroni.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"sensoroni.fields.status\"\n to: \"http.response.status_code\"\n - from: \"sensoroni.fields.method\"\n to: \"http.request.method\"\n - from: \"sensoroni.fields.path\"\n to: \"url.path\"\n - from: \"sensoroni.message\"\n to: \"event.action\"\n - from: \"sensoroni.level\"\n to: \"log.level\"\n ignore_missing: true",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"sensoroni\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: sensoroni\n- rename:\n fields:\n - from: \"sensoroni.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"sensoroni.fields.status\"\n to: \"http.response.status_code\"\n - from: \"sensoroni.fields.method\"\n to: \"http.request.method\"\n - from: \"sensoroni.fields.path\"\n to: \"url.path\"\n - from: \"sensoroni.message\"\n to: \"event.action\"\n - from: \"sensoroni.level\"\n to: \"log.level\"\n ignore_missing: true",
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
"force": true
}

View File

@@ -1,46 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "soc-server-logs",
"namespace": "so",
"description": "Security Onion Console Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/opt/so/log/soc/sensoroni-server.log"
],
"data_stream.dataset": "soc",
"pipeline": "common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"tags": ["so-soc"],
"processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"soc\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: server\n- rename:\n fields:\n - from: \"soc.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"soc.fields.status\"\n to: \"http.response.status_code\"\n - from: \"soc.fields.method\"\n to: \"http.request.method\"\n - from: \"soc.fields.path\"\n to: \"url.path\"\n - from: \"soc.message\"\n to: \"event.action\"\n - from: \"soc.level\"\n to: \"log.level\"\n ignore_missing: true",
"tags": [
"so-soc"
],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"custom": "pipeline: common"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,44 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "strelka-logs",
"description": "Strelka Logs",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"description": "Strelka logs",
"policy_id": "so-grid-nodes_general",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/strelka/log/strelka.log"
],
"data_stream.dataset": "strelka",
"pipeline": "strelka.file",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- add_fields:\n target: event\n fields:\n category: file\n module: strelka",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- add_fields:\n target: event\n fields:\n category: file\n module: strelka",
"custom": "pipeline: strelka.file"
}
}
}
}
},
"force": true
}
}

View File

@@ -1,44 +1,30 @@
{
"package": {
"name": "filestream",
"name": "log",
"version": ""
},
"name": "suricata-logs",
"namespace": "so",
"description": "Suricata integration",
"policy_id": "so-grid-nodes_general",
"namespace": "so",
"inputs": {
"filestream-filestream": {
"logs-logfile": {
"enabled": true,
"streams": {
"filestream.generic": {
"log.logs": {
"enabled": true,
"vars": {
"paths": [
"/nsm/suricata/eve*.json"
],
"data_stream.dataset": "filestream.generic",
"pipeline": "suricata.common",
"parsers": "#- ndjson:\n# target: \"\"\n# message_key: msg\n#- multiline:\n# type: count\n# count_lines: 3\n",
"exclude_files": [
"\\.gz$"
],
"include_files": [],
"processors": "- add_fields:\n target: event\n fields:\n category: network\n module: suricata",
"data_stream.dataset": "suricata",
"tags": [],
"recursive_glob": true,
"clean_inactive": -1,
"harvester_limit": 0,
"fingerprint": true,
"fingerprint_offset": 0,
"fingerprint_length": "64",
"file_identity_native": false,
"exclude_lines": [],
"include_lines": []
"processors": "- add_fields:\n target: event\n fields:\n category: network\n module: suricata",
"custom": "pipeline: suricata.common"
}
}
}
}
},
"force": true
}
}

View File

@@ -21,7 +21,6 @@
'azure_application_insights.app_state': 'azure.app_state',
'azure_billing.billing': 'azure.billing',
'azure_functions.metrics': 'azure.function',
'azure_ai_foundry.metrics': 'azure.ai_foundry',
'azure_metrics.compute_vm_scaleset': 'azure.compute_vm_scaleset',
'azure_metrics.compute_vm': 'azure.compute_vm',
'azure_metrics.container_instance': 'azure.container_instance',

View File

@@ -86,7 +86,7 @@ if [[ -f $STATE_FILE_SUCCESS ]]; then
latest_package_list=$(/usr/sbin/so-elastic-fleet-package-list)
echo '{ "packages" : []}' > $BULK_INSTALL_PACKAGE_LIST
rm -f $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .installationInfo.version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
echo $latest_package_list | jq '{packages: [.items[] | {name: .name, latest_version: .version, installed_version: .savedObject.attributes.install_version, subscription: .conditions.elastic.subscription }]}' >> $INSTALLED_PACKAGE_LIST
while read -r package; do
# get package details
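
The jq change above reads the installed version from the package's saved object instead of `installationInfo`. As a sketch, the same reshaping in Python looks like the following; the field paths are taken from the jq filter in this diff, and the input is assumed to be the parsed JSON from `so-elastic-fleet-package-list`.

```
# Sketch: reshape the Fleet package list the way the updated jq filter does.
import json

def summarize_packages(package_list: dict) -> dict:
    packages = []
    for item in package_list.get("items", []):
        packages.append({
            "name": item.get("name"),
            "latest_version": item.get("version"),
            # now read from the saved object instead of installationInfo.version
            "installed_version": item.get("savedObject", {})
                                     .get("attributes", {})
                                     .get("install_version"),
            "subscription": item.get("conditions", {})
                                .get("elastic", {})
                                .get("subscription"),
        })
    return {"packages": packages}

print(json.dumps(summarize_packages({"items": []}), indent=2))
```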

View File

@@ -47,7 +47,7 @@ if ! kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg KAFKACA "$KAFKACA" \
--arg MANAGER_IP "{{ GLOBALS.manager_ip }}:9092" \
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka", "id":"so-manager_kafka","type":"kafka","hosts":[ $MANAGER_IP ],"is_default":false,"is_default_monitoring":false,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X POST "localhost:5601/api/fleet/outputs" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to setup Elastic Fleet output policy for Kafka...\n"
@@ -67,7 +67,7 @@ elif kafka_output=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L "http://l
--arg ENABLED_DISABLED "$ENABLED_DISABLED"\
--arg KAFKA_OUTPUT_VERSION "$KAFKA_OUTPUT_VERSION" \
--argjson HOSTS "$HOSTS" \
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topic":"default-securityonion","headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
'{"name":"grid-kafka","type":"kafka","hosts":$HOSTS,"is_default":$ENABLED_DISABLED,"is_default_monitoring":$ENABLED_DISABLED,"config_yaml":"","ssl":{"certificate_authorities":[ $KAFKACA ],"certificate": $KAFKACRT ,"key":"","verification_mode":"full"},"proxy_id":null,"client_id":"Elastic","version": $KAFKA_OUTPUT_VERSION ,"compression":"none","auth_type":"ssl","partition":"round_robin","round_robin":{"group_events":10},"topics":[{"topic":"default-securityonion"}],"headers":[{"key":"","value":""}],"timeout":30,"broker_timeout":30,"required_acks":1,"secrets":{"ssl":{"key": $KAFKAKEY }}}'
)
if ! response=$(curl -sK /opt/so/conf/elasticsearch/curl.config -L -X PUT "localhost:5601/api/fleet/outputs/so-manager_kafka" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d "$JSON_STRING" --fail 2>/dev/null); then
echo -e "\nFailed to force update to Elastic Fleet output policy for Kafka...\n"
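
The functional change in the Kafka output body above is that the single `topic` string becomes a `topics` array of objects. A minimal sketch of the new shape follows; the values are placeholders and this is not the full policy the script builds.

```
# Sketch: the Fleet Kafka output now expects "topics" as a list of objects
# rather than a single "topic" string. Values below are placeholders.
import json

kafka_output = {
    "name": "grid-kafka",
    "id": "so-manager_kafka",
    "type": "kafka",
    "hosts": ["<manager_ip>:9092"],
    "auth_type": "ssl",
    "partition": "round_robin",
    "round_robin": {"group_events": 10},
    # previously: "topic": "default-securityonion"
    "topics": [{"topic": "default-securityonion"}],
    "timeout": 30,
    "broker_timeout": 30,
    "required_acks": 1,
}

print(json.dumps(kafka_output, indent=2))
```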

View File

@@ -1,6 +1,6 @@
elasticsearch:
enabled: false
version: 9.0.8
version: 8.18.8
index_clean: true
config:
action:
@@ -299,6 +299,19 @@ elasticsearch:
hot:
actions: {}
min_age: 0ms
sos-backup:
index_sorting: false
index_template:
composed_of: []
ignore_missing_component_templates: []
index_patterns:
- sos-backup-*
priority: 501
template:
settings:
index:
number_of_replicas: 0
number_of_shards: 1
so-assistant-chat:
index_sorting: false
index_template:

View File

@@ -15,7 +15,7 @@ set -e
if [ ! -f /opt/so/saltstack/local/salt/elasticsearch/cacerts ]; then
docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ GLOBALS.registry_host }}:5000/{{ GLOBALS.image_repo }}/so-elasticsearch:$ELASTIC_AGENT_TARBALL_VERSION -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/elasticsearch/cacerts
docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
docker rm so-elasticsearchca
echo "" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem
echo "sosca" >> /opt/so/saltstack/local/salt/elasticsearch/tls-ca-bundle.pem

View File

@@ -63,7 +63,7 @@ logstash:
settings:
lsheap: 500m
config:
api_x_http_x_host: 0.0.0.0
http_x_host: 0.0.0.0
path_x_logs: /var/log/logstash
pipeline_x_workers: 1
pipeline_x_batch_x_size: 125

View File

@@ -5,10 +5,10 @@ input {
codec => es_bulk
request_headers_target_field => client_headers
remote_host_target_field => client_host
ssl_enabled => true
ssl => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/filebeat.crt"
ssl_key => "/usr/share/logstash/filebeat.key"
ssl_client_authentication => "required"
ssl_verify_mode => "peer"
}
}

View File

@@ -2,11 +2,11 @@ input {
elastic_agent {
port => 5055
tags => [ "elastic-agent", "input-{{ GLOBALS.hostname }}" ]
ssl_enabled => true
ssl => true
ssl_certificate_authorities => ["/usr/share/filebeat/ca.crt"]
ssl_certificate => "/usr/share/logstash/elasticfleet-logstash.crt"
ssl_key => "/usr/share/logstash/elasticfleet-logstash.key"
ssl_client_authentication => "required"
ssl_verify_mode => "force_peer"
ecs_compatibility => v8
}
}

View File

@@ -2,7 +2,7 @@ input {
elastic_agent {
port => 5056
tags => [ "elastic-agent", "fleet-lumberjack-input" ]
ssl_enabled => true
ssl => true
ssl_certificate => "/usr/share/logstash/elasticfleet-lumberjack.crt"
ssl_key => "/usr/share/logstash/elasticfleet-lumberjack.key"
ecs_compatibility => v8

View File

@@ -8,8 +8,8 @@ output {
document_id => "%{[metadata][_id]}"
index => "so-ip-mappings"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl_enabled => true
ssl_verification_mode => "none"
ssl => true
ssl_certificate_verification => false
}
}
else {
@@ -25,8 +25,8 @@ output {
document_id => "%{[metadata][_id]}"
pipeline => "%{[metadata][pipeline]}"
silence_errors_in_log => ["version_conflict_engine_exception"]
ssl_enabled => true
ssl_verification_mode => "none"
ssl => true
ssl_certificate_verification => false
}
}
else {
@@ -37,8 +37,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
pipeline => "%{[metadata][pipeline]}"
ssl_enabled => true
ssl_verification_mode => "none"
ssl => true
ssl_certificate_verification => false
}
}
}
@@ -49,8 +49,8 @@ output {
data_stream => true
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
ssl_enabled => true
ssl_verification_mode=> "none"
ssl => true
ssl_certificate_verification => false
}
}
}

View File

@@ -13,8 +13,8 @@ output {
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
index => "endgame-%{+YYYY.MM.dd}"
ssl_enabled => true
ssl_verification_mode => "none"
ssl => true
ssl_certificate_verification => false
}
}
}

View File

@@ -56,7 +56,7 @@ logstash:
helpLink: logstash.html
global: False
config:
api_x_http_x_host:
http_x_host:
description: Host interface to listen to connections.
helpLink: logstash.html
readonly: True

View File

@@ -133,7 +133,7 @@ function getinstallinfo() {
return 1
fi
source <(echo $INSTALLVARS)
export $(echo "$INSTALLVARS" | xargs)
if [ $? -ne 0 ]; then
log "ERROR" "Failed to source install variables"
return 1

View File

@@ -87,9 +87,6 @@ check_err() {
113)
echo 'No route to host'
;;
160)
echo 'Incompatiable Elasticsearch upgrade'
;;
*)
echo 'Unhandled error'
echo "$err_msg"
@@ -430,7 +427,6 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.4.170 ]] && up_to_2.4.180
[[ "$INSTALLEDVERSION" == 2.4.180 ]] && up_to_2.4.190
[[ "$INSTALLEDVERSION" == 2.4.190 ]] && up_to_2.4.200
[[ "$INSTALLEDVERSION" == 2.4.200 ]] && up_to_2.4.210
true
}
@@ -463,7 +459,6 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.4.170 ]] && post_to_2.4.180
[[ "$POSTVERSION" == 2.4.180 ]] && post_to_2.4.190
[[ "$POSTVERSION" == 2.4.190 ]] && post_to_2.4.200
[[ "$POSTVERSION" == 2.4.200 ]] && post_to_2.4.210
true
}
@@ -620,6 +615,9 @@ post_to_2.4.180() {
}
post_to_2.4.190() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
# Only need to update import / eval nodes
if [[ "$MINION_ROLE" == "import" ]] || [[ "$MINION_ROLE" == "eval" ]]; then
update_import_fleet_output
@@ -647,13 +645,6 @@ post_to_2.4.200() {
POSTVERSION=2.4.200
}
post_to_2.4.210() {
echo "Regenerating Elastic Agent Installers"
/sbin/so-elastic-agent-gen-installers
POSTVERSION=2.4.210
}
repo_sync() {
echo "Sync the local repo."
su socore -c '/usr/sbin/so-repo-sync' || fail "Unable to complete so-repo-sync."
@@ -915,7 +906,9 @@ up_to_2.4.180() {
}
up_to_2.4.190() {
echo "Nothing to do for 2.4.190"
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
INSTALLEDVERSION=2.4.190
}
@@ -928,13 +921,6 @@ up_to_2.4.200() {
INSTALLEDVERSION=2.4.200
}
up_to_2.4.210() {
# Elastic Update for this release, so download Elastic Agent files
determine_elastic_agent_upgrade
INSTALLEDVERSION=2.4.210
}
add_hydra_pillars() {
mkdir -p /opt/so/saltstack/local/pillar/hydra
touch /opt/so/saltstack/local/pillar/hydra/soc_hydra.sls
@@ -1139,40 +1125,35 @@ mkdir -p /nsm/backup/detections-migration/2-4-200
cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
cp /opt/so/conf/idstools/etc/rulecat.conf /nsm/backup/detections-migration/2-4-200
if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
python3 /opt/so/conf/soc/so-detections-backup.py
# Backup so-detection index via reindex
echo "Creating sos-backup index template..."
template_result=$(/sbin/so-elasticsearch-query '_index_template/sos-backup' -X PUT \
--retry 5 --retry-delay 15 --retry-all-errors \
-d '{"index_patterns":["sos-backup-*"],"priority":501,"template":{"settings":{"index":{"number_of_replicas":0,"number_of_shards":1}}}}')
# Verify backup by comparing counts
echo "Verifying detection overrides backup..."
es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
--retry 5 --retry-delay 10 --retry-all-errors \
-d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
echo " Error: Failed to query Elasticsearch for override count"
exit 1
}
if [[ -z "$template_result" ]] || ! echo "$template_result" | jq -e '.acknowledged == true' > /dev/null 2>&1; then
echo "Error: Failed to create sos-backup index template"
echo "$template_result"
exit 1
fi
if [[ ! "$es_override_count" =~ ^[0-9]+$ ]]; then
echo " Error: Invalid override count from Elasticsearch: '$es_override_count'"
exit 1
fi
BACKUP_INDEX="sos-backup-detection-$(date +%Y%m%d-%H%M%S)"
echo "Backing up so-detection index to $BACKUP_INDEX..."
reindex_result=$(/sbin/so-elasticsearch-query '_reindex?wait_for_completion=true' \
--retry 5 --retry-delay 15 --retry-all-errors \
-X POST -d "{\"source\": {\"index\": \"so-detection\"}, \"dest\": {\"index\": \"$BACKUP_INDEX\"}}")
backup_override_count=$(find /nsm/backup/detections/repo/*/overrides -type f 2>/dev/null | wc -l)
echo " Elasticsearch overrides: $es_override_count"
echo " Backed up overrides: $backup_override_count"
if [[ "$es_override_count" -gt 0 ]]; then
if [[ "$backup_override_count" -gt 0 ]]; then
echo " Override backup verified successfully"
else
echo " Error: Elasticsearch has $es_override_count overrides but backup has 0 files"
exit 1
fi
else
echo " No overrides to backup"
fi
if [[ -z "$reindex_result" ]]; then
echo "Error: Backup of detections failed - no response from Elasticsearch"
exit 1
elif echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
echo "so-detection index does not exist, skipping backup"
else
echo "SOC Detections backup script not found, skipping detection backup"
echo "Error: Backup of detections failed"
echo "$reindex_result"
exit 1
fi
}
@@ -1193,11 +1174,12 @@ hash_normalized_file() {
return 1
fi
sed -E \
# Ensure trailing newline for consistent hashing regardless of source file
{ sed -E \
-e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
-e '/^$/d' \
-e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
"$file" | sha256sum | awk '{print $1}'
"$file"; echo; } | sed '/^$/d' | sha256sum | awk '{print $1}'
}
# Known-default hashes for so-rule-update (ETOPEN ruleset)
@@ -1293,6 +1275,13 @@ custom_found=0
check_config_file "$SO_RULE_UPDATE" "KNOWN_SO_RULE_UPDATE_HASHES" || custom_found=1
check_config_file "$RULECAT_CONF" "KNOWN_RULECAT_CONF_HASHES" || custom_found=1
# Check for ETPRO rules on airgap systems
if [[ $is_airgap -eq 0 ]] && grep -q 'ETPRO ' /nsm/rules/suricata/emerging-all.rules 2>/dev/null; then
echo "ETPRO rules detected on airgap system - custom configuration"
echo "ETPRO rules detected on Airgap in /nsm/rules/suricata/emerging-all.rules" >> /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
custom_found=1
fi
# If no custom configs found, remove syncBlock
if [[ $custom_found -eq 0 ]]; then
echo "idstools migration completed successfully - removing Suricata engine syncBlock"
@@ -1318,6 +1307,7 @@ fi
echo "Removing idstools symlink and scripts..."
rm -rf /usr/sbin/so-idstools*
sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
crontab -l | grep -v 'so-rule-update' | crontab -
# Backup the salt master config & manager pillar before editing it
cp /opt/so/saltstack/local/pillar/minions/$MINIONID.sls /nsm/backup/detections-migration/2-4-200/
@@ -1622,69 +1612,6 @@ verify_latest_update_script() {
fi
}
verify_es_version_compatibility() {
# supported upgrade paths for SO-ES versions
declare -A es_upgrade_map=(
["8.14.3"]="8.17.3 8.18.4 8.18.6 8.18.8"
["8.17.3"]="8.18.4 8.18.6 8.18.8"
["8.18.4"]="8.18.6 8.18.8 9.0.8"
["8.18.6"]="8.18.8 9.0.8"
["8.18.8"]="9.0.8"
)
# Elasticsearch MUST upgrade through these versions
declare -A es_to_so_version=(
["8.18.8"]="2.4.190-20251024"
)
# Get current Elasticsearch version
if es_version_raw=$(so-elasticsearch-query / --fail --retry 5 --retry-delay 10); then
es_version=$(echo "$es_version_raw" | jq -r '.version.number' )
else
echo "Could not determine current Elasticsearch version to validate compatibility with post soup Elasticsearch version."
exit 160
fi
if ! target_es_version=$(so-yaml.py get $UPDATE_DIR/salt/elasticsearch/defaults.yaml elasticsearch.version | sed -n '1p'); then
# so-yaml.py failed to get the ES version from upgrade versions elasticsearch/defaults.yaml file. Likely they are upgrading to an SO version older than 2.4.110 prior to the ES version pinning and should be OKAY to continue with the upgrade.
# if so-yaml.py failed to get the ES version AND the version we are upgrading to is newer than 2.4.110 then we should bail
if [[ $(cat $UPDATE_DIR/VERSION | cut -d'.' -f3) > 110 ]]; then
echo "Couldn't determine the target Elasticsearch version (post soup version) to ensure compatibility with current Elasticsearch version. Exiting"
exit 160
fi
# allow upgrade to version < 2.4.110 without checking ES version compatibility
return 0
fi
if [[ " ${es_upgrade_map[$es_version]} " =~ " $target_es_version " || "$es_version" == "$target_es_version" ]]; then
# supported upgrade
return 0
else
compatible_versions=${es_upgrade_map[$es_version]}
next_step_so_version=${es_to_so_version[${compatible_versions##* }]}
echo -e "\n##############################################################################################################################\n"
echo -e "You are currently running Security Onion $INSTALLEDVERSION. You will need to update to version $next_step_so_version before updating to $(cat $UPDATE_DIR/VERSION).\n"
if [[ $is_airgap -eq 0 ]]; then
echo "You can download the $next_step_so_version ISO image from https://download.securityonion.net/file/securityonion/securityonion-$next_step_so_version.iso"
else
echo "You can use the following soup command to upgrade to $next_step_so_version;"
echo -e " sudo BRANCH=$next_step_so_version soup\n"
fi
echo "*** Once you have updated to $next_step_so_version, you can then run soup again to update to $(cat $UPDATE_DIR/VERSION). ***"
echo -e "\n###############################################################################################################################\n"
exit 160
fi
}
# Keeping this block in case we need to do a hotfix that requires salt update
apply_hotfix() {
if [[ "$INSTALLEDVERSION" == "2.4.20" ]] ; then
@@ -1781,8 +1708,6 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script
verify_es_version_compatibility
echo "Let's see if we need to update Security Onion."
upgrade_check
upgrade_space
@@ -1948,7 +1873,7 @@ main() {
if [[ $is_airgap -eq 0 ]]; then
echo ""
echo "Cleaning repos on remote Security Onion nodes."
salt -C 'not *_eval and not *_manager and not *_managersearch and not *_standalone and G@os:CentOS' cmd.run "yum clean all"
salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' cmd.run "dnf clean all"
echo ""
fi
fi
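
The `hash_normalized_file` change in the hunks above appends a newline before hashing so that files with and without a trailing newline normalize to the same digest. A Python sketch of that normalization (illustrative only, not the shipped shell code) is:

```
# Sketch: normalize a config file the way hash_normalized_file now does -
# trim whitespace, drop blank lines, mask the manager URL, and guarantee a
# trailing newline - then take the SHA-256 of the result.
import hashlib
import re

def hash_normalized_file(path: str) -> str:
    lines = []
    with open(path, encoding="utf-8", errors="replace") as f:
        for raw in f:
            line = raw.strip()
            if not line:
                continue
            lines.append(re.sub(r"--url=http://[^:]+:7788",
                                "--url=http://MANAGER:7788", line))
    data = ("\n".join(lines) + "\n").encode() if lines else b""
    return hashlib.sha256(data).hexdigest()
```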

View File

@@ -2653,19 +2653,15 @@ soc:
thresholdColorRatioMax: 1
availableModels:
- id: sonnet-4.5
displayName: Claude Sonnet 4.5
displayName: Claude Sonnet 4.5 ($$$)
origin: USA
contextLimitSmall: 200000
contextLimitLarge: 1000000
lowBalanceColorAlert: 500000
enabled: true
- id: gptoss-120b
displayName: GPT-OSS 120B
contextLimitSmall: 128000
contextLimitLarge: 128000
lowBalanceColorAlert: 500000
enabled: true
- id: qwen-235b
displayName: QWEN 235B
displayName: QWEN 235B ($)
origin: China
contextLimitSmall: 256000
contextLimitLarge: 256000
lowBalanceColorAlert: 500000

View File

@@ -6,6 +6,7 @@
# This script queries Elasticsearch for Custom Detections and all Overrides,
# and git commits them to disk at $OUTPUT_DIR
import argparse
import os
import subprocess
import json
@@ -18,10 +19,10 @@ from datetime import datetime
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Constants
ES_URL = "https://localhost:9200/so-detection/_search"
DEFAULT_INDEX = "so-detection"
DEFAULT_OUTPUT_DIR = "/nsm/backup/detections/repo"
QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
OUTPUT_DIR = "/nsm/backup/detections/repo"
AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"
def get_auth_credentials(auth_file):
@@ -30,9 +31,10 @@ def get_auth_credentials(auth_file):
if line.startswith('user ='):
return line.split('=', 1)[1].strip().replace('"', '')
def query_elasticsearch(query, auth):
def query_elasticsearch(query, auth, index):
url = f"https://localhost:9200/{index}/_search"
headers = {"Content-Type": "application/json"}
response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
response = requests.get(url, headers=headers, data=query, auth=auth, verify=False)
response.raise_for_status()
return response.json()
@@ -47,12 +49,12 @@ def save_content(hit, base_folder, subfolder="", extension="txt"):
f.write(content)
return file_path
def save_overrides(hit):
def save_overrides(hit, output_dir):
so_detection = hit["_source"]["so_detection"]
public_id = so_detection["publicId"]
overrides = so_detection["overrides"]
language = so_detection["language"]
folder = os.path.join(OUTPUT_DIR, language, "overrides")
folder = os.path.join(output_dir, language, "overrides")
os.makedirs(folder, exist_ok=True)
extension = "yaml" if language == "sigma" else "txt"
file_path = os.path.join(folder, f"{public_id}.{extension}")
@@ -60,20 +62,20 @@ def save_overrides(hit):
f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
return file_path
def ensure_git_repo():
if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
def ensure_git_repo(output_dir):
if not os.path.isdir(os.path.join(output_dir, '.git')):
subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)
subprocess.run(["git", "-C", output_dir, "init"], check=True)
subprocess.run(["git", "-C", output_dir, "remote", "add", "origin", "default"], check=True)
def commit_changes():
ensure_git_repo()
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
def commit_changes(output_dir):
ensure_git_repo(output_dir)
subprocess.run(["git", "-C", output_dir, "config", "user.email", "securityonion@local.invalid"], check=True)
subprocess.run(["git", "-C", output_dir, "config", "user.name", "securityonion"], check=True)
subprocess.run(["git", "-C", output_dir, "add", "."], check=True)
status_result = subprocess.run(["git", "-C", output_dir, "status"], capture_output=True, text=True)
print(status_result.stdout)
commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
commit_result = subprocess.run(["git", "-C", output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
if commit_result.returncode == 1:
print("No changes to commit.")
elif commit_result.returncode == 0:
@@ -81,29 +83,41 @@ def commit_changes():
else:
commit_result.check_returncode()
def parse_args():
parser = argparse.ArgumentParser(description="Backup custom detections and overrides from Elasticsearch")
parser.add_argument("--output", "-o", default=DEFAULT_OUTPUT_DIR,
help=f"Output directory for backups (default: {DEFAULT_OUTPUT_DIR})")
parser.add_argument("--index", "-i", default=DEFAULT_INDEX,
help=f"Elasticsearch index to query (default: {DEFAULT_INDEX})")
return parser.parse_args()
def main():
args = parse_args()
output_dir = args.output
index = args.index
try:
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
os.makedirs(OUTPUT_DIR, exist_ok=True)
print(f"Backing up Custom Detections and all Overrides to {output_dir} - {timestamp}\n")
os.makedirs(output_dir, exist_ok=True)
auth_credentials = get_auth_credentials(AUTH_FILE)
username, password = auth_credentials.split(':', 1)
auth = HTTPBasicAuth(username, password)
# Query and save custom detections
detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
detections = query_elasticsearch(QUERY_DETECTIONS, auth, index)["hits"]["hits"]
for hit in detections:
save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
save_content(hit, output_dir, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
# Query and save overrides
overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
overrides = query_elasticsearch(QUERY_OVERRIDES, auth, index)["hits"]["hits"]
for hit in overrides:
save_overrides(hit)
commit_changes()
save_overrides(hit, output_dir)
commit_changes(output_dir)
timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
print(f"Backup Completed - {timestamp}")
except Exception as e:
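With parse_args in place, the output directory and index become command-line options (-o/--output and -i/--index) that default to the previously hard-coded values. A short sketch of the same argument surface, showing how an override resolves; the override value here is an example, not a script default:

import argparse

parser = argparse.ArgumentParser(
    description="Backup custom detections and overrides from Elasticsearch")
parser.add_argument("--output", "-o", default="/nsm/backup/detections/repo")
parser.add_argument("--index", "-i", default="so-detection")

# Override only the index; the output keeps its default.
args = parser.parse_args(["--index", "so-detection-test"])
print(args.output, args.index)  # /nsm/backup/detections/repo so-detection-test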

View File

@@ -57,12 +57,12 @@ class TestBackupScript(unittest.TestCase):
mock_response.json.return_value = {'hits': {'hits': []}}
mock_response.raise_for_status = MagicMock()
mock_get.return_value = mock_response
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX)
self.assertEqual(response, {'hits': {'hits': []}})
mock_get.assert_called_once_with(
ds.ES_URL,
f"https://localhost:9200/{ds.DEFAULT_INDEX}/_search",
headers={"Content-Type": "application/json"},
data=ds.QUERY_DETECTIONS,
auth=self.auth,
@@ -81,7 +81,7 @@ class TestBackupScript(unittest.TestCase):
@patch('os.makedirs')
@patch('builtins.open', new_callable=mock_open)
def test_save_overrides(self, mock_file, mock_makedirs):
file_path = ds.save_overrides(self.mock_override_hit)
file_path = ds.save_overrides(self.mock_override_hit, self.output_dir)
expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
self.assertEqual(file_path, expected_path)
mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)
@@ -90,9 +90,9 @@ class TestBackupScript(unittest.TestCase):
@patch('subprocess.run')
def test_ensure_git_repo(self, mock_run):
mock_run.return_value = MagicMock(returncode=0)
ds.ensure_git_repo()
ds.ensure_git_repo(self.output_dir)
mock_run.assert_has_calls([
call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
call(["git", "-C", self.output_dir, "init"], check=True),
@@ -106,9 +106,9 @@ class TestBackupScript(unittest.TestCase):
mock_commit_result = MagicMock(returncode=1)
# Ensure sufficient number of MagicMock instances for each subprocess.run call
mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]
print("Running test_commit_changes...")
ds.commit_changes()
ds.commit_changes(self.output_dir)
print("Finished test_commit_changes.")
mock_run.assert_has_calls([
@@ -120,39 +120,45 @@ class TestBackupScript(unittest.TestCase):
])
@patch('builtins.print')
@patch('so-detections-backup.commit_changes')
@patch('so-detections-backup.save_overrides')
@patch('so-detections-backup.save_content')
@patch('so-detections-backup.query_elasticsearch')
@patch('so-detections-backup.get_auth_credentials')
@patch.object(ds, 'commit_changes')
@patch.object(ds, 'save_overrides')
@patch.object(ds, 'save_content')
@patch.object(ds, 'query_elasticsearch')
@patch.object(ds, 'get_auth_credentials')
@patch('os.makedirs')
def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
@patch.object(ds, 'parse_args')
def test_main(self, mock_parse_args, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
mock_args = MagicMock()
mock_args.output = self.output_dir
mock_args.index = ds.DEFAULT_INDEX
mock_parse_args.return_value = mock_args
mock_get_auth.return_value = self.auth_credentials
mock_query.side_effect = [
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
{'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
]
with patch('datetime.datetime') as mock_datetime:
mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
ds.main()
mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
mock_query.assert_has_calls([
call(ds.QUERY_DETECTIONS, self.auth),
call(ds.QUERY_OVERRIDES, self.auth)
call(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX),
call(ds.QUERY_OVERRIDES, self.auth, ds.DEFAULT_INDEX)
])
mock_save_content.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
self.output_dir,
"sigma",
"yaml"
)
mock_save_overrides.assert_called_once_with(
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}},
self.output_dir
)
mock_commit.assert_called_once()
mock_commit.assert_called_once_with(self.output_dir)
mock_print.assert_called()
if __name__ == '__main__':
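The test patches switch from string targets like 'so-detections-backup.commit_changes' to patch.object(ds, ...), which patches attributes on the already-loaded module object rather than resolving the hyphenated script name by dotted-string lookup. A sketch of loading and patching a script this way; the file path is an assumption, adjust it to wherever the script actually lives:

import importlib.util
from unittest.mock import MagicMock, patch

# Load a script whose filename is not a valid module identifier.
spec = importlib.util.spec_from_file_location("ds", "so-detections-backup.py")
ds = importlib.util.module_from_spec(spec)
spec.loader.exec_module(ds)

# Patch by object reference rather than by dotted-string target.
with patch.object(ds, "commit_changes", MagicMock()) as mock_commit:
    ds.commit_changes("/tmp/repo")
    mock_commit.assert_called_once_with("/tmp/repo")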

View File

@@ -652,7 +652,6 @@ soc:
assistant:
apiUrl:
description: The URL of the AI gateway.
advanced: True
global: True
healthTimeoutSeconds:
description: Timeout in seconds for the Onion AI health check.
@@ -708,6 +707,9 @@ soc:
- field: displayName
label: Display Name
required: True
- field: origin
label: Country of Origin for the Model Training
required: false
- field: contextLimitSmall
label: Context Limit (Small)
forcedType: int
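These annotations expose apiUrl outside the advanced view and add an optional origin field to the model form alongside the required displayName. A small Python sketch of how the required/optional split could be checked against a model entry; the validate helper is hypothetical, and only the field names and required flags come from the YAML above:

FIELDS = [
    {"field": "displayName", "label": "Display Name", "required": True},
    {"field": "origin", "label": "Country of Origin for the Model Training", "required": False},
]

def validate(entry: dict) -> list[str]:
    # Return labels of required fields missing from the entry.
    return [f["label"] for f in FIELDS if f["required"] and not entry.get(f["field"])]

print(validate({"origin": "USA"}))                 # ['Display Name']
print(validate({"displayName": "QWEN 235B ($)"}))  # []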

Binary file not shown.