Compare commits


377 Commits

Author SHA1 Message Date
Mike Reeves
65165e52f4 Merge pull request #8086 from Security-Onion-Solutions/dev
2.3.130
2022-06-07 15:51:12 -04:00
Mike Reeves
2cceae54df Merge pull request #8087 from Security-Onion-Solutions/2.3.130
2.3.130
2022-06-07 13:44:38 -04:00
Mike Reeves
8912e241aa 2.3.130 2022-06-07 13:41:51 -04:00
Mike Reeves
7357f157ec Merge pull request #8085 from Security-Onion-Solutions/2.3.130
2.3.130
2022-06-07 12:04:47 -04:00
Mike Reeves
37881bd4b6 2.3.130 2022-06-07 11:34:10 -04:00
Josh Brower
2574f0e23d Merge pull request #8081 from Security-Onion-Solutions/fix/fleetdm-websockets
Allow websockets for fleetdm
2022-06-06 19:15:02 -04:00
Josh Brower
c9d9804c3a Allow websockets for fleetdm 2022-06-06 17:26:24 -04:00
Doug Burks
73baa1d2f0 Merge pull request #8073 from Security-Onion-Solutions/dougburks-patch-1
Update motd.md to include links to Dashboards and Cases
2022-06-04 08:53:54 -04:00
Doug Burks
dce415297c improve readability in motd.md 2022-06-04 06:59:09 -04:00
Doug Burks
de126647f8 Update motd.md to include links to Dashboards and Cases 2022-06-04 06:55:08 -04:00
Doug Burks
c34f456151 Merge pull request #8069 from Security-Onion-Solutions/dougburks-patch-1
add bar and pie examples to overview dashboard in dashboards.queries.…
2022-06-03 15:04:16 -04:00
Doug Burks
83bff5ee87 add bar and pie examples to overview dashboard in dashboards.queries.json 2022-06-03 15:02:40 -04:00
Doug Burks
918f431728 Merge pull request #8065 from Security-Onion-Solutions/dougburks-patch-1
Add sankey diagram to default dashboard in dashboards.queries.json
2022-06-03 11:13:39 -04:00
Doug Burks
4a886338c8 fix description field for default dashboard in dashboards.queries.json 2022-06-03 11:10:01 -04:00
Doug Burks
7da1802eae Add sankey diagram to default dashboard in dashboards.queries.json 2022-06-03 11:03:48 -04:00
Mike Reeves
ff92b524c2 Merge pull request #8062 from Security-Onion-Solutions/TOoSmOotH-patch-1
Update soup
2022-06-02 11:51:42 -04:00
Mike Reeves
395eaa39b4 Update soup 2022-06-02 11:45:37 -04:00
Mike Reeves
2867a32931 Merge pull request #8061 from Security-Onion-Solutions/soup130
soup for 130
2022-06-02 10:42:17 -04:00
Mike Reeves
fce43cf390 soup for 130 2022-06-02 10:33:18 -04:00
Josh Patterson
e5c9b91529 Merge pull request #8054 from Security-Onion-Solutions/dmz_receiver
Dmz receiver
2022-06-01 15:31:42 -04:00
m0duspwnens
e5b74bcb78 remove podman state 2022-06-01 15:26:25 -04:00
Doug Burks
91f8d3e5e9 Merge pull request #8050 from Security-Onion-Solutions/fix/elastalert-query
FIX: Elastalert query in Hunt #8049
2022-05-31 16:54:34 -04:00
Doug Burks
269b16bbfd https://github.com/Security-Onion-Solutions/securityonion/issues/8049 2022-05-31 16:51:05 -04:00
Doug Burks
cd382a1b25 FIX: Elastalert query in Hunt #8049 2022-05-31 16:50:32 -04:00
Doug Burks
e1c9b0d108 FIX: Elastalert query in Hunt #8049 2022-05-31 16:47:52 -04:00
Doug Burks
9a98667e85 FIX: Elastalert query in Hunt #8049 2022-05-31 16:47:11 -04:00
weslambert
494ce0756d Merge pull request #8045 from Security-Onion-Solutions/fix/mhr_naming
Fix naming for Malware Hash Registry analyzer
2022-05-31 07:52:48 -04:00
Wes Lambert
7f30a364ee Make sure everything is added back after renaming mhr to malwarehashregistry 2022-05-31 11:44:35 +00:00
Wes Lambert
c82aa89497 Fix Malware Hash Registry naming so it's more descriptive in SOC 2022-05-31 11:41:48 +00:00
Josh Brower
025677a1e6 Merge pull request #8034 from Security-Onion-Solutions/feature/sigmafp
Feature/SigmaCustomFilters
2022-05-31 07:25:44 -04:00
Josh Brower
a5361fb745 Change Target_log name 2022-05-28 18:07:05 -04:00
Mike Reeves
30d7801ae1 Merge pull request #8033 from Security-Onion-Solutions/kilo 2022-05-28 11:38:35 -04:00
Jason Ertel
210bc556db Add logscan and suricata variants for cloud tests to move from PM into the cloud and help alleviate disk contention 2022-05-28 10:29:04 -04:00
Jason Ertel
e87e672b9e Add logscan and suricata variants for cloud tests to move from PM into the cloud and help alleviate disk contention 2022-05-28 10:28:20 -04:00
Jason Ertel
a70da41f20 Merge pull request #8032 from Security-Onion-Solutions/kilo
Exclude pkg upgrade retry error logs from failing setup
2022-05-28 08:34:40 -04:00
Jason Ertel
8bb02763dc Exclude pkg upgrade retry error logs from failing setup 2022-05-28 08:28:10 -04:00
weslambert
a59ada695b Merge pull request #8031 from Security-Onion-Solutions/fix/screenshots
Fix/screenshots
2022-05-27 17:05:51 -04:00
doug
b93a108386 update Cases screenshot in README 2022-05-27 16:33:08 -04:00
doug
6089f3906d update screenshots and README 2022-05-27 16:32:00 -04:00
Josh Brower
94ee45ac63 Merge pull request #8029 from Security-Onion-Solutions/upgrade/navigator
Upgrade Navigator to 4.6.4
2022-05-27 14:46:59 -04:00
Josh Brower
43cb78a6a8 Upgrade Navigator 2022-05-27 14:21:11 -04:00
Josh Patterson
76bb1fbbcc Merge pull request #8014 from Security-Onion-Solutions/issue/7918
manage suricata classifications.config
2022-05-26 13:13:03 -04:00
m0duspwnens
53d6e1d30d simplify 2022-05-26 11:51:17 -04:00
m0duspwnens
1bfde852f5 manage suricata classifications.config https://github.com/Security-Onion-Solutions/securityonion/issues/7918 2022-05-26 11:43:31 -04:00
m0duspwnens
53883e4ade manage suricata classifications.config https://github.com/Security-Onion-Solutions/securityonion/issues/7918 2022-05-26 11:40:33 -04:00
weslambert
1a0ac4d253 Merge pull request #8007 from Security-Onion-Solutions/fix/filestream-id
Add filestream input ID for RITA logs
2022-05-25 10:11:36 -04:00
weslambert
44622350ea Add ID for RITA filestream inputs 2022-05-25 10:09:01 -04:00
weslambert
99864f4787 Merge pull request #8001 from Security-Onion-Solutions/feature/analyzer_readme
Add configuration requirements for various analyzers
2022-05-25 09:33:07 -04:00
Doug Burks
6bd02c0b99 Merge pull request #8003 from Security-Onion-Solutions/feature/elastic-7.17.4
UPGRADE: Elastic 7.17.4 #8002
2022-05-24 13:24:13 -04:00
Doug Burks
1d0bb21908 UPGRADE: Elastic 7.17.4 #8002 2022-05-24 13:19:30 -04:00
Doug Burks
bde06e7ec5 UPGRADE: Elastic 7.17.4 #8002 2022-05-24 13:19:01 -04:00
Wes Lambert
b93512eb01 Adjust verbiage around pillar configuration 2022-05-24 12:36:32 +00:00
Wes Lambert
92dee14ee8 Add configuration requirements for various analyzers 2022-05-24 12:29:14 +00:00
weslambert
3e6dfcfaca Merge pull request #7996 from Security-Onion-Solutions/weslambert-patch-2
Create Virustotal README
2022-05-23 11:43:43 -04:00
weslambert
a6f1bf3aef Create Virustotal README 2022-05-23 11:39:44 -04:00
Jason Ertel
88f17f037e Merge pull request #7982 from Security-Onion-Solutions/kilo
Upgrade to Kratos 0.9.0-alpha.3
2022-05-19 13:28:58 -04:00
Jason Ertel
c20859f8c3 Upgrade to Kratos 0.9.0-alpha.3 2022-05-18 17:05:21 -04:00
Jason Ertel
c95bafd521 Merge pull request #7969 from Security-Onion-Solutions/fix/helpers-analyzers
Only import yaml module when config is loaded
2022-05-18 07:15:32 -04:00
Wes Lambert
429ccb2dcc Only import yaml module when config is loaded 2022-05-18 02:07:39 +00:00
weslambert
94ca3ddbda Merge pull request #7961 from Security-Onion-Solutions/weslambert-patch-1
Add information for MHR and WhoisLookup, and other minor updates
2022-05-17 13:33:24 -04:00
weslambert
d3206a048f Add information for MHR and WhoisLookup, and other minor updates 2022-05-17 12:49:16 -04:00
weslambert
ff855eb8f7 Merge pull request #7958 from Security-Onion-Solutions/feature/mhr_analyzer
Add Team Cymru Malware Hash Registry Analyzer
2022-05-17 12:42:01 -04:00
Wes Lambert
8af1f19ac3 Another no_results change 2022-05-17 16:12:43 +00:00
Wes Lambert
e4a7e3cba6 Change 'No results found.' to 'no_results' 2022-05-17 16:11:58 +00:00
weslambert
2688083ff1 Merge pull request #7959 from Security-Onion-Solutions/feature/whoislookup-analyzer
Add Whoislookup RDAP-based analyzer
2022-05-17 12:09:06 -04:00
Wes Lambert
766e9748c5 Add Whoislookup RDAP-based analyzer 2022-05-17 15:52:12 +00:00
weslambert
3761b491c0 Remove whitespace 2022-05-17 10:50:33 -04:00
Wes Lambert
e8fc3ccdf4 Add Team Cymru Malware Hash Registry Analyzer 2022-05-17 14:44:53 +00:00
Doug Burks
eb9597217c Merge pull request #7949 from Security-Onion-Solutions/fix/dashboards-hunt-queries
update dashboards.queries.json and hunt.queries.json
2022-05-16 08:47:06 -04:00
doug
5cbb50a781 update dashboards.queries.json and hunt.queries.json 2022-05-16 08:33:48 -04:00
Jason Ertel
685789de33 Merge pull request #7936 from Security-Onion-Solutions/kilo
Improved unit test coverage of new analyzers; Utilize localized summa…
2022-05-12 16:47:18 -04:00
Jason Ertel
b45b6b198b Improved unit test coverage of new analyzers; Utilize localized summaries; Require 100% code coverage on analyzers 2022-05-12 16:32:47 -04:00
weslambert
6c506bbab0 Merge pull request #7935 from Security-Onion-Solutions/fix/pulsedive
Fix Pulsedive analyzer logic
2022-05-12 15:20:15 -04:00
Wes Lambert
3dc266cfa9 Add test for when indicator is not found 2022-05-12 19:02:41 +00:00
Wes Lambert
a233c08830 Update logic to handle indicators that are not present in database. 2022-05-12 19:02:02 +00:00
Doug Burks
58b049257d Merge pull request #7932 from Security-Onion-Solutions/dougburks-patch-1
remove duplicate showSubtitle from hunt.queries.json
2022-05-12 09:24:18 -04:00
Doug Burks
6ed3f42449 remove duplicate showSubtitle from hunt.queries.json 2022-05-12 09:23:00 -04:00
m0duspwnens
d8abc0a195 if in dmz_nodes don't add to filebeat 2022-05-11 11:51:18 -04:00
m0duspwnens
a641346c02 prevent nodes with logstash:dmz:true from being added to logstash:nodes pillar 2022-05-10 17:28:19 -04:00
Jason Ertel
60b55acd6f Merge pull request #7926 from Security-Onion-Solutions/kilo
Add support for analyzers in airgapped environments
2022-05-10 17:12:18 -04:00
Jason Ertel
35e47c8c3e Add support for analyzers in airgapped environments 2022-05-10 16:51:00 -04:00
weslambert
7f797a11f8 Merge pull request #7924 from Security-Onion-Solutions/analyzer-docs
Update analyzer docs with information about analyzers that require au…
2022-05-10 09:40:50 -04:00
Jason Ertel
91a7f25d3a Corrected brand name capitalization 2022-05-10 09:39:19 -04:00
weslambert
34d57c386b Update analyzer docs with information about analyzers that require authentication 2022-05-10 09:32:18 -04:00
weslambert
000e813fbb Merge pull request #7921 from Security-Onion-Solutions/fix/analyzer-packages
Update analyzer packages to those downloaded by Alpine and add additional build script option
2022-05-09 16:43:31 -04:00
Wes Lambert
555ca2e277 Update analyzer build/testing script to download necessary Python packages 2022-05-09 20:06:39 +00:00
Wes Lambert
32adba6141 Update analyzer packages with those built from native (Alpine) Docker image 2022-05-09 20:04:41 +00:00
Jason Ertel
e19635e44a Merge pull request #7920 from Security-Onion-Solutions/kilo
Disable MRU queries on dashboards
2022-05-09 15:08:55 -04:00
Jason Ertel
31c04aabdd Disable MRU queries on dashboards 2022-05-09 15:06:43 -04:00
Jason Ertel
dc209a37cd Merge pull request #7916 from Security-Onion-Solutions/kilo
Disable actions on dashboards group-by tables
2022-05-09 11:52:22 -04:00
Jason Ertel
3f35dc54d2 Disable actions on dashboards group-by tables 2022-05-09 11:44:39 -04:00
Jason Ertel
0e64a9e5c3 Merge pull request #7912 from Security-Onion-Solutions/kilo
Add dashboard ref to soc.json
2022-05-06 15:18:05 -04:00
Jason Ertel
0786191fc9 Add dashboard ref to soc.json 2022-05-06 15:16:27 -04:00
Jason Ertel
60763c38db Merge pull request #7911 from Security-Onion-Solutions/kilo
Analyzers + Dashboards
2022-05-06 13:50:54 -04:00
weslambert
9800f59ed7 Add Urlscan to observable support matrix 2022-05-06 13:11:43 -04:00
Wes Lambert
ccac71f649 Fix formatting/whitespace 2022-05-06 17:08:40 +00:00
Wes Lambert
1990ba0cf0 Fix formatting/whitespace 2022-05-06 17:08:33 +00:00
Wes Lambert
8ff5778569 Add Urlscan analyzer and tests 2022-05-06 17:01:06 +00:00
Jason Ertel
bee4cf4c52 Fix typo in analyzer desc 2022-05-06 09:20:03 -04:00
Jason Ertel
105c95909c Dashboard queries 2022-05-04 19:32:06 -04:00
Jason Ertel
890bcd58f9 Merge branch 'dev' into kilo 2022-05-04 19:25:08 -04:00
weslambert
a96c665d04 Change test name for EmailRep 2022-05-03 14:13:25 -04:00
weslambert
f3a91d9fcd Add EmailRep analyzer to observable support matrix 2022-05-03 10:10:57 -04:00
Wes Lambert
5a9acb3857 Add EmailRep analyzer and tests 2022-05-03 14:06:32 +00:00
Wes Lambert
8b5666b238 Ensure API key is used 2022-05-03 12:48:06 +00:00
weslambert
efb229cfcb Update to match configuration in analyzer dir 2022-05-02 16:35:21 -04:00
weslambert
2fcb2b081d Update allowed complexity to 12 2022-05-02 16:14:43 -04:00
weslambert
25f17a5efd Update allowed complexity to 11 2022-04-29 09:42:57 -04:00
weslambert
66b4fe9f58 Add additional information around URI and User Agent 2022-04-28 17:14:36 -04:00
Wes Lambert
c001708707 Add Pulsedive analyzer and tests 2022-04-28 20:56:03 +00:00
weslambert
4edd729596 Add initial supported observable matrix/table 2022-04-27 08:58:34 -04:00
Wes Lambert
76f183b112 Add Greynoise analyzer and tests 2022-04-26 17:25:35 +00:00
Wes Lambert
bd63753d80 Update analyzer name/description 2022-04-25 19:27:10 +00:00
Wes Lambert
15fcaa7030 Add localfile analyzer and tests 2022-04-25 19:23:35 +00:00
Jason Ertel
71a86b0a3c Merge pull request #7856 from Security-Onion-Solutions/bumpver
Bump version
2022-04-25 13:01:19 -04:00
Jason Ertel
e2145720bd Bump version 2022-04-25 12:10:29 -04:00
Mike Reeves
b4aa59c619 Merge pull request #7853 from Security-Onion-Solutions/dev
2.3.120
2022-04-25 11:33:05 -04:00
Mike Reeves
6975153cf4 Merge pull request #7852 from Security-Onion-Solutions/2.3.120
2.3.120
2022-04-25 08:59:52 -04:00
Mike Reeves
0935f51667 2.3.120 2022-04-25 08:57:35 -04:00
Mike Reeves
f92d65737b 2.3.120 2022-04-25 08:53:04 -04:00
Josh Patterson
8f5967911b Merge pull request #7847 from Security-Onion-Solutions/m0duspwnens-patch-1
add eval
2022-04-22 16:06:01 -04:00
Josh Patterson
80eb31368a add eval 2022-04-22 16:04:29 -04:00
Jason Ertel
d8fdf2b701 Merge branch 'dev' into kilo 2022-04-22 15:11:24 -04:00
Jason Ertel
459d388614 Only override nameservers if the first nameserver given is non empty 2022-04-22 15:08:56 -04:00
Wes Lambert
fbf6e64e67 Add initial OTX analyzer and tests 2022-04-22 17:13:40 +00:00
weslambert
677db7c563 Merge pull request #7841 from Security-Onion-Solutions/weslambert-patch-2
Update shard count for Zeek in setup
2022-04-21 17:27:57 -04:00
weslambert
1bb216954c Merge pull request #7840 from Security-Onion-Solutions/weslambert-patch-1
Update shards for Zeek
2022-04-21 17:26:57 -04:00
weslambert
c81988ab00 Update shard count for Zeek in setup 2022-04-21 17:26:30 -04:00
weslambert
542db5b7f5 Update defaults.yaml 2022-04-21 17:24:24 -04:00
Wes Lambert
b2db32a2c7 Add function/test for non-existent VT api_key 2022-04-21 17:33:24 +00:00
Wes Lambert
9287d6adf7 Reduce size of test output for test 2022-04-21 16:56:22 +00:00
Wes Lambert
c8e189f35a Add source-packages for JA3er 2022-04-21 16:46:45 +00:00
Wes Lambert
5afcc8de4f Add JA3er analyzer and associated test 2022-04-21 16:42:46 +00:00
weslambert
d7eed52fae Change -f to -r 2022-04-21 09:46:44 -04:00
Doug Burks
2910b56ea1 Merge pull request #7835 from Security-Onion-Solutions/elastic-7.17.3
UPGRADE: Elastic 7.17.3 #7807
2022-04-21 09:02:51 -04:00
Doug Burks
e608285341 UPGRADE: Elastic 7.17.3 #7807 2022-04-21 08:57:08 -04:00
Doug Burks
04856540dc UPGRADE: Elastic 7.17.3 #7807 2022-04-21 08:54:09 -04:00
Doug Burks
feb7eeeb8e UPGRADE: Elastic 7.17.3 #7807 2022-04-21 08:47:40 -04:00
Doug Burks
44f4b1da7f Merge pull request #7832 from Security-Onion-Solutions/fix/prevent-multiple-instances
FIX: Prevent multiple instances of so-sensor-clean and so-playbook-sync #6622
2022-04-20 17:00:09 -04:00
Doug Burks
1edb443c5d so-playbook-sync pgrep should be more strict to avoid multiple matches on Ubuntu 2022-04-20 16:48:26 -04:00
Doug Burks
8fc03afdc0 so-sensor-clean pgrep should be more strict to avoid matching multiples on Ubuntu 2022-04-20 16:47:18 -04:00
Mike Reeves
fe09b5b0d1 Merge pull request #7831 from Security-Onion-Solutions/awlocal
Remove setup from auto starting if you choose to not enter the grid
2022-04-20 14:42:58 -04:00
Mike Reeves
c3952e94c8 Remove setup from auto starting if you choose to not enter the grid 2022-04-20 14:36:38 -04:00
Doug Burks
3aac644da5 Merge pull request #7830 from Security-Onion-Solutions/dougburks-patch-1
FIX: Improve Zeek file extraction #7829
2022-04-20 14:13:13 -04:00
Doug Burks
15ef0968d9 FIX: Improve Zeek file extraction #7829 2022-04-20 14:01:46 -04:00
Jason Ertel
aeb70dad8f Doc updates 2022-04-19 14:31:21 -04:00
Jason Ertel
4129cef9fb Add new spamhaus analyzer 2022-04-19 12:12:52 -04:00
Josh Patterson
40d9335573 Merge pull request #7822 from Security-Onion-Solutions/workstation_state
add securityonion-strelka-oneshot and securityonion-strelka-fileshot to workstation
2022-04-19 09:21:35 -04:00
m0duspwnens
807f6adf1e add securityonion-strelka-oneshot and securityonion-strelka-fileshot to workstation 2022-04-19 09:19:09 -04:00
Doug Burks
6339ee3bf3 Merge pull request #7818 from Security-Onion-Solutions/dougburks-patch-1
Slight change to IDH verbiage in so-whiptail
2022-04-18 16:35:22 -04:00
Doug Burks
5d62ece03b Slight change to IDH verbiage in so-whiptail 2022-04-18 16:33:54 -04:00
Doug Burks
6905ca276a Merge pull request #7816 from Security-Onion-Solutions/dougburks-patch-1
remove old comments from so-whiptail
2022-04-18 11:30:43 -04:00
Doug Burks
3682754399 remove old comments from so-whiptail 2022-04-18 11:29:46 -04:00
Jason Ertel
0cb73d8f6a Merge branch 'dev' into kilo 2022-04-18 11:04:32 -04:00
Mike Reeves
186258687e Merge pull request #7815 from Security-Onion-Solutions/awlocal
Fix Analyst Install Loop
2022-04-18 11:04:10 -04:00
Mike Reeves
012ff3e1bc Fix Analyst Install Loop 2022-04-18 11:02:19 -04:00
Josh Brower
891a197a6a Merge pull request #7814 from Security-Onion-Solutions/defensivedepth-patch-2
Fix ES/LS Log Pruning
2022-04-18 10:45:27 -04:00
Josh Brower
b35b505f0a Fix pattern matching 2022-04-18 10:39:04 -04:00
Josh Brower
2b39570b08 Fix matching logic 2022-04-18 10:37:38 -04:00
Jason Ertel
159122b52c Merge branch 'dev' into kilo 2022-04-18 10:11:37 -04:00
Doug Burks
3fb7399000 Merge pull request #7813 from Security-Onion-Solutions/dougburks-patch-1
Remove distributed verbiage from other node option in so-whiptail
2022-04-18 08:24:52 -04:00
Doug Burks
400879c079 Remove distributed verbiage from other node option in so-whiptail 2022-04-18 07:53:57 -04:00
Doug Burks
62f3f13bbc Merge pull request #7803 from Security-Onion-Solutions/dougburks-patch-1
move thehive removal from up_to_2.3.120 to post_to_2.3.120
2022-04-15 15:48:12 -04:00
Doug Burks
0eda9a3bd7 move thehive removal from up_to_2.3.120 to post_to_2.3.120 2022-04-15 15:45:01 -04:00
Doug Burks
ee00678362 Merge pull request #7802 from Security-Onion-Solutions/dougburks-patch-1
Replace old saltstack repo in so-preflight
2022-04-15 13:17:14 -04:00
Doug Burks
ce192c2526 Update so-preflight 2022-04-15 13:11:15 -04:00
Josh Brower
d60d31f723 Merge pull request #7801 from Security-Onion-Solutions/defensivedepth-patch-1
Remove thehive entries from so-status
2022-04-15 12:25:21 -04:00
Josh Brower
bd19da1878 Remove thehive entries from so-status 2022-04-15 12:21:56 -04:00
Doug Burks
f461d01961 Merge pull request #7800 from Security-Onion-Solutions/dougburks-patch-1
Improve grammar in so-whiptail
2022-04-15 10:52:29 -04:00
Doug Burks
a69d361d1b Improve grammar in so-whiptail 2022-04-15 10:45:34 -04:00
Josh Brower
19cba9dca9 Merge pull request #7798 from Security-Onion-Solutions/awlocal
Make analyst iso install init management interface
2022-04-15 07:26:53 -04:00
Mike Reeves
5081a81a6c Make analyst iso install init management interface 2022-04-14 20:00:58 -04:00
Josh Patterson
ba61057433 Merge pull request #7796 from Security-Onion-Solutions/fix_analyst_setup
Fix analyst setup
2022-04-14 16:12:53 -04:00
m0duspwnens
b8a80f76cf change words 2022-04-14 16:09:39 -04:00
Josh Patterson
be2573bb7d Merge pull request #7794 from Security-Onion-Solutions/soup_salt_influx
remove influxdb module patched state files when salt is upgraded
2022-04-14 16:08:10 -04:00
m0duspwnens
36aef87a3c remove cd before running so-setup analyst 2022-04-14 16:03:43 -04:00
m0duspwnens
02c19da3c4 remove influxdb module patched state files when salt is upgraded 2022-04-14 15:00:14 -04:00
Josh Patterson
2d094a3bfc Merge pull request #7784 from Security-Onion-Solutions/workstation_script
modify so-analyst-install to work with new states and install on managers
2022-04-13 14:37:24 -04:00
m0duspwnens
371fda09db fix copy paste fail 2022-04-13 14:28:05 -04:00
m0duspwnens
149375115e warn about required reboot and prompt if reboot desired at completion of install 2022-04-13 14:26:14 -04:00
m0duspwnens
4728bea633 fix typo 2022-04-13 14:03:09 -04:00
m0duspwnens
3ee09db752 added warning about installing and ensure can only install workstation on centos 2022-04-13 13:39:48 -04:00
m0duspwnens
6477e6c5a2 added warning about installing and ensure can only install workstation on centos 2022-04-13 13:39:39 -04:00
m0duspwnens
2389d3fac9 modify so-analyst-install to work with new states and install on managers 2022-04-13 12:32:05 -04:00
Mike Reeves
ecc29b586d Merge pull request #7772 from Security-Onion-Solutions/awlocal 2022-04-12 15:45:56 -04:00
Mike Reeves
2977604d96 Merge branch 'awlocal' of https://github.com/Security-Onion-Solutions/securityonion into awlocal 2022-04-12 15:39:45 -04:00
Mike Reeves
5253cb5d25 Remove keys at the end of an install 2022-04-12 15:33:17 -04:00
Josh Brower
1cb5a791ca Add idh req_storage elif 2022-04-12 14:29:07 -04:00
Mike Reeves
8408628b03 Stop thehive on soup 2022-04-12 13:54:08 -04:00
Mike Reeves
02f4cd9926 Replace salt code on a saltstack update 2022-04-12 12:15:22 -04:00
Mike Reeves
c1824e9f17 Replace salt code on a saltstack update 2022-04-12 11:55:45 -04:00
Mike Reeves
081d7e3a09 Replace salt code on a saltstack update 2022-04-12 11:20:26 -04:00
Mike Reeves
a7221ba2b4 Remove summary for things the workstation doesn't care about 2022-04-12 11:06:12 -04:00
Mike Reeves
aa90a016d7 Change disk requirements for IDH 2022-04-12 10:44:45 -04:00
Josh Patterson
dbddff7be7 Merge pull request #7766 from Security-Onion-Solutions/issue/7763
Issue/7763
2022-04-11 16:44:04 -04:00
Josh Brower
f1574de827 Merge pull request #7765 from Security-Onion-Solutions/fix/compress-clean-elastic-logs
Compress + Clean ES & Logstash App Logs
2022-04-11 16:43:03 -04:00
Josh Brower
886d69fb38 Compress + Clean ES & Logstash App Logs 2022-04-11 16:09:24 -04:00
m0duspwnens
d68b6e7c9a only start if exit code != 0 2022-04-11 16:03:00 -04:00
m0duspwnens
d102ca298d move messages about starting services on soup failure before exit message 2022-04-11 16:01:36 -04:00
m0duspwnens
9914148441 more verbose 2022-04-11 15:51:11 -04:00
m0duspwnens
464772d7d3 start salt-master and salt-minion service if soup fails and exits 2022-04-11 15:43:09 -04:00
Mike Reeves
13f6957ae8 Merge pull request #7764 from Security-Onion-Solutions/awlocal 2022-04-11 15:40:06 -04:00
m0duspwnens
2a18059ad9 use quotes 2022-04-11 15:37:07 -04:00
m0duspwnens
01510c184a set_os and set_cron_service_name sooner 2022-04-11 15:36:02 -04:00
Mike Reeves
eb2d759bf8 Add more whiptail menus 2022-04-11 15:14:29 -04:00
Mike Reeves
5ed7361e3a Add more whiptail menus 2022-04-11 15:14:06 -04:00
m0duspwnens
6ed8694008 don't need to pass -t 2022-04-11 15:11:57 -04:00
m0duspwnens
79dc2374e0 check that salt-master is running before requiring manager 2022-04-11 15:09:00 -04:00
m0duspwnens
a2180a6721 ensure salt-master service is running before proceeding with soup 2022-04-11 15:01:41 -04:00
Mike Reeves
f9633e7287 Add more whiptail menus 2022-04-11 14:51:17 -04:00
Mike Reeves
0b2745b342 Sending things to the screen 2022-04-11 11:49:24 -04:00
Mike Reeves
ea34b69795 Sending things to the screen 2022-04-11 11:46:42 -04:00
Mike Reeves
97e691c321 Sending things to the screen 2022-04-11 11:43:13 -04:00
Mike Reeves
a3bf904e2d Import GPG 2022-04-11 11:32:08 -04:00
Mike Reeves
9ed49ef318 Import GPG 2022-04-11 11:29:56 -04:00
Mike Reeves
f7760394a1 Import GPG 2022-04-11 11:25:54 -04:00
Mike Reeves
d9416f3828 Salt local install of Analyst Workstation 2022-04-11 11:04:25 -04:00
Jason Ertel
2d025e944c Add yaml since helpers module uses it 2022-04-09 17:48:21 -04:00
Jason Ertel
202ca34c6f Remove obsolete source/site pkg dirs 2022-04-09 14:36:21 -04:00
Jason Ertel
f9568626f2 Merge branch 'dev' into kilo 2022-04-09 09:02:55 -04:00
Jason Ertel
224e30c0ee Change localized table layout 2022-04-08 17:31:15 -04:00
Jason Ertel
ebcfbaa06d Analyzer improvements 2022-04-08 16:57:40 -04:00
Josh Patterson
365866c9cc Merge pull request #7750 from Security-Onion-Solutions/issue_7730
ensure bash is used for influx query
2022-04-08 15:26:24 -04:00
m0duspwnens
59d5be682a ensure bash is used for influx query 2022-04-08 15:01:38 -04:00
Mike Reeves
7805311ea2 Merge pull request #7748 from Security-Onion-Solutions/bravo
Bravo
2022-04-08 14:48:54 -04:00
Josh Patterson
8757ca0dfb Merge pull request #7749 from Security-Onion-Solutions/issue/7113
ensure we can grab management ip and display whiptail if we cant
2022-04-08 12:10:54 -04:00
m0duspwnens
3e8c687d61 ensure we can grab management ip and display whiptail if we cant 2022-04-08 12:05:38 -04:00
Jason Ertel
13c9af5a5a Clearing hotfix 2022-04-08 10:23:44 -04:00
Mike Reeves
a5313b330f Merge master into dev 2022-04-08 09:07:46 -04:00
Mike Reeves
0bc3d5d757 Merge pull request #7741 from Security-Onion-Solutions/hotfix/2.3.110
Hotfix/2.3.110 20220407
2022-04-07 16:30:50 -04:00
Mike Reeves
6d88a5b541 Merge pull request #7740 from Security-Onion-Solutions/hfix0407
2.3.110 hotfix 0407
2022-04-07 16:11:58 -04:00
Mike Reeves
6a28e752f0 2.3.110 hotfix 0407 2022-04-07 16:03:13 -04:00
Josh Brower
ae8d300567 Merge pull request #7738 from Security-Onion-Solutions/feature/idh-allow-multiple-int
Include firewall state
2022-04-07 14:52:31 -04:00
Mike Reeves
2ad3f63cb5 Merge pull request #7739 from Security-Onion-Solutions/TOoSmOotH-patch-2
Update HOTFIX
2022-04-07 14:46:20 -04:00
Mike Reeves
93e04850c4 Update HOTFIX 2022-04-07 14:40:54 -04:00
Josh Brower
36b2d78dfe Include firewall state 2022-04-07 14:02:21 -04:00
Jason Ertel
44e318e046 Provide CLI feedback for missing input 2022-04-07 10:16:44 -04:00
Josh Patterson
09e7b5a8bf Merge pull request #7733 from Security-Onion-Solutions/m0duspwnens-salt-3004.1
remove saltstack repo created by bootstrap-salt for ubuntu
2022-04-07 09:05:51 -04:00
m0duspwnens
8fbd16f75d ensure salt.list is absent 2022-04-07 09:03:51 -04:00
m0duspwnens
722b200e16 add retry to apt_update in case running in background 2022-04-07 08:58:07 -04:00
m0duspwnens
b2a98af18b proper formatting 2022-04-07 08:55:30 -04:00
m0duspwnens
be3769fd7c run apt-get update if saltstack.list changes 2022-04-07 08:53:44 -04:00
m0duspwnens
08ac696f14 remove saltstack repo created by bootstrap-salt for ubuntu 2022-04-06 17:38:06 -04:00
Josh Brower
86771e1fe6 Merge pull request #7732 from Security-Onion-Solutions/feature/idh-allow-multiple-int
Feature/idh allow multiple int
2022-04-06 17:21:30 -04:00
Josh Brower
f5e539a05c Initial support for restricting IDH services on MGT IP 2022-04-06 17:16:38 -04:00
Josh Patterson
0c1ac729e1 Merge pull request #7731 from Security-Onion-Solutions/m0duspwnens-salt-3004.1
update the centos repo for airgap prior to applying hotfix
2022-04-06 17:00:09 -04:00
m0duspwnens
833106775f update the centos repo for airgap prior to applying hotfix or standard soup run 2022-04-06 16:53:55 -04:00
Jason Ertel
d8defdd7b0 Improve unit test stability 2022-04-05 07:36:25 -04:00
Jason Ertel
d2fa80e48a Update status codes to match SOC 2022-04-05 07:20:23 -04:00
Josh Brower
7906c053b1 Initial support for restricting IDH services on MGT IP 2022-04-04 16:46:05 -04:00
Mike Reeves
0c7a07f5c0 Merge pull request #7667 from Security-Onion-Solutions/analystsetup
Analyst Setup
2022-04-04 16:09:13 -04:00
Jason Ertel
04eef0d31f Merge branch 'dev' into kilo 2022-04-04 15:59:09 -04:00
Jason Ertel
7df6833568 Add unit tests for Urlhaus; remove placeholder whois analyzer 2022-04-04 15:58:53 -04:00
Wes Lambert
07cf3469a0 Remove pyyaml from requirements file 2022-04-04 11:40:02 +00:00
Wes Lambert
39101cafd1 Add UrlHaus analyzer and helpers script 2022-04-01 21:11:57 +00:00
Mike Reeves
5387caf6f4 fix formatting 2022-04-01 16:50:55 -04:00
Mike Reeves
07783713e6 fix formatting 2022-04-01 16:22:40 -04:00
Mike Reeves
5974279ed7 fix formatting 2022-04-01 16:17:22 -04:00
Mike Reeves
277c7d9d33 fix formatting 2022-04-01 16:05:37 -04:00
Mike Reeves
d20a07bb5f fix formatting 2022-04-01 16:00:44 -04:00
Mike Reeves
a449a91f38 fix formatting 2022-04-01 15:52:38 -04:00
Mike Reeves
76f43380d9 fix so salt master gets installed 2022-04-01 14:29:24 -04:00
Mike Reeves
7c39559787 fix so salt master gets installed 2022-04-01 14:19:17 -04:00
Jason Ertel
cedb23f4bc Merge pull request #7689 from Security-Onion-Solutions/esup
Upgrade to ES 7.17.2
2022-04-01 13:57:04 -04:00
Jason Ertel
6e7b2ccedc Upgrade to ES 7.17.2 2022-04-01 13:50:57 -04:00
Mike Reeves
8e9386fcd4 fix the yum commands 2022-04-01 13:17:13 -04:00
Mike Reeves
97fc652a97 fix the yum commands 2022-04-01 11:54:55 -04:00
Mike Reeves
2782c9b464 Update salt versions 2022-04-01 11:26:58 -04:00
Josh Patterson
b5ce8756e9 Merge pull request #7686 from Security-Onion-Solutions/workstation_state
don't run workstation.trusted-ca if not connected to grid
2022-04-01 11:06:53 -04:00
m0duspwnens
e14463c0ab don't run workstation.trusted-ca if not connected to grid 2022-04-01 11:05:34 -04:00
Mike Reeves
d524f3833b Let the patch pillar do its work 2022-04-01 10:09:55 -04:00
Mike Reeves
cb2044cee9 Fix the analyst pillar 2022-04-01 09:29:29 -04:00
Mike Reeves
64e480714a Fix the analyst pillar 2022-04-01 09:10:38 -04:00
Jason Ertel
2dc370c8b6 Add source packages to salt state 2022-03-31 18:56:38 -04:00
Jason Ertel
57dc848792 Support analyzer deps 2022-03-31 16:48:13 -04:00
Jason Ertel
9947ba6e43 Support CentOS paths 2022-03-31 16:47:56 -04:00
Jason Ertel
48fbc2290f Add dep support for analyzers 2022-03-31 13:59:35 -04:00
Mike Reeves
edc6a461ec Fix analyst pillar 2022-03-31 13:57:37 -04:00
Mike Reeves
63eb15aa6d Run analyst Pillar 2022-03-31 13:35:30 -04:00
Mike Reeves
5264526ff1 Fix salt master declaration 2022-03-31 12:05:59 -04:00
Mike Reeves
c9eb188a79 Only run specific states during install for AW 2022-03-31 12:01:55 -04:00
Mike Reeves
ad833965a0 Fix extra space 2022-03-31 11:12:10 -04:00
Mike Reeves
179aa5e29c Add firewall rules for Analyst workstation 2022-03-31 10:49:38 -04:00
Josh Patterson
86b311c468 Merge pull request #7675 from Security-Onion-Solutions/issue/7203
different systemd unit files for ubuntu and centos
2022-03-31 10:18:10 -04:00
m0duspwnens
fc60f64ddb different systemd unit files for ubuntu and centos 2022-03-31 10:11:43 -04:00
Jason Ertel
1aba4da2bb Correct analyzer path 2022-03-30 21:01:07 -04:00
Mike Reeves
a049e458c6 Add workstation to the salt config 2022-03-30 14:03:52 -04:00
Jason Ertel
45f511caab Remove extra comma 2022-03-30 13:21:35 -04:00
Mike Reeves
f43a6757e0 Add analyst install network stack 2022-03-30 11:16:00 -04:00
Mike Reeves
c3d3806f65 Add analyst install network stack 2022-03-30 11:14:35 -04:00
Mike Reeves
dceb46888f Add analyst install network stack 2022-03-30 11:06:59 -04:00
Jason Ertel
e667bb1e59 merge 2022-03-30 10:57:40 -04:00
Mike Reeves
816d0b1075 Don't prompt for install type since we know it's analyst 2022-03-29 17:35:13 -04:00
Mike Reeves
c4a4e9737b Set standalone to load Xwindows 2022-03-29 17:31:53 -04:00
Josh Patterson
1cb48fc6a8 Merge pull request #7668 from Security-Onion-Solutions/issue/7203
run salt_minion_service state last to prevent salt-minion from restarting during state run
2022-03-29 17:30:32 -04:00
Mike Reeves
45161b2a39 Set standalone to load Xwindows 2022-03-29 17:28:32 -04:00
Mike Reeves
67582be575 Set standalone to load Xwindows 2022-03-29 17:23:38 -04:00
Mike Reeves
86e32f3e6c Set standalone to load Xwindows 2022-03-29 17:13:47 -04:00
Mike Reeves
053ec81285 Set standalone to load Xwindows 2022-03-29 17:12:25 -04:00
Mike Reeves
853235ca9b Set standalone to load Xwindows 2022-03-29 17:11:19 -04:00
Mike Reeves
afb918d79c Set standalone to load Xwindows 2022-03-29 17:08:03 -04:00
m0duspwnens
7a4d93f09b run salt_minion_service state last to prevent salt-minion from restarting during state run 2022-03-29 15:44:05 -04:00
Jason Ertel
b2a96fab7e merge 2022-03-29 14:07:20 -04:00
Jason Ertel
d2bf6d5618 Add build script to help pre-validate analyzers before pushing 2022-03-29 14:04:23 -04:00
Jason Ertel
484ef4bc31 Ensure generated python files are not pushed to version control 2022-03-29 13:51:12 -04:00
Jason Ertel
cb491630ae Analyzer CI 2022-03-29 13:40:56 -04:00
Jason Ertel
0a8d24a225 Add automated CI for analyzers 2022-03-29 13:10:04 -04:00
Mike Reeves
3ace55dfe5 Add initial analyst install code 2022-03-29 12:49:30 -04:00
Mike Reeves
102d2507cb Add initial analyst install code 2022-03-29 12:48:52 -04:00
Mike Reeves
0d23688aa0 Add initial analyst install code 2022-03-29 12:46:45 -04:00
Mike Reeves
80af497f95 Add initial analyst install code 2022-03-29 12:43:20 -04:00
Mike Reeves
990470a765 Add initial analyst install option to so-setup 2022-03-29 10:41:45 -04:00
Josh Patterson
f5095b273d Merge pull request #7665 from Security-Onion-Solutions/workstation_state
Workstation state
2022-03-29 10:27:07 -04:00
m0duspwnens
e3f3af52e1 fix spacing 2022-03-29 10:19:29 -04:00
m0duspwnens
2f489895ef top match and remove_gui state 2022-03-29 10:17:21 -04:00
weslambert
7f7eaf173b Merge pull request #7663 from Security-Onion-Solutions/fix/strelka_fw
Add strelka_frontend to heavynode, sensor, and standalone role FW por…
2022-03-28 16:14:25 -04:00
weslambert
6004dde54a Add strelka_frontend to heavynode, sensor, and standalone role FW portgroups 2022-03-28 16:05:07 -04:00
Jason Ertel
c23b87965f Merge branch 'dev' into kilo 2022-03-28 15:53:33 -04:00
Jason Ertel
deb9b0e5ef Add analyze feature 2022-03-28 15:53:24 -04:00
m0duspwnens
0ddfaf8d74 changes for workstation 2022-03-28 15:34:15 -04:00
weslambert
fb7160cba5 Merge pull request #7644 from Security-Onion-Solutions/fix/syslog_pr_adjustment
Update with changes from Abe's PR and other fixes
2022-03-25 13:59:20 -04:00
weslambert
e6599cd10e Update with changes from Abe's PR and other fixes 2022-03-25 13:57:44 -04:00
weslambert
c02d7fab50 Merge pull request #7636 from Security-Onion-Solutions/feature/rita
Parsing of RITA Logs
2022-03-24 13:05:22 -04:00
weslambert
fbc86f43ec Add exclude filter for logs when there are no results from analysis 2022-03-24 13:03:03 -04:00
weslambert
4c93217aac Merge pull request #7635 from Security-Onion-Solutions/fix/process_mappings_keyword
Additional .keyword shims for process mappings
2022-03-24 12:53:16 -04:00
Wes Lambert
fe1b72655b Additional .keyword shims for process mappings 2022-03-24 16:45:06 +00:00
m0duspwnens
293de159db fix package names 2022-03-24 11:33:16 -04:00
m0duspwnens
7cfc52da8a fix include 2022-03-24 10:02:25 -04:00
m0duspwnens
a0841ee7a7 workstation state 2022-03-24 09:57:58 -04:00
weslambert
5160a55dcf Merge pull request #7629 from Security-Onion-Solutions/fix/roles_load_check_cluster_health
Check ES cluster health before trying to load roles
2022-03-23 11:07:24 -04:00
weslambert
1f2bca599f Check cluster health before trying to load roles for ES 2022-03-23 11:00:26 -04:00
Wes Lambert
8a56c88773 Adjust log file paths 2022-03-22 17:51:17 +00:00
Wes Lambert
57f01c70ec Remove extra forward slash in log path 2022-03-22 17:45:23 +00:00
Wes Lambert
2487d468ab Add RITA Elasticsearch ingest pipeline config 2022-03-22 17:38:22 +00:00
Wes Lambert
f613d8ad86 Add RITA Logstash config 2022-03-22 17:36:18 +00:00
Doug Burks
a3f8a10eb9 Merge pull request #7608 from Security-Onion-Solutions/fix/telegraf-non-root
FIX: Run telegraf as non-root #7468
2022-03-18 15:17:28 -04:00
Doug Burks
eda7a8d7ea FIX: Update telegraf influxdbsize.sh to collect influxdb size from influxdb_size.log #7468 2022-03-18 13:15:43 -04:00
Doug Burks
f7dc5588ae FIX: Update common init.sls to create cron job to write influxdb size for telegraf #7468 2022-03-18 13:13:46 -04:00
Doug Burks
c13994994b FIX: Update telegraf init.sls to run telegraf as non-root #7468 2022-03-18 13:11:56 -04:00
Josh Patterson
949365c636 Merge pull request #7602 from Security-Onion-Solutions/issue/7601
prevent so-setup iso from running on ubuntu
2022-03-17 11:37:53 -04:00
m0duspwnens
a896348743 prevent so-setup iso from running on ubuntu - https://github.com/Security-Onion-Solutions/securityonion/issues/7601 2022-03-17 11:31:16 -04:00
Josh Brower
5b9c82a434 Merge pull request #7494 from Security-Onion-Solutions/fix/fleetdm-custom-hostname
Force regen of ssl cert
2022-03-16 15:17:05 -04:00
Doug Burks
50477071b8 Merge pull request #7588 from Security-Onion-Solutions/fix/prevent-multiple-instances
FIX: Prevent multiple instances of so-sensor-clean and so-playbook-sync #6622
2022-03-16 13:54:00 -04:00
Doug Burks
e65f2a5513 FIX: Prevent multiple instances of so-sensor-clean #6622 2022-03-16 13:28:39 -04:00
Doug Burks
e56f90d83c FIX: Prevent multiple instances of so-playbook-sync #6622 2022-03-16 13:27:37 -04:00
weslambert
aaded58131 Merge pull request #7565 from Security-Onion-Solutions/fix/es_template_fix
Custom ES template fixes
2022-03-15 11:09:46 -04:00
Doug Burks
9bf0265cea Merge pull request #7566 from Security-Onion-Solutions/feature/hunt-soc-auth
FEATURE: Add new Hunt query for SOC logins #7327
2022-03-15 10:58:40 -04:00
Mike Reeves
e01c1398d5 Merge pull request #7564 from Security-Onion-Solutions/removethehive
Removethehive
2022-03-15 10:56:08 -04:00
Wes Lambert
42d6c3a956 Replace Elastic connection check using ELASTICCURL with so-elasticsearch-query 2022-03-15 14:55:04 +00:00
Doug Burks
eec44a6b02 Add a SOC Auth query to hunt.queries.json 2022-03-15 10:38:46 -04:00
Doug Burks
d1e1887e36 Add support for Kratos audit logs in hunt.eventfields.json 2022-03-15 10:37:58 -04:00
Wes Lambert
5f56c7a261 Replace ELASTICCURL with so-elasticsearch-query 2022-03-15 14:32:00 +00:00
weslambert
d46620ea2a Merge pull request #7561 from Security-Onion-Solutions/es_template_map_fix
Custom ES Template Fixes
2022-03-15 10:01:42 -04:00
Jason Ertel
408f9d6695 Update .gitleaks.toml 2022-03-15 09:53:27 -04:00
Jason Ertel
b810f14428 Update .gitleaks.toml 2022-03-15 09:53:11 -04:00
Jason Ertel
cec9cba40e Create .gitleaks.toml 2022-03-15 09:47:57 -04:00
Jason Ertel
8ebeeb497f add configuration to override leak detector defaults 2022-03-15 09:43:09 -04:00
Mike Reeves
9c80ff4f65 Remove hive from more files 2022-03-15 09:37:58 -04:00
Mike Reeves
81f0aa58b8 Remove hive from more files 2022-03-15 08:28:03 -04:00
Doug Burks
63cef4daff Merge pull request #7557 from Security-Onion-Solutions/dougburks-patch-1
FIX: surilogcompress cron job not running
2022-03-15 07:41:05 -04:00
Doug Burks
db4f138a78 FIX: surilogcompress cron job not running
The suricata user was originally created with `/opt/so/conf/suricata` as its home directory. I think at some point we changed permissions on `/opt/so/conf` and at that point the `surilogcompress` cron job stopped working. Changing the home directory to `/nsm/suricata` works on all of my PROD systems (including Ubuntu and CentOS).

For more information, please see:
https://github.com/Security-Onion-Solutions/securityonion/issues/7133
2022-03-15 07:10:02 -04:00
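A minimal sketch of the fix described above, assuming a standard Linux suricata account (the actual change ships through this repo's Salt states, so the manual command here is only illustrative):

```
# Illustrative manual equivalent of the fix: move the suricata user's
# home from /opt/so/conf/suricata to /nsm/suricata so its cron jobs
# (like surilogcompress) work again after the /opt/so/conf permission change.
sudo usermod -d /nsm/suricata suricata

# Confirm the new home directory is in place.
getent passwd suricata
```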
Mike Reeves
b5b60af16f Remove hive from so-user 2022-03-14 15:06:07 -04:00
Mike Reeves
b83fec6fd2 More hive removal 2022-03-14 14:51:39 -04:00
Mike Reeves
ff30f572d7 Remove thehive from image common 2022-03-14 10:40:41 -04:00
Mike Reeves
95195c07fc Disable hive in automation files 2022-03-14 10:36:23 -04:00
Jason Ertel
16f673d956 Merge pull request #7541 from Security-Onion-Solutions/kilo
Add assignee field to case list
2022-03-14 08:49:46 -04:00
Jason Ertel
5a28725def Add assignee to case list 2022-03-14 08:45:28 -04:00
Wes Lambert
ba24f75893 Fix index typo 2022-03-11 18:11:16 +00:00
Wes Lambert
70ed20f691 Add new sls file for custom ES index templates 2022-03-11 18:07:23 +00:00
Wes Lambert
d12ff503c2 Change role loading verbiage 2022-03-11 16:23:19 +00:00
Wes Lambert
dc258cf043 Load custom component templates in so-elasticsearch-templates-load 2022-03-11 16:22:55 +00:00
Wes Lambert
8e43a6e571 Don't generate index template if index_template definition is not present in pillar 2022-03-11 16:22:06 +00:00
m0duspwnens
e1e8a20e11 make sure values exist in data structure 2022-03-10 17:09:00 -05:00
Josh Brower
f0e44827a5 rm extra line 2022-03-10 08:48:46 -05:00
Josh Brower
814e16ba95 Force regen of ssl cert 2022-03-10 08:47:26 -05:00
Mike Reeves
7ca06df66f Merge pull request #7484 from Security-Onion-Solutions/TOoSmOotH-patch-1
Update VERSION
2022-03-09 14:50:52 -05:00
Mike Reeves
6f15acd2f9 Update VERSION 2022-03-09 14:50:14 -05:00
303 changed files with 5814 additions and 1848 deletions

.github/.gitleaks.toml (new file, 546 lines)
@@ -0,0 +1,546 @@
title = "gitleaks config"
# Gitleaks rules are defined by regular expressions and entropy ranges.
# Some secrets have unique signatures which make detecting those secrets easy.
# Examples of those secrets would be GitLab Personal Access Tokens, AWS keys, and GitHub Access Tokens.
# All these examples have defined prefixes like `glpat`, `AKIA`, `ghp_`, etc.
#
# Other secrets might just be a hash which means we need to write more complex rules to verify
# that what we are matching is a secret.
#
# Here is an example of a semi-generic secret
#
# discord_client_secret = "8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ"
#
# We can write a regular expression to capture the variable name (identifier),
# the assignment symbol (like '=' or ':='), and finally the actual secret.
# The structure of a rule to match this example secret is below:
#
#  Beginning string quotation              End string quotation
#                │                                 │
#                ▼                                 ▼
# (?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]
#
#          ▲                        ▲                                 ▲
#          │                        │                                 │
#      identifier           assignment symbol                      secret
#
[[rules]]
id = "gitlab-pat"
description = "GitLab Personal Access Token"
regex = '''glpat-[0-9a-zA-Z\-\_]{20}'''
[[rules]]
id = "aws-access-token"
description = "AWS"
regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'''
# Cryptographic keys
[[rules]]
id = "PKCS8-PK"
description = "PKCS8 private key"
regex = '''-----BEGIN PRIVATE KEY-----'''
[[rules]]
id = "RSA-PK"
description = "RSA private key"
regex = '''-----BEGIN RSA PRIVATE KEY-----'''
[[rules]]
id = "OPENSSH-PK"
description = "SSH private key"
regex = '''-----BEGIN OPENSSH PRIVATE KEY-----'''
[[rules]]
id = "PGP-PK"
description = "PGP private key"
regex = '''-----BEGIN PGP PRIVATE KEY BLOCK-----'''
[[rules]]
id = "github-pat"
description = "GitHub Personal Access Token"
regex = '''ghp_[0-9a-zA-Z]{36}'''
[[rules]]
id = "github-oauth"
description = "GitHub OAuth Access Token"
regex = '''gho_[0-9a-zA-Z]{36}'''
[[rules]]
id = "SSH-DSA-PK"
description = "SSH (DSA) private key"
regex = '''-----BEGIN DSA PRIVATE KEY-----'''
[[rules]]
id = "SSH-EC-PK"
description = "SSH (EC) private key"
regex = '''-----BEGIN EC PRIVATE KEY-----'''
[[rules]]
id = "github-app-token"
description = "GitHub App Token"
regex = '''(ghu|ghs)_[0-9a-zA-Z]{36}'''
[[rules]]
id = "github-refresh-token"
description = "GitHub Refresh Token"
regex = '''ghr_[0-9a-zA-Z]{76}'''
[[rules]]
id = "shopify-shared-secret"
description = "Shopify shared secret"
regex = '''shpss_[a-fA-F0-9]{32}'''
[[rules]]
id = "shopify-access-token"
description = "Shopify access token"
regex = '''shpat_[a-fA-F0-9]{32}'''
[[rules]]
id = "shopify-custom-access-token"
description = "Shopify custom app access token"
regex = '''shpca_[a-fA-F0-9]{32}'''
[[rules]]
id = "shopify-private-app-access-token"
description = "Shopify private app access token"
regex = '''shppa_[a-fA-F0-9]{32}'''
[[rules]]
id = "slack-access-token"
description = "Slack token"
regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?'''
[[rules]]
id = "stripe-access-token"
description = "Stripe"
regex = '''(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}'''
[[rules]]
id = "pypi-upload-token"
description = "PyPI upload token"
regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}'''
[[rules]]
id = "gcp-service-account"
description = "Google (GCP) Service-account"
regex = '''\"type\": \"service_account\"'''
[[rules]]
id = "heroku-api-key"
description = "Heroku API Key"
regex = ''' (?i)(heroku[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})['\"]'''
secretGroup = 3
[[rules]]
id = "slack-web-hook"
description = "Slack Webhook"
regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8,12}/[a-zA-Z0-9_]{24}'''
[[rules]]
id = "twilio-api-key"
description = "Twilio API Key"
regex = '''SK[0-9a-fA-F]{32}'''
[[rules]]
id = "age-secret-key"
description = "Age secret key"
regex = '''AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}'''
[[rules]]
id = "facebook-token"
description = "Facebook token"
regex = '''(?i)(facebook[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "twitter-token"
description = "Twitter token"
regex = '''(?i)(twitter[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{35,44})['\"]'''
secretGroup = 3
[[rules]]
id = "adobe-client-id"
description = "Adobe Client ID (Oauth Web)"
regex = '''(?i)(adobe[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "adobe-client-secret"
description = "Adobe Client Secret"
regex = '''(p8e-)(?i)[a-z0-9]{32}'''
[[rules]]
id = "alibaba-access-key-id"
description = "Alibaba AccessKey ID"
regex = '''(LTAI)(?i)[a-z0-9]{20}'''
[[rules]]
id = "alibaba-secret-key"
description = "Alibaba Secret Key"
regex = '''(?i)(alibaba[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
secretGroup = 3
[[rules]]
id = "asana-client-id"
description = "Asana Client ID"
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{16})['\"]'''
secretGroup = 3
[[rules]]
id = "asana-client-secret"
description = "Asana Client Secret"
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "atlassian-api-token"
description = "Atlassian API token"
regex = '''(?i)(atlassian[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{24})['\"]'''
secretGroup = 3
[[rules]]
id = "bitbucket-client-id"
description = "Bitbucket client ID"
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "bitbucket-client-secret"
description = "Bitbucket client secret"
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9_\-]{64})['\"]'''
secretGroup = 3
[[rules]]
id = "beamer-api-token"
description = "Beamer API token"
regex = '''(?i)(beamer[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](b_[a-z0-9=_\-]{44})['\"]'''
secretGroup = 3
[[rules]]
id = "clojars-api-token"
description = "Clojars API token"
regex = '''(CLOJARS_)(?i)[a-z0-9]{60}'''
[[rules]]
id = "contentful-delivery-api-token"
description = "Contentful delivery API token"
regex = '''(?i)(contentful[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{43})['\"]'''
secretGroup = 3
[[rules]]
id = "databricks-api-token"
description = "Databricks API token"
regex = '''dapi[a-h0-9]{32}'''
[[rules]]
id = "discord-api-token"
description = "Discord API key"
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]'''
secretGroup = 3
[[rules]]
id = "discord-client-id"
description = "Discord client ID"
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{18})['\"]'''
secretGroup = 3
[[rules]]
id = "discord-client-secret"
description = "Discord client secret"
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "doppler-api-token"
description = "Doppler API token"
regex = '''['\"](dp\.pt\.)(?i)[a-z0-9]{43}['\"]'''
[[rules]]
id = "dropbox-api-secret"
description = "Dropbox API secret/key"
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
[[rules]]
id = "dropbox--api-key"
description = "Dropbox API secret/key"
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
[[rules]]
id = "dropbox-short-lived-api-token"
description = "Dropbox short lived API token"
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](sl\.[a-z0-9\-=_]{135})['\"]'''
[[rules]]
id = "dropbox-long-lived-api-token"
description = "Dropbox long lived API token"
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"][a-z0-9]{11}(AAAAAAAAAA)[a-z0-9\-_=]{43}['\"]'''
[[rules]]
id = "duffel-api-token"
description = "Duffel API token"
regex = '''['\"]duffel_(test|live)_(?i)[a-z0-9_-]{43}['\"]'''
[[rules]]
id = "dynatrace-api-token"
description = "Dynatrace API token"
regex = '''['\"]dt0c01\.(?i)[a-z0-9]{24}\.[a-z0-9]{64}['\"]'''
[[rules]]
id = "easypost-api-token"
description = "EasyPost API token"
regex = '''['\"]EZAK(?i)[a-z0-9]{54}['\"]'''
[[rules]]
id = "easypost-test-api-token"
description = "EasyPost test API token"
regex = '''['\"]EZTK(?i)[a-z0-9]{54}['\"]'''
[[rules]]
id = "fastly-api-token"
description = "Fastly API token"
regex = '''(?i)(fastly[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "finicity-client-secret"
description = "Finicity client secret"
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{20})['\"]'''
secretGroup = 3
[[rules]]
id = "finicity-api-token"
description = "Finicity API token"
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "flutterwave-public-key"
description = "Flutterwave public key"
regex = '''FLWPUBK_TEST-(?i)[a-h0-9]{32}-X'''
[[rules]]
id = "flutterwave-secret-key"
description = "Flutterwave secret key"
regex = '''FLWSECK_TEST-(?i)[a-h0-9]{32}-X'''
[[rules]]
id = "flutterwave-enc-key"
description = "Flutterwave encrypted key"
regex = '''FLWSECK_TEST[a-h0-9]{12}'''
[[rules]]
id = "frameio-api-token"
description = "Frame.io API token"
regex = '''fio-u-(?i)[a-z0-9\-_=]{64}'''
[[rules]]
id = "gocardless-api-token"
description = "GoCardless API token"
regex = '''['\"]live_(?i)[a-z0-9\-_=]{40}['\"]'''
[[rules]]
id = "grafana-api-token"
description = "Grafana API token"
regex = '''['\"]eyJrIjoi(?i)[a-z0-9\-_=]{72,92}['\"]'''
[[rules]]
id = "hashicorp-tf-api-token"
description = "HashiCorp Terraform user/org API token"
regex = '''['\"](?i)[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}['\"]'''
[[rules]]
id = "hubspot-api-token"
description = "HubSpot API token"
regex = '''(?i)(hubspot[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
secretGroup = 3
[[rules]]
id = "intercom-api-token"
description = "Intercom API token"
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_]{60})['\"]'''
secretGroup = 3
[[rules]]
id = "intercom-client-secret"
description = "Intercom client secret/ID"
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
secretGroup = 3
[[rules]]
id = "ionic-api-token"
description = "Ionic API token"
regex = '''(?i)(ionic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](ion_[a-z0-9]{42})['\"]'''
[[rules]]
id = "linear-api-token"
description = "Linear API token"
regex = '''lin_api_(?i)[a-z0-9]{40}'''
[[rules]]
id = "linear-client-secret"
description = "Linear client secret/ID"
regex = '''(?i)(linear[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "lob-api-key"
description = "Lob API Key"
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((live|test)_[a-f0-9]{35})['\"]'''
secretGroup = 3
[[rules]]
id = "lob-pub-api-key"
description = "Lob Publishable API Key"
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((test|live)_pub_[a-f0-9]{31})['\"]'''
secretGroup = 3
[[rules]]
id = "mailchimp-api-key"
description = "Mailchimp API key"
regex = '''(?i)(mailchimp[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32}-us20)['\"]'''
secretGroup = 3
[[rules]]
id = "mailgun-private-api-token"
description = "Mailgun private API token"
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](key-[a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "mailgun-pub-key"
description = "Mailgun public validation key"
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](pubkey-[a-f0-9]{32})['\"]'''
secretGroup = 3
[[rules]]
id = "mailgun-signing-key"
description = "Mailgun webhook signing key"
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{32}-[a-h0-9]{8}-[a-h0-9]{8})['\"]'''
secretGroup = 3
[[rules]]
id = "mapbox-api-token"
description = "Mapbox API token"
regex = '''(?i)(pk\.[a-z0-9]{60}\.[a-z0-9]{22})'''
[[rules]]
id = "messagebird-api-token"
description = "MessageBird API token"
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{25})['\"]'''
secretGroup = 3
[[rules]]
id = "messagebird-client-id"
description = "MessageBird API client ID"
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
secretGroup = 3
[[rules]]
id = "new-relic-user-api-key"
description = "New Relic user API Key"
regex = '''['\"](NRAK-[A-Z0-9]{27})['\"]'''
[[rules]]
id = "new-relic-user-api-id"
description = "New Relic user API ID"
regex = '''(?i)(newrelic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([A-Z0-9]{64})['\"]'''
secretGroup = 3
[[rules]]
id = "new-relic-browser-api-token"
description = "New Relic ingest browser API token"
regex = '''['\"](NRJS-[a-f0-9]{19})['\"]'''
[[rules]]
id = "npm-access-token"
description = "npm access token"
regex = '''['\"](npm_(?i)[a-z0-9]{36})['\"]'''
[[rules]]
id = "planetscale-password"
description = "PlanetScale password"
regex = '''pscale_pw_(?i)[a-z0-9\-_\.]{43}'''
[[rules]]
id = "planetscale-api-token"
description = "PlanetScale API token"
regex = '''pscale_tkn_(?i)[a-z0-9\-_\.]{43}'''
[[rules]]
id = "postman-api-token"
description = "Postman API token"
regex = '''PMAK-(?i)[a-f0-9]{24}\-[a-f0-9]{34}'''
[[rules]]
id = "pulumi-api-token"
description = "Pulumi API token"
regex = '''pul-[a-f0-9]{40}'''
[[rules]]
id = "rubygems-api-token"
description = "Rubygem API token"
regex = '''rubygems_[a-f0-9]{48}'''
[[rules]]
id = "sendgrid-api-token"
description = "SendGrid API token"
regex = '''SG\.(?i)[a-z0-9_\-\.]{66}'''
[[rules]]
id = "sendinblue-api-token"
description = "Sendinblue API token"
regex = '''xkeysib-[a-f0-9]{64}\-(?i)[a-z0-9]{16}'''
[[rules]]
id = "shippo-api-token"
description = "Shippo API token"
regex = '''shippo_(live|test)_[a-f0-9]{40}'''
[[rules]]
id = "linkedin-client-secret"
description = "LinkedIn Client secret"
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z]{16})['\"]'''
secretGroup = 3
[[rules]]
id = "linkedin-client-id"
description = "LinkedIn Client ID"
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{14})['\"]'''
secretGroup = 3
[[rules]]
id = "twitch-api-token"
description = "Twitch API token"
regex = '''(?i)(twitch[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
secretGroup = 3
[[rules]]
id = "typeform-api-token"
description = "Typeform API token"
regex = '''(?i)(typeform[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}(tfp_[a-z0-9\-_\.=]{59})'''
secretGroup = 3
[[rules]]
id = "generic-api-key"
description = "Generic API Key"
regex = '''(?i)((key|api[^Version]|token|secret|password)[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9a-zA-Z\-_=]{8,64})['\"]'''
entropy = 3.7
secretGroup = 4
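# Here, 'entropy' is a minimum Shannon-entropy threshold the captured
# secret group must exceed before a match is reported; it filters out
# low-randomness strings that this intentionally broad generic regex
# would otherwise flag.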
[allowlist]
description = "global allow lists"
regexes = ['''219-09-9999''', '''078-05-1120''', '''(9[0-9]{2}|666)-\d{2}-\d{4}''']
paths = [
'''gitleaks.toml''',
'''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
'''(go.mod|go.sum)$''',
'''salt/nginx/files/enterprise-attack.json'''
]
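The same rules can be exercised locally before pushing. A minimal sketch, assuming the standalone gitleaks v8 CLI (the GitHub Action in the workflow below wraps gitleaks; older v7-era releases took a `--config-path` flag instead):

```
# Scan the working tree using the repo's committed gitleaks config.
gitleaks detect --source . --config .github/.gitleaks.toml --verbose
```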


@@ -13,3 +13,5 @@ jobs:
       - name: Gitleaks
         uses: zricethezav/gitleaks-action@master
+        with:
+          config-path: .github/.gitleaks.toml

.github/workflows/pythontest.yml (new file, 31 lines)

@@ -0,0 +1,31 @@
name: python-test
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version: ["3.10"]
python-code-path: ["salt/sensoroni/files/analyzers"]
steps:
- uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v3
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-cov
find . -name requirements.txt -exec pip install -r {} \;
- name: Lint with flake8
run: |
flake8 ${{ matrix.python-code-path }} --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics
- name: Test with pytest
run: |
pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=100 --cov-config=${{ matrix.python-code-path }}/pytest.ini
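
The workflow above can be reproduced locally before pushing. A rough equivalent of the CI steps, assuming Python 3.10 and a POSIX shell:
```
# Mirror the python-test workflow steps on a local checkout.
python -m pip install --upgrade pip
python -m pip install flake8 pytest pytest-cov
find . -name requirements.txt -exec pip install -r {} \;

CODE_PATH="salt/sensoroni/files/analyzers"
# Lint, then run the analyzer tests with 100% coverage enforced.
flake8 "$CODE_PATH" --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics
pytest "$CODE_PATH" --cov="$CODE_PATH" --doctest-modules --cov-report=term \
  --cov-fail-under=100 --cov-config="$CODE_PATH/pytest.ini"
```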

.gitignore
View File

@@ -56,4 +56,15 @@ $RECYCLE.BIN/
# Windows shortcuts
*.lnk
# End of https://www.gitignore.io/api/macos,windows
# Pytest output
__pycache__
.pytest_cache
.coverage
*.pyc
.venv
# Analyzer dev/test config files
*_dev.yaml
site-packages

HOTFIX
View File

@@ -1 +1 @@
04012022
04052022

View File

@@ -1,14 +1,20 @@
## Security Onion 2.3.110
## Security Onion 2.3.130
Security Onion 2.3.110 is here!
Security Onion 2.3.130 is here!
## Screenshots
Alerts
![Alerts](./assets/images/screenshots/alerts-1.png)
![Alerts](./assets/images/screenshots/alerts.png)
Dashboards
![Dashboards](./assets/images/screenshots/dashboards.png)
Hunt
![Hunt](./assets/images/screenshots/hunt-1.png)
![Hunt](./assets/images/screenshots/hunt.png)
Cases
![Cases](./assets/images/screenshots/cases-comments.png)
### Release Notes

View File

@@ -1,18 +1,18 @@
### 2.3.110-20220405 ISO image built on 2022/04/05
### 2.3.130-20220607 ISO image built on 2022/06/07
### Download and Verify
2.3.110-20220405 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220405.iso
2.3.130-20220607 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso
MD5: 9CE982FE45DC2957A3A6D376E6DCC048
SHA1: 10E3FF28A69F9617D4CCD2F5061AA2DC062B8F94
SHA256: 0C178A422ABF7B61C08728E32CE20A9F9C1EC65807EB67D06F1C23F7D1EA51A7
MD5: 0034D6A9461C04357AFF512875408A4C
SHA1: BF80EEB101C583153CAD8E185A7DB3173FD5FFE8
SHA256: 15943623B96D8BB4A204A78668447F36B54A63ABA5F8467FBDF0B25C5E4E6078
Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220405.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig
Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS
@@ -26,22 +26,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/ma
Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220405.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig
```
Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220405.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso
```
Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.3.110-20220405.iso.sig securityonion-2.3.110-20220405.iso
gpg --verify securityonion-2.3.130-20220607.iso.sig securityonion-2.3.130-20220607.iso
```
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Tue 05 Apr 2022 06:37:40 PM EDT using RSA key ID FE507013
gpg: Signature made Tue 07 Jun 2022 01:27:20 PM EDT using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.

View File

@@ -1 +1 @@
2.3.110
2.3.130

[Binary screenshot files not shown: two images removed (245 KiB, 168 KiB) and four added (186, 201, 386, 191 KiB)]

View File

@@ -0,0 +1,2 @@
elasticsearch:
index_settings:
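
This pillar file ships empty so that per-index settings can be layered in under elasticsearch:index_settings. As a hypothetical sketch only (the so-logs index name, target pillar file, and shard count are illustrative, and this assumes no elasticsearch block already exists in that file), an override could be appended like so:
```
# Hypothetical override; merge by hand if global.sls already contains an
# elasticsearch block. Index name and shard count are examples only.
cat << EOF >> /opt/so/saltstack/local/pillar/global.sls
elasticsearch:
  index_settings:
    so-logs:
      index_template:
        template:
          settings:
            index:
              number_of_shards: 2
EOF
```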

View File

@@ -2,7 +2,7 @@
{% set cached_grains = salt.saltutil.runner('cache.grains', tgt='*') %}
{% for minionid, ip in salt.saltutil.runner(
'mine.get',
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix ',
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix',
fun='network.ip_addrs',
tgt_type='compound') | dictsort()
%}

View File

@@ -13,4 +13,5 @@ logstash:
- so/9600_output_ossec.conf.jinja
- so/9700_output_strelka.conf.jinja
- so/9800_output_logscan.conf.jinja
- so/9801_output_rita.conf.jinja
- so/9900_output_endgame.conf.jinja

View File

@@ -15,12 +15,12 @@ base:
- logstash
- logstash.manager
- logstash.search
- elasticsearch.search
- elasticsearch.index_templates
'*_manager':
- logstash
- logstash.manager
- elasticsearch.manager
- elasticsearch.index_templates
'*_manager or *_managersearch':
- match: compound
@@ -46,7 +46,7 @@ base:
- zeeklogs
- secrets
- healthcheck.eval
- elasticsearch.eval
- elasticsearch.index_templates
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
{% endif %}
@@ -60,7 +60,7 @@ base:
- logstash
- logstash.manager
- logstash.search
- elasticsearch.search
- elasticsearch.index_templates
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
{% endif %}
@@ -106,7 +106,7 @@ base:
'*_searchnode':
- logstash
- logstash.search
- elasticsearch.search
- elasticsearch.index_templates
- elasticsearch.auth
- global
- minions.{{ grains.id }}
@@ -122,7 +122,7 @@ base:
'*_import':
- zeeklogs
- secrets
- elasticsearch.eval
- elasticsearch.index_templates
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
{% endif %}
@@ -131,3 +131,6 @@ base:
{% endif %}
- global
- minions.{{ grains.id }}
'*_workstation':
- minions.{{ grains.id }}

View File

@@ -1,6 +1,5 @@
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
{% set WAZUH = salt['pillar.get']('global:wazuh', '0') %}
{% set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
{% set FREQSERVER = salt['pillar.get']('manager:freq', '0') %}
{% set DOMAINSTATS = salt['pillar.get']('manager:domainstats', '0') %}
@@ -218,6 +217,8 @@
'schedule',
'docker_clean'
],
'so-workstation': [
],
}, grain='role') %}
{% if FILEBEAT and grains.role in ['so-helixsensor', 'so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-import', 'so-receiver'] %}
@@ -273,10 +274,6 @@
{% do allowed_states.append('elastalert') %}
{% endif %}
{% if (THEHIVE != 0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
{% do allowed_states.append('thehive') %}
{% endif %}
{% if (PLAYBOOK !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
{% do allowed_states.append('playbook') %}
{% endif %}

View File

@@ -300,8 +300,17 @@ sostatus_log:
- month: '*'
- dayweek: '*'
{% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
# Install cron job to determine size of influxdb for telegraf
'du -s -k /nsm/influxdb | cut -f1 > /opt/so/log/telegraf/influxdb_size.log 2>&1':
cron.present:
- user: root
- minute: '*/1'
- hour: '*'
- daymonth: '*'
- month: '*'
- dayweek: '*'
# Lock permissions on the backup directory
backupdir:
file.directory:

View File

@@ -15,295 +15,86 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
if [ "$(id -u)" -ne 0 ]; then
echo "This script must be run using sudo!"
exit 1
fi
doc_workstation_url="https://docs.securityonion.net/en/2.3/analyst-vm.html"
{# we only want the script to install the workstation if it is CentOS -#}
{% if grains.os == 'CentOS' -%}
{# if this is a manager -#}
{% if grains.master == grains.id.split('_')|first -%}
INSTALL_LOG=/root/so-analyst-install.log
exec &> >(tee -a "$INSTALL_LOG")
source /usr/sbin/so-common
pillar_file="/opt/so/saltstack/local/pillar/minions/{{grains.id}}.sls"
log() {
msg=$1
level=${2:-I}
now=$(TZ=GMT date +"%Y-%m-%dT%H:%M:%SZ")
echo -e "$now | $level | $msg" >> "$INSTALL_LOG" 2>&1
}
if [ -f "$pillar_file" ]; then
if ! grep -q "^workstation:$" "$pillar_file"; then
error() {
log "$1" "E"
}
info() {
log "$1" "I"
}
title() {
echo -e "\n-----------------------------\n $1\n-----------------------------\n" >> "$INSTALL_LOG" 2>&1
}
logCmd() {
cmd=$1
info "Executing command: $cmd"
$cmd >> "$INSTALL_LOG" 2>&1
}
analyze_system() {
title "System Characteristics"
logCmd "uptime"
logCmd "uname -a"
logCmd "free -h"
logCmd "lscpu"
logCmd "df -h"
logCmd "ip a"
}
analyze_system
OS=$(grep PRETTY_NAME /etc/os-release | grep 'CentOS Linux 7')
if [ $? -ne 0 ]; then
echo "This is an unsupported OS. Please use CentOS 7 to install the analyst node."
exit 1
fi
if [[ "$manufacturer" == "Security Onion Solutions" && "$family" == "Automated" ]]; then
INSTALL=yes
CURLCONTINUE=no
else
INSTALL=''
CURLCONTINUE=''
fi
FIRSTPASS=yes
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
if [[ "$FIRSTPASS" == "yes" ]]; then
clear
echo "###########################################"
echo "## ** W A R N I N G ** ##"
echo "## _______________________________ ##"
echo "## ##"
echo "## Installing the Security Onion ##"
echo "## analyst node on this device will ##"
echo "## make permanent changes to ##"
echo "## the system. ##"
echo "## ##"
echo "###########################################"
echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)"
FIRSTPASS=no
else
echo "Please type 'yes' to continue or 'no' to exit."
fi
read INSTALL
done
if [[ $INSTALL == "no" ]]; then
echo "Exiting analyst node installation."
exit 0
fi
echo "Testing for internet connection with curl https://securityonionsolutions.com/"
CANCURL=$(curl -sI https://securityonionsolutions.com/ | grep "200 OK")
if [ $? -ne 0 ]; then
FIRSTPASS=yes
while [[ $CURLCONTINUE != "yes" ]] && [[ $CURLCONTINUE != "no" ]]; do
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
if [[ "$FIRSTPASS" == "yes" ]]; then
echo "We could not access https://securityonionsolutions.com/."
echo "Since packages are downloaded from the internet, internet access is required."
echo "If you would like to ignore this warning and continue anyway, please type 'yes'."
echo "Otherwise, type 'no' to exit."
echo "###########################################"
echo "## ** W A R N I N G ** ##"
echo "## _______________________________ ##"
echo "## ##"
echo "## Installing the Security Onion ##"
echo "## analyst node on this device will ##"
echo "## make permanent changes to ##"
echo "## the system. ##"
echo "## A system reboot will be required ##"
echo "## to complete the install. ##"
echo "## ##"
echo "###########################################"
echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)"
FIRSTPASS=no
else
echo "Please type 'yes' to continue or 'no' to exit."
fi
read CURLCONTINUE
fi
read INSTALL
done
if [[ "$CURLCONTINUE" == "no" ]]; then
if [[ $INSTALL == "no" ]]; then
echo "Exiting analyst node installation."
exit 0
fi
else
echo "We were able to curl https://securityonionsolutions.com/."
sleep 3
# Add workstation pillar to the minion's pillar file
printf '%s\n'\
"workstation:"\
" gui:"\
" enabled: true"\
"" >> "$pillar_file"
echo "Applying the workstation state. This could take some time since there are many packages that need to be installed."
if salt-call state.apply workstation -linfo queue=True; then # make sure the state ran successfully
echo ""
echo "Analyst workstation has been installed!"
echo "Press ENTER to reboot or Ctrl-C to cancel."
read pause
reboot;
else
echo "There was an issue applying the workstation state. Please review the log above or at /opt/so/logs/salt/minion."
fi
else # workstation is already added
echo "The workstation pillar already exists in $pillar_file."
echo "To enable/disable the gui, set 'workstation:gui:enabled' to true or false in $pillar_file."
echo "Additional documentation can be found at $doc_workstation_url."
fi
# Install a GUI text editor
yum -y install gedit
# Install misc utils
yum -y install wget curl unzip epel-release yum-plugin-versionlock;
# Install xWindows
yum -y groupinstall "X Window System";
yum -y install gnome-classic-session gnome-terminal nautilus-open-terminal control-center liberation-mono-fonts;
unlink /etc/systemd/system/default.target;
ln -sf /lib/systemd/system/graphical.target /etc/systemd/system/default.target;
yum -y install file-roller
# Install Mono - prereq for NetworkMiner
yum -y install mono-core mono-basic mono-winforms expect
# Install NetworkMiner
yum -y install libcanberra-gtk2;
wget https://www.netresec.com/?download=NetworkMiner -O /tmp/nm.zip;
mkdir -p /opt/networkminer/
unzip /tmp/nm.zip -d /opt/networkminer/;
rm /tmp/nm.zip;
mv /opt/networkminer/NetworkMiner_*/* /opt/networkminer/
chmod +x /opt/networkminer/NetworkMiner.exe;
chmod -R go+w /opt/networkminer/AssembledFiles/;
chmod -R go+w /opt/networkminer/Captures/;
# Create networkminer shim
cat << EOF >> /bin/networkminer
#!/bin/bash
/bin/mono /opt/networkminer/NetworkMiner.exe --noupdatecheck "\$@"
EOF
chmod +x /bin/networkminer
# Convert networkminer ico file to png format
yum -y install ImageMagick
convert /opt/networkminer/networkminericon.ico /opt/networkminer/networkminericon.png
# Create menu entry
cat << EOF >> /usr/share/applications/networkminer.desktop
[Desktop Entry]
Name=NetworkMiner
Comment=NetworkMiner
Encoding=UTF-8
Exec=/bin/networkminer %f
Icon=/opt/networkminer/networkminericon-4.png
StartupNotify=true
Terminal=false
X-MultipleArgs=false
Type=Application
MimeType=application/x-pcap;
Categories=Network;
EOF
# Set default monospace font to Liberation
cat << EOF >> /etc/fonts/local.conf
<match target="pattern">
<test name="family" qual="any">
<string>monospace</string>
</test>
<edit binding="strong" mode="prepend" name="family">
<string>Liberation Mono</string>
</edit>
</match>
EOF
# Install Wireshark for Gnome
yum -y install wireshark-gnome;
# Install dsniff
yum -y install dsniff;
# Install hping3
yum -y install hping3;
# Install netsed
yum -y install netsed;
# Install ngrep
yum -y install ngrep;
# Install scapy
yum -y install python36-scapy;
# Install ssldump
yum -y install ssldump;
# Install tcpdump
yum -y install tcpdump;
# Install tcpflow
yum -y install tcpflow;
# Install tcpxtract
yum -y install tcpxtract;
# Install whois
yum -y install whois;
# Install foremost
yum -y install https://forensics.cert.org/centos/cert/7/x86_64//foremost-1.5.7-13.1.el7.x86_64.rpm;
# Install chromium
yum -y install chromium;
# Install tcpstat
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcpstat-1.5.0/securityonion-tcpstat-1.5.0.rpm;
# Install tcptrace
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcptrace-6.6.7/securityonion-tcptrace-6.6.7.rpm;
# Install sslsplit
yum -y install libevent;
yum -y install sslsplit;
# Install Bit-Twist
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-bittwist-2.0.0/securityonion-bittwist-2.0.0.rpm;
# Install chaosreader
yum -y install perl-IO-Compress perl-Net-DNS;
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-chaosreader-0.95.10/securityonion-chaosreader-0.95.10.rpm;
chmod +x /bin/chaosreader;
if [ -f ../../files/analyst/README ]; then
cp ../../files/analyst/README /;
cp ../../files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
cp ../../files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
cp ../../files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
else
cp /opt/so/saltstack/default/salt/common/files/analyst/README /;
cp /opt/so/saltstack/default/salt/common/files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
cp /opt/so/saltstack/default/salt/common/files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
cp /opt/so/saltstack/default/salt/common/files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
else # if the pillar file doesn't exist
echo "Could not find $pillar_file and add the workstation pillar."
fi
# Set background wallpaper
cat << EOF >> /etc/dconf/db/local.d/00-background
# Specify the dconf path
[org/gnome/desktop/background]
{#- if this is not a manager #}
{% else -%}
# Specify the path to the desktop background image file
picture-uri='file:///usr/share/backgrounds/so-wallpaper.jpg'
# Specify one of the rendering options for the background image:
# 'none', 'wallpaper', 'centered', 'scaled', 'stretched', 'zoom', 'spanned'
picture-options='zoom'
# Specify the left or top color when drawing gradients or the solid color
primary-color='000000'
# Specify the right or bottom color when drawing gradients
secondary-color='FFFFFF'
EOF
echo "Since this is not a manager, the pillar values to enable analyst workstation must be set manually. Please view the documentation at $doc_workstation_url."
# Set lock screen
cat << EOF >> /etc/dconf/db/local.d/00-screensaver
[org/gnome/desktop/session]
idle-delay=uint32 180
{#- endif if this is a manager #}
{% endif -%}
[org/gnome/desktop/screensaver]
lock-enabled=true
lock-delay=uint32 120
picture-options='zoom'
picture-uri='file:///usr/share/backgrounds/so-lockscreen.jpg'
EOF
{#- if not CentOS #}
{%- else %}
cat << EOF >> /etc/dconf/db/local.d/locks/screensaver
/org/gnome/desktop/session/idle-delay
/org/gnome/desktop/screensaver/lock-enabled
/org/gnome/desktop/screensaver/lock-delay
EOF
echo "The Analyst Workstation can only be installed on CentOS. Please view the documentation at $doc_workstation_url."
# Do not show the user list at login screen
cat << EOF >> /etc/dconf/db/local.d/00-login-screen
[org/gnome/login-screen]
logo='/usr/share/pixmaps/so-login-logo-dark.svg'
disable-user-list=true
EOF
{#- endif grains.os == CentOS #}
{% endif -%}
dconf update;
echo
echo "Analyst workstation has been installed!"
echo "Press ENTER to reboot or Ctrl-C to cancel."
read pause
reboot;
exit 0

View File

@@ -120,6 +120,30 @@ check_elastic_license() {
fi
}
check_salt_master_status() {
local timeout=$1
echo "Checking if we can talk to the salt master"
salt-call state.show_top concurrent=true
return
}
check_salt_minion_status() {
local timeout=$1
echo "Checking if the salt minion will respond to jobs" >> "$setup_log" 2>&1
salt "$MINION_ID" test.ping -t $timeout > /dev/null 2>&1
local status=$?
if [ $status -gt 0 ]; then
echo " Minion did not respond" >> "$setup_log" 2>&1
else
echo " Received job response from salt minion" >> "$setup_log" 2>&1
fi
return $status
}
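Both checks are meant to gate the upgrade before any files move. An illustrative call site, not part of the original script (the 30-second timeout is an example value):
```
# Abort early if the local minion is not answering jobs.
if ! check_salt_minion_status 30; then
    echo "Salt minion unresponsive; aborting." >> "$setup_log" 2>&1
    exit 1
fi
```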
copy_new_files() {
# Copy new files over to the salt dir
cd $UPDATE_DIR
@@ -367,6 +391,7 @@ run_check_net_err() {
exit $exit_code
fi
}
set_cron_service_name() {
if [[ "$OS" == "centos" ]]; then
cron_service_name="crond"

View File

@@ -17,5 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-stop cortex $1
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-stop cortex $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,38 +17,4 @@
. /usr/sbin/so-common
usage() {
echo "Usage: $0 <new-user-name>"
echo ""
echo "Adds a new user to Cortex. The new password will be read from STDIN."
exit 1
}
if [ $# -ne 1 ]; then
usage
fi
USER=$1
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
CORTEX_ORG_NAME=$(lookup_pillar cortexorgname)
CORTEX_USER=$USER
# Read password for new user from stdin
test -t 0
if [[ $? == 0 ]]; then
echo "Enter new password:"
fi
read -rs CORTEX_PASS
# Create new user in Cortex
resp=$(curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user" -d "{\"name\": \"$CORTEX_USER\",\"roles\": [\"read\",\"analyze\",\"orgadmin\"],\"organization\": \"$CORTEX_ORG_NAME\",\"login\": \"$CORTEX_USER\",\"password\" : \"$CORTEX_PASS\" }")
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
echo "Successfully added user to Cortex."
else
echo "Unable to add user to Cortex; user might already exist."
echo $resp
exit 2
fi
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,41 +17,4 @@
. /usr/sbin/so-common
usage() {
echo "Usage: $0 <user-name> <true|false>"
echo ""
echo "Enables or disables a user in Cortex."
exit 1
}
if [ $# -ne 2 ]; then
usage
fi
USER=$1
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
CORTEX_USER=$USER
case "${2^^}" in
FALSE | NO | 0)
CORTEX_STATUS=Locked
;;
TRUE | YES | 1)
CORTEX_STATUS=Ok
;;
*)
usage
;;
esac
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user/${CORTEX_USER}" -d "{\"status\":\"${CORTEX_STATUS}\" }")
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
echo "Successfully updated user in Cortex."
else
echo "Failed to update user in Cortex."
echo $resp
exit 2
fi
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,9 +17,7 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('manager:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
ESPORT=9200
THEHIVEESPORT=9400
echo "Removing read only attributes for indices..."
echo
{{ ELASTICCURL }} -s -k -XPUT -H "Content-Type: application/json" -L https://$IP:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
{{ ELASTICCURL }} -XPUT -H "Content-Type: application/json" -L http://$IP:9400/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;

View File

@@ -75,9 +75,6 @@ container_list() {
"so-strelka-manager"
"so-suricata"
"so-telegraf"
"so-thehive"
"so-thehive-cortex"
"so-thehive-es"
"so-wazuh"
"so-zeek"
)

View File

@@ -18,7 +18,7 @@
. /usr/sbin/so-common
# Check to see if we are already running
IS_RUNNING=$(ps aux | pgrep -f "so-playbook-sync" | wc -l)
[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - Multiple Playbook Sync processes already running...exiting." && exit 0
NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-playbook-sync")
[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING Playbook sync processes running...exiting." && exit 0
docker exec so-soctopus python3 playbook_play-sync.py

View File

@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Usage: so-restart filebeat | kibana | playbook | thehive
# Usage: so-restart filebeat | kibana | playbook
. /usr/sbin/so-common
@@ -31,7 +31,6 @@ if [ $# -ge 1 ]; then
fi
case $1 in
"cortex") docker stop so-thehive-cortex so-thehive && docker rm so-thehive-cortex so-thehive && salt-call state.apply hive queue=True;;
"steno") docker stop so-steno && docker rm so-steno && salt-call state.apply pcap queue=True;;
*) docker stop so-$1 ; docker rm so-$1 ; salt-call state.apply $1 queue=True;;
esac

View File

@@ -32,11 +32,17 @@ copy_new_files() {
# Copy new files over to the salt dir
cd /tmp/sogh/securityonion
git checkout $BRANCH
VERSION=$(cat VERSION)
# We need to overwrite if there is a repo file
if [ -d /opt/so/repo ]; then
tar -czf /opt/so/repo/"$VERSION".tar.gz -C "$(pwd)/.." .
fi
rsync -a salt $default_salt_dir/
rsync -a pillar $default_salt_dir/
chown -R socore:socore $default_salt_dir/salt
chown -R socore:socore $default_salt_dir/pillar
chmod 755 $default_salt_dir/pillar/firewall/addfirewall.sh
rm -rf /tmp/sogh
}

View File

@@ -115,8 +115,8 @@ clean() {
}
# Check to see if we are already running
IS_RUNNING=$(ps aux | pgrep -f "so-sensor-clean" | wc -l)
[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-sensor-clean")
[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do

View File

@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Usage: so-start all | filebeat | kibana | playbook | thehive
# Usage: so-start all | filebeat | kibana | playbook
. /usr/sbin/so-common

View File

@@ -17,5 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-stop thehive-es $1
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-stop thehive-es $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-restart thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,4 +17,4 @@
. /usr/sbin/so-common
/usr/sbin/so-stop thehive $1
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,38 +17,4 @@
. /usr/sbin/so-common
usage() {
echo "Usage: $0 <new-user-name>"
echo ""
echo "Adds a new user to TheHive. The new password will be read from STDIN."
exit 1
}
if [ $# -ne 1 ]; then
usage
fi
USER=$1
THEHIVE_KEY=$(lookup_pillar hivekey)
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
THEHIVE_USER=$USER
# Read password for new user from stdin
test -t 0
if [[ $? == 0 ]]; then
echo "Enter new password:"
fi
read -rs THEHIVE_PASS
check_password_and_exit "$THEHIVE_PASS"
# Create new user in TheHive
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user" -d "{\"login\" : \"$THEHIVE_USER\",\"name\" : \"$THEHIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$THEHIVE_PASS\"}")
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
echo "Successfully added user to TheHive"
else
echo "Unable to add user to TheHive; user might already exist"
echo $resp
exit 2
fi
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,41 +17,4 @@
. /usr/sbin/so-common
usage() {
echo "Usage: $0 <user-name> <true|false>"
echo ""
echo "Enables or disables a user in TheHive."
exit 1
}
if [ $# -ne 2 ]; then
usage
fi
USER=$1
THEHIVE_KEY=$(lookup_pillar hivekey)
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
THEHIVE_USER=$USER
case "${2^^}" in
FALSE | NO | 0)
THEHIVE_STATUS=Locked
;;
TRUE | YES | 1)
THEHIVE_STATUS=Ok
;;
*)
usage
;;
esac
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user/${THEHIVE_USER}" -d "{\"status\":\"${THEHIVE_STATUS}\" }")
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
echo "Successfully updated user in TheHive"
else
echo "Failed to update user in TheHive"
echo "$resp"
exit 2
fi
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -17,41 +17,4 @@
. /usr/sbin/so-common
usage() {
echo "Usage: $0 <user-name>"
echo ""
echo "Update password for an existing TheHive user. The new password will be read from STDIN."
exit 1
}
if [ $# -ne 1 ]; then
usage
fi
USER=$1
THEHIVE_KEY=$(lookup_pillar hivekey)
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
THEHIVE_USER=$USER
# Read password for new user from stdin
test -t 0
if [[ $? == 0 ]]; then
echo "Enter new password:"
fi
read -rs THEHIVE_PASS
if ! check_password "$THEHIVE_PASS"; then
echo "Password is invalid. Please exclude single quotes, double quotes, dollar signs, and backslashes from the password."
exit 2
fi
# Change password for user in TheHive
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user/${THEHIVE_USER}/password/set" -d "{\"password\" : \"$THEHIVE_PASS\"}")
if [[ -z "$resp" ]]; then
echo "Successfully updated TheHive user password"
else
echo "Unable to update TheHive user password"
echo $resp
exit 2
fi
echo "TheHive and its components are no longer part of Security Onion"

View File

@@ -44,7 +44,7 @@ operation=$1
email=$2
role=$3
kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434}
kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434/admin}
databasePath=${KRATOS_DB_PATH:-/opt/so/conf/kratos/db/db.sqlite}
databaseTimeout=${KRATOS_DB_TIMEOUT:-5000}
bcryptRounds=${BCRYPT_ROUNDS:-12}
@@ -408,7 +408,7 @@ function migrateLockedUsers() {
# This is a migration function to convert locked users from prior to 2.3.90
# to inactive users using the newer Kratos functionality. This should only
# find locked users once.
lockedEmails=$(curl -s http://localhost:4434/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email')
lockedEmails=$(curl -s ${kratosUrl}/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email')
if [[ -n "$lockedEmails" ]]; then
echo "Disabling locked users..."
for email in $lockedEmails; do
@@ -476,7 +476,6 @@ case "${operation}" in
createUser "$email" "${role:-$DEFAULT_ROLE}"
syncAll
echo "Successfully added new user to SOC"
check_container thehive && echo "$password" | so-thehive-user-add "$email"
check_container fleet && echo "$password" | so-fleet-user-add "$email"
;;
@@ -528,7 +527,6 @@ case "${operation}" in
updateStatus "$email" 'active'
syncAll
echo "Successfully enabled user"
check_container thehive && so-thehive-user-enable "$email" true
echo "Fleet user will need to be recreated manually with so-fleet-user-add"
;;
@@ -540,7 +538,6 @@ case "${operation}" in
updateStatus "$email" 'locked'
syncAll
echo "Successfully disabled user"
check_container thehive && so-thehive-user-enable "$email" false
check_container fleet && so-fleet-user-delete "$email"
;;
@@ -552,7 +549,6 @@ case "${operation}" in
deleteUser "$email"
syncAll
echo "Successfully deleted user"
check_container thehive && so-thehive-user-enable "$email" false
check_container fleet && so-fleet-user-delete "$email"
;;

View File

@@ -34,7 +34,15 @@ check_err() {
local err_msg="Unhandled error occured, please check $SOUP_LOG for details."
[[ $ERR_HANDLED == true ]] && exit $exit_code
if [[ $exit_code -ne 0 ]]; then
set +e
systemctl_func "start" "$cron_service_name"
systemctl_func "start" "salt-master"
systemctl_func "start" "salt-minion"
enable_highstate
printf '%s' "Soup failed with error $exit_code: "
case $exit_code in
2)
@@ -91,9 +99,7 @@ check_err() {
if [[ $exit_code -ge 64 && $exit_code -le 113 ]]; then
echo "$err_msg"
fi
set +e
systemctl_func "start" "$cron_service_name"
enable_highstate
exit $exit_code
fi
@@ -416,6 +422,8 @@ preupgrade_changes() {
[[ "$INSTALLEDVERSION" == 2.3.80 ]] && up_to_2.3.90
[[ "$INSTALLEDVERSION" == 2.3.90 || "$INSTALLEDVERSION" == 2.3.91 ]] && up_to_2.3.100
[[ "$INSTALLEDVERSION" == 2.3.100 ]] && up_to_2.3.110
[[ "$INSTALLEDVERISON" == 2.3.110 ]] && up_to_2.3.120
[[ "$INSTALLEDVERISON" == 2.3.120 ]] && up_to_2.3.130
true
}
@@ -429,6 +437,10 @@ postupgrade_changes() {
[[ "$POSTVERSION" == 2.3.60 || "$POSTVERSION" == 2.3.61 || "$POSTVERSION" == 2.3.70 || "$POSTVERSION" == 2.3.80 ]] && post_to_2.3.90
[[ "$POSTVERSION" == 2.3.90 || "$POSTVERSION" == 2.3.91 ]] && post_to_2.3.100
[[ "$POSTVERSION" == 2.3.100 ]] && post_to_2.3.110
[[ "$POSTVERSION" == 2.3.110 ]] && post_to_2.3.120
[[ "$POSTVERSION" == 2.3.120 ]] && post_to_2.3.130
true
}
@@ -492,6 +504,19 @@ post_to_2.3.110() {
POSTVERSION=2.3.110
}
post_to_2.3.120() {
echo "Post Processing for 2.3.120"
POSTVERSION=2.3.120
sed -i '/so-thehive-es/d;/so-thehive/d;/so-cortex/d' /opt/so/conf/so-status/so-status.conf
}
post_to_2.3.130() {
echo "Post Processing for 2.3.130"
POSTVERSION=2.3.130
}
stop_salt_master() {
# kill all salt jobs across the grid because they hang indefinitely if they are queued and salt-master restarts
set +e
@@ -728,9 +753,6 @@ up_to_2.3.90() {
up_to_2.3.100() {
fix_wazuh
echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again."
rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched
echo "Adding receiver hostgroup with so-firewall"
if so-firewall addhostgroup receiver 2>&1 | grep -q 'Already exists'; then
echo 'receiver hostgroup already exists'
@@ -743,11 +765,21 @@ up_to_2.3.100() {
}
up_to_2.3.110() {
echo "Updating to Security Onion 2.3.110"
echo "Updating shard settings for Elasticsearch index templates"
sed -i 's|shards|index_template:\n template:\n settings:\n index:\n number_of_shards|g' /opt/so/saltstack/local/pillar/global.sls
}
up_to_2.3.120() {
# Stop thehive services since these will be broken in .120
so-thehive-stop
so-thehive-es-stop
so-cortex-stop
}
up_to_2.3.130() {
# Remove file for nav update
rm -f /opt/so/conf/navigator/layers/nav_layer_playbook.json
}
verify_upgradespace() {
CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//')
if [ "$CURRENTSPACE" -lt "10" ]; then
@@ -770,29 +802,6 @@ upgrade_space() {
fi
}
thehive_maint() {
echo -n "Waiting for TheHive..."
COUNT=0
THEHIVE_CONNECTED="no"
while [[ "$COUNT" -le 240 ]]; do
curl --output /dev/null --silent --head --fail -k "https://localhost/thehive/api/alert"
if [ $? -eq 0 ]; then
THEHIVE_CONNECTED="yes"
echo "connected!"
break
else
((COUNT+=1))
sleep 1
echo -n "."
fi
done
if [ "$THEHIVE_CONNECTED" == "yes" ]; then
echo "Migrating thehive databases if needed."
curl -v -k -XPOST -L "https://localhost/thehive/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
curl -v -k -XPOST -L "https://localhost/cortex/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
fi
}
unmount_update() {
cd /tmp
umount /tmp/soagupdate
@@ -908,6 +917,8 @@ upgrade_salt() {
else
echo "Salt upgrade success."
echo ""
echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again."
rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched
fi
}
@@ -946,6 +957,7 @@ update_repo() {
exit 1
fi
rm -f /etc/apt/sources.list.d/salt.list
echo "deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/$ubuntu_version/amd64/salt $OSVER main" > /etc/apt/sources.list.d/saltstack.list
apt-get update
fi
@@ -1036,6 +1048,17 @@ main() {
echo "### Preparing soup at $(date) ###"
echo ""
set_os
set_cron_service_name
if ! check_salt_master_status; then
echo "Could not talk to salt master"
echo "Please run 'systemctl status salt-master' to ensure the salt-master service is running and check the log at /opt/so/log/salt/master."
echo "SOUP will now attempt to start the salt-master service and exit."
exit 1
fi
echo "This node can communicate with the salt-master."
echo "Checking to see if this is a manager."
echo ""
require_manager
@@ -1071,8 +1094,6 @@ main() {
echo "Verifying we have the latest soup script."
verify_latest_update_script
echo ""
set_os
set_cron_service_name
set_palette
check_elastic_license
echo ""
@@ -1094,6 +1115,12 @@ main() {
upgrade_check_salt
set -e
if [[ $is_airgap -eq 0 ]]; then
update_centos_repo
yum clean all
check_os_updates
fi
if [ "$is_hotfix" == "true" ]; then
echo "Applying $HOTFIXVERSION hotfix"
copy_new_files
@@ -1115,9 +1142,6 @@ main() {
echo "Updating dockers to $NEWVERSION."
if [[ $is_airgap -eq 0 ]]; then
airgap_update_dockers
update_centos_repo
yum clean all
check_os_updates
# if not airgap but -f was used
elif [[ ! -z "$ISOLOC" ]]; then
airgap_update_dockers
@@ -1218,7 +1242,6 @@ main() {
salt-call state.highstate -l info queue=True
postupgrade_changes
[[ $is_airgap -eq 0 ]] && unmount_update
thehive_maint
echo ""
echo "Upgrade to $NEWVERSION complete."

View File

@@ -4067,7 +4067,7 @@ elasticsearch:
field: "@timestamp"
order: desc
refresh_interval: 30s
number_of_shards: 1
number_of_shards: 2
number_of_replicas: 0
composed_of:
- agent-mappings

View File

@@ -0,0 +1,127 @@
{
"description": "RITA Beacons",
"processors": [
{
"set": {
"field": "_index",
"value": "so-rita",
"override": true
}
},
{
"csv": {
"field": "message",
"target_fields": [
"beacon.score",
"source.ip",
"destination.ip",
"network.connections",
"network.average_bytes",
"beacon.interval.range",
"beacon.size.range",
"beacon.interval.top",
"beacon.size.top",
"beacon.interval.top_count",
"beacon.size.top_count",
"beacon.interval.skew",
"beacon.size.skew",
"beacon.interval.dispersion",
"beacon.size.dispersion",
"network.bytes"
]
}
},
{
"convert": {
"field": "beacon.score",
"type": "float"
}
},
{
"convert": {
"field": "network.connections",
"type": "integer"
}
},
{
"convert": {
"field": "network.average_bytes",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.interval.range",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.size.range",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.interval.top",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.size.top",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.interval.top_count",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.size.top_count",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.interval.skew",
"type": "float"
}
},
{
"convert": {
"field": "beacon.size.skew",
"type": "float"
}
},
{
"convert": {
"field": "beacon.interval.dispersion",
"type": "integer"
}
},
{
"convert": {
"field": "beacon.size.dispersion",
"type": "integer"
}
},
{
"convert": {
"field": "network.bytes",
"type": "integer"
}
},
{ "set": { "if": "ctx.beacon?.score == 1", "field": "dataset", "value": "alert", "override": true }},
{ "set": { "if": "ctx.beacon?.score == 1", "field": "rule.name", "value": "Potential C2 Beacon Activity", "override": true }},
{ "set": { "if": "ctx.beacon?.score == 1", "field": "event.severity", "value": 3, "override": true }},
{
"pipeline": {
"name": "common"
}
}
]
}
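
The pipeline can be tested without shipping a real beacons.csv by simulating it against a fabricated row (all sixteen CSV values below are made up). This assumes the common pipeline referenced at the end is already loaded:
```
# Simulate the rita.beacon pipeline against one fake CSV row.
so-elasticsearch-query _ingest/pipeline/rita.beacon/_simulate -XPOST -d '
{
  "docs": [
    { "_source": { "message": "1,10.0.0.5,203.0.113.7,900,512,10,0,60,512,850,900,0,0,1,0,460800" } }
  ]
}'
```
A beacon.score of 1 should come back with dataset set to alert and rule.name set to Potential C2 Beacon Activity, per the conditional set processors above.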

View File

@@ -0,0 +1,36 @@
{
"description": "RITA Connections",
"processors": [
{
"set": {
"field": "_index",
"value": "so-rita",
"override": true
}
},
{
"dissect": {
"field": "message",
"pattern": "%{source.ip},%{destination.ip},%{network.port}:%{network.protocol}:%{network.service},%{connection.duration},%{connection.state}"
}
},
{
"convert": {
"field": "connection.duration",
"type": "float"
}
},
{
"set": {
"field": "event.duration",
"value": "{{ connection.duration }}",
"override": true
}
},
{
"pipeline": {
"name": "common"
}
}
]
}

View File

@@ -0,0 +1,39 @@
{
"description": "RITA DNS",
"processors": [
{
"set": {
"field": "_index",
"value": "so-rita",
"override": true
}
},
{
"csv": {
"field": "message",
"target_fields": [
"dns.question.name",
"dns.question.subdomain_count",
"dns.question.count"
]
}
},
{
"convert": {
"field": "dns.question.subdomain_count",
"type": "integer"
}
},
{
"convert": {
"field": "dns.question.count",
"type": "integer"
}
},
{
"pipeline": {
"name": "common"
}
}
]
}

View File

@@ -1,36 +1,157 @@
{
"description" : "syslog",
"description" : "syslog pipeline",
"processors" : [
{
"dissect": {
"field": "message",
"pattern" : "%{message}",
"on_failure": [ { "drop" : { } } ]
},
"remove": {
"field": [ "type", "agent" ],
"ignore_failure": true
}
"dissect": {
"field": "message",
"pattern" : "%{message}",
"on_failure": [ { "drop" : { } } ]
},
"remove": {
"field": [ "type", "agent" ],
"ignore_failure": true
}
}, {
"grok": {
"field": "message",
"patterns": [
"^<%{INT:syslog.priority:int}>%{TIMESTAMP_ISO8601:syslog.timestamp} +%{IPORHOST:syslog.host} +%{PROG:syslog.program}(?:\\[%{POSINT:syslog.pid:int}\\])?: %{GREEDYDATA:real_message}$",
"^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$",
"^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$"
],
"ignore_failure": true
}
},
{
"grok":
{
"field": "message",
"patterns": [
"^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$",
"^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$"
],
"ignore_failure": true
}
"convert" : {
"if": "ctx?.syslog?.priority != null",
"field" : "syslog.priority",
"type": "integer"
}
},
{ "set": { "if": "ctx.source?.application == 'filterlog'", "field": "dataset", "value": "firewall", "ignore_failure": true } },
{ "set": { "if": "ctx.vendor != null", "field": "module", "value": "{{ vendor }}", "ignore_failure": true } },
{ "set": { "if": "ctx.product != null", "field": "dataset", "value": "{{ product }}", "ignore_failure": true } },
{ "set": { "field": "event.ingested", "value": "{{ @timestamp }}" } },
{ "date": { "if": "ctx.syslog?.timestamp != null", "field": "syslog.timestamp", "target_field": "@timestamp", "formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"], "ignore_failure": true } },
{ "remove": { "field": ["pid", "program"], "ignore_missing": true, "ignore_failure": true } },
{ "pipeline": { "if": "ctx.vendor != null && ctx.product != null", "name": "{{ vendor }}.{{ product }}", "ignore_failure": true } },
{ "pipeline": { "if": "ctx.dataset == 'firewall'", "name": "filterlog", "ignore_failure": true } },
{ "pipeline": { "name": "common" } }
{
"script": {
"description": "Map syslog priority into facility and level",
"lang": "painless",
"params" : {
"level": [
"emerg",
"alert",
"crit",
"err",
"warn",
"notice",
"info",
"debug"
],
"facility" : [
"kern",
"user",
"mail",
"daemon",
"auth",
"syslog",
"lpr",
"news",
"uucp",
"cron",
"authpriv",
"ftp",
"ntp",
"security",
"console",
"solaris-cron",
"local0",
"local1",
"local2",
"local3",
"local4",
"local5",
"local6",
"local7"
]
},
"source": "if (ctx['syslog'] != null && ctx['syslog']['priority'] != null) { int p = ctx['syslog']['priority']; int f = p / 8; int l = p - (f * 8); ctx['syslog']['facility_label'] = [ : ]; ctx['syslog']['severity_label'] = [ : ]; ctx['syslog'].put('severity', l); ctx['syslog'].put('severity_label', params.level[l].toUpperCase()); ctx['syslog'].put('facility', f); ctx['syslog'].put('facility_label', params.facility[f].toUpperCase()); }"
}
},
{
"set": {
"if": "ctx.syslog?.host != null",
"field": "host.name",
"value": "{{ syslog.host }}",
"ignore_failure": true
}
}, {
"set": {
"if": "ctx.syslog?.program != null",
"field": "process.name",
"value": "{{ syslog.program }}",
"ignore_failure": true
}
}, {
"set": {
"if": "ctx.syslog?.pid != null",
"field": "process.id",
"value": "{{ syslog.pid }}",
"ignore_failure": true
}
}, {
"set": {
"if": "ctx.source?.application == 'filterlog'",
"field": "dataset",
"value": "firewall",
"ignore_failure": true
}
}, {
"set": {
"if": "ctx.vendor != null",
"field": "module",
"value": "{{ vendor }}",
"ignore_failure": true
}
}, {
"set": {
"if": "ctx.product != null",
"field": "dataset",
"value": "{{ product }}",
"ignore_failure": true
}
}, {
"set": {
"field": "ingest.timestamp",
"value": "{{ @timestamp }}"
}
}, {
"date": {
"if": "ctx.syslog?.timestamp != null",
"field": "syslog.timestamp",
"target_field": "@timestamp",
"formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"],
"ignore_failure": true
}
}, {
"remove": {
"field": ["pid", "program"],
"ignore_missing": true,
"ignore_failure": true
}
}, {
"pipeline": {
"if": "ctx.vendor != null && ctx.product != null",
"name": "{{ vendor }}.{{ product }}",
"ignore_failure": true
}
}, {
"pipeline": {
"if": "ctx.dataset == 'firewall'",
"name": "filterlog",
"ignore_failure": true
}
}, {
"pipeline": { "name": "common" }
}
]
}
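
The facility/severity split in the script processor is plain integer math: facility = priority / 8 and severity = priority % 8. A quick shell check with a hypothetical priority of 165:
```
# Decode a syslog priority the same way the painless script does.
priority=165
facility=$(( priority / 8 ))   # 20 -> local4
severity=$(( priority % 8 ))   # 5  -> notice
echo "facility=$facility severity=$severity"
```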

View File

@@ -11,10 +11,17 @@ appender.rolling.name = rolling
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
appender.rolling.policies.time.modulate = true
appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = /var/log/elasticsearch
appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = *.gz
appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified
appender.rolling.strategy.action.condition.nested_condition.age = 7D
rootLogger.level = info
rootLogger.appenderRef.rolling.ref = rolling

View File

@@ -207,6 +207,7 @@ escomponenttemplates:
# Auto-generate templates from defaults file
{% for index, settings in ES_INDEX_SETTINGS.items() %}
{% if settings.index_template is defined %}
es_index_template_{{index}}:
file.managed:
- name: /opt/so/conf/elasticsearch/templates/index/{{ index }}-template.json
@@ -216,6 +217,7 @@ es_index_template_{{index}}:
- template: jinja
- onchanges_in:
- cmd: so-elasticsearch-templates
{% endif %}
{% endfor %}
{% if TEMPLATES %}

View File

@@ -1,7 +1,9 @@
{% import_yaml 'elasticsearch/defaults.yaml' as ESCONFIG with context %}
{%- set ES_INDEX_SETTINGS = salt['pillar.get']('elasticsearch:index_settings', default=ESCONFIG.elasticsearch.index_settings, merge=True) %}
{% for index, settings in ES_INDEX_SETTINGS.items() %}
{% if settings.index_sorting, False %}
{% do settings.index_template.template.settings.index.pop('sort') %}
{% if settings.index_template is defined %}
{% if not settings.get('index_sorting', False) | to_bool and settings.index_template.template.settings.index.sort is defined %}
{% do settings.index_template.template.settings.index.pop('sort') %}
{% endif %}
{% endif %}
{% endfor %}

View File

@@ -60,6 +60,32 @@
},
"type": "wildcard"
},
"entity_id": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"executable": {
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
},
"ignore_above": 1024,
"type": "keyword"
},
"name": {
"fields": {
"keyword": {
@@ -73,6 +99,133 @@
"ignore_above": 1024,
"type": "keyword"
},
"parent": {
"properties": {
"command_line": {
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"text": {
"type": "match_only_text"
},
"keyword": {
"type": "keyword"
}
},
"type": "wildcard"
},
"entity_id": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"executable": {
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
},
"ignore_above": 1024,
"type": "keyword"
}
}
},
"pe": {
"properties": {
"architecture": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"company": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"description": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"file_version": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"original_file_name": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"product": {
"ignore_above": 1024,
"type": "keyword",
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
}
}
}
},
"pid": {
"type": "long",
"fields": {
@@ -88,6 +241,19 @@
"type": "keyword"
}
}
},
"working_directory": {
"fields": {
"security": {
"type": "text",
"analyzer": "es_security_analyzer"
},
"keyword": {
"type": "keyword"
}
},
"ignore_above": 1024,
"type": "keyword"
}
}
}

View File

@@ -33,6 +33,8 @@ while [[ "$COUNT" -le 240 ]]; do
if [ $? -eq 0 ]; then
ELASTICSEARCH_CONNECTED="yes"
echo "connected!"
# Check cluster health once connected
so-elasticsearch-query _cluster/health?wait_for_status=yellow > /dev/null 2>&1
break
else
((COUNT+=1))
@@ -48,7 +50,7 @@ fi
cd ${ELASTICSEARCH_ROLES}
echo "Loading templates..."
echo "Loading roles..."
for role in *; do
name=$(echo "$role" | cut -d. -f1)
so-elasticsearch-query _security/role/$name -XPUT -d @"$role"

View File

@@ -30,7 +30,7 @@ echo -n "Waiting for ElasticSearch..."
COUNT=0
ELASTICSEARCH_CONNECTED="no"
while [[ "$COUNT" -le 240 ]]; do
{{ ELASTICCURL }} -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
so-elasticsearch-query -k --output /dev/null --silent --head --fail
if [ $? -eq 0 ]; then
ELASTICSEARCH_CONNECTED="yes"
echo "connected!"
@@ -50,21 +50,20 @@ fi
cd ${ELASTICSEARCH_TEMPLATES}/component/ecs
echo "Loading ECS component templates..."
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE-mappings"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_component_template/$TEMPLATE-mappings -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
echo
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE-mappings"; so-elasticsearch-query _component_template/$TEMPLATE-mappings -d@$i -XPUT 2>/dev/null; echo; done
# Load SO-specific component templates
cd ${ELASTICSEARCH_TEMPLATES}/component/so
echo "Loading Security Onion component templates..."
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_component_template/$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE"; so-elasticsearch-query _component_template/$TEMPLATE -d@$i -XPUT 2>/dev/null; echo; done
echo
# Load SO index templates
cd ${ELASTICSEARCH_TEMPLATES}/index
echo "Loading Security Onion index templates..."
for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_index_template/so-$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; so-elasticsearch-query _index_template/so-$TEMPLATE -d@$i -XPUT 2>/dev/null; echo; done
echo
cd - >/dev/null

View File

@@ -10,6 +10,7 @@
{%- set ZEEKVER = salt['pillar.get']('global:mdengine', 'COMMUNITY') %}
{%- set WAZUHENABLED = salt['pillar.get']('global:wazuh', '0') %}
{%- set STRELKAENABLED = salt['pillar.get']('strelka:enabled', '0') %}
{%- set RITAENABLED = salt['pillar.get']('rita:enabled', False) -%}
{%- set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) -%}
{%- set FLEETNODE = salt['pillar.get']('global:fleet_node', False) -%}
{%- set FBMEMEVENTS = salt['pillar.get']('filebeat:mem_events', 2048) -%}
@@ -264,6 +265,57 @@ filebeat.inputs:
{%- endif %}
{%- if RITAENABLED %}
- type: filestream
id: rita-beacon
paths:
- /nsm/rita/beacons.csv
exclude_lines: ['^Score', '^Source', '^Domain', '^No results']
fields:
module: rita
dataset: beacon
category: network
processors:
- drop_fields:
fields: ["source", "prospector", "input", "offset", "beat"]
fields_under_root: true
pipeline: "rita.beacon"
index: "so-rita"
- type: filestream
id: rita-connection
paths:
- /nsm/rita/long-connections.csv
- /nsm/rita/open-connections.csv
exclude_lines: ['^Source', '^No results']
fields:
module: rita
dataset: connection
category: network
processors:
- drop_fields:
fields: ["source", "prospector", "input", "offset", "beat"]
fields_under_root: true
pipeline: "rita.connection"
index: "so-rita"
- type: filestream
id: rita-dns
paths:
- /nsm/rita/exploded-dns.csv
exclude_lines: ['^Domain', '^No results']
fields:
module: rita
dataset: dns
category: network
processors:
- drop_fields:
fields: ["source", "prospector", "input", "offset", "beat"]
fields_under_root: true
pipeline: "rita.dns"
index: "so-rita"
{%- endif %}
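These inputs render only when the rita:enabled pillar is truthy (the RITAENABLED flag set at the top of this template). A hypothetical local pillar toggle, assuming no rita block already exists in global.sls:
```
# Hypothetical: enable the RITA filebeat inputs via the local pillar,
# then re-apply the filebeat state to regenerate the config.
cat << EOF >> /opt/so/saltstack/local/pillar/global.sls
rita:
  enabled: True
EOF
salt-call state.apply filebeat queue=True
```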
{%- if grains['role'] in ['so-eval', 'so-standalone', 'so-manager', 'so-managersearch', 'so-import'] %}
- type: log
paths:
@@ -394,6 +446,13 @@ output.logstash:
# The Logstash hosts
hosts:
{# dont let filebeat send to a node designated as dmz #}
{% import_yaml 'logstash/dmz_nodes.yaml' as dmz_nodes -%}
{% if dmz_nodes.logstash.dmz_nodes -%}
{% set dmz_nodes = dmz_nodes.logstash.dmz_nodes -%}
{% else -%}
{% set dmz_nodes = [] -%}
{% endif -%}
{%- if grains.role in ['so-sensor', 'so-fleet', 'so-node', 'so-idh'] %}
{%- set LOGSTASH = namespace() %}
{%- set LOGSTASH.count = 0 %}
@@ -402,8 +461,10 @@ output.logstash:
{%- for node_type, node_details in node_data.items() | sort -%}
{%- if node_type in ['manager', 'managersearch', 'standalone', 'receiver' ] %}
{%- for hostname in node_data[node_type].keys() %}
{%- set LOGSTASH.count = LOGSTASH.count + 1 %}
{%- if hostname not in dmz_nodes %}
{%- set LOGSTASH.count = LOGSTASH.count + 1 %}
- "{{ hostname }}:5644" #{{ node_details[hostname].ip }}
{%- endif %}
{%- endfor %}
{%- endif %}
{%- if LOGSTASH.count > 1 %}

View File

@@ -349,6 +349,9 @@ role:
osquery_endpoint:
portgroups:
- {{ portgroups.fleet_api }}
strelka_frontend:
portgroups:
- {{ portgroups.strelka_frontend }}
syslog:
portgroups:
- {{ portgroups.syslog }}
@@ -482,6 +485,9 @@ role:
self:
portgroups:
- {{ portgroups.syslog}}
strelka_frontend:
portgroups:
- {{ portgroups.strelka_frontend }}
INPUT:
hostgroups:
anywhere:
@@ -511,6 +517,9 @@ role:
self:
portgroups:
- {{ portgroups.syslog}}
strelka_frontend:
portgroups:
- {{ portgroups.strelka_frontend }}
INPUT:
hostgroups:
anywhere:

View File

@@ -19,11 +19,37 @@
{% set VERSION = salt['pillar.get']('global:soversion', 'HH1.2.2') %}
{% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
{% set MANAGER = salt['grains.get']('master') %}
{% set MAININT = salt['pillar.get']('host:mainint') %}
{% set MAINIP = salt['grains.get']('ip_interfaces').get(MAININT)[0] %}
{% set RESTRICTIDHSERVICES = salt['pillar.get']('idh:restrict_management_ip', False) %}
include:
- idh.openssh.config
- firewall
# IDH State
# If True, block IDH Services from accepting connections on Management IP
{% if RESTRICTIDHSERVICES %}
{% from 'idh/opencanary_config.map.jinja' import OPENCANARYCONFIG %}
{% set idh_services = salt['pillar.get']('idh:services', []) %}
{% for service in idh_services %}
{% if service in ["snmp", "ntp", "tftp"] %}
{% set proto = 'udp' %}
{% else %}
{% set proto = 'tcp' %}
{% endif %}
block_mgt_ip_idh_services_{{ proto }}_{{ OPENCANARYCONFIG[service~'.port'] }} :
iptables.insert:
- table: filter
- chain: INPUT
- jump: DROP
- position: 1
- proto: {{ proto }}
- dport: {{ OPENCANARYCONFIG[service~'.port'] }}
- destination: {{ MAINIP }}
{% endfor %}
{% endif %}
# Create a config directory
temp:

View File

@@ -1,4 +1,4 @@
{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log') %}
{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log', shell='/bin/bash') %}
influxdb:
retention_policies:

View File

@@ -59,7 +59,7 @@ update() {
IFS=$'\r\n' GLOBIGNORE='*' command eval 'LINES=($(cat $1))'
for i in "${LINES[@]}"; do
RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.17.1" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ")
RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.17.4" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ")
echo $RESPONSE; if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
done

View File

@@ -1 +1 @@
{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.17.1","id": "7.17.1","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}
{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.17.4","id": "7.17.4","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}

View File

@@ -37,7 +37,7 @@ selfservice:
ui_url: https://{{ WEBACCESS }}/login/
default_browser_return_url: https://{{ WEBACCESS }}/
whitelisted_return_urls:
allowed_return_urls:
- http://127.0.0.1
log:
@@ -59,7 +59,10 @@ hashers:
cost: 12
identity:
default_schema_url: file:///kratos-conf/schema.json
default_schema_id: default
schemas:
- id: default
url: file:///kratos-conf/schema.json
courier:
smtp:

View File

@@ -0,0 +1,9 @@
# Do not edit this file. Copy it to /opt/so/saltstack/local/salt/logstash/ and make changes there. It should be formatted as a list.
# logstash:
# dmz_nodes:
# - mydmznodehostname1
# - mydmznodehostname2
# - mydmznodehostname3
logstash:
dmz_nodes:

View File

@@ -18,7 +18,7 @@ appender.rolling.name = rolling
appender.rolling.fileName = /var/log/logstash/logstash.log
appender.rolling.layout.type = PatternLayout
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log
appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log.gz
appender.rolling.policies.type = Policies
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
appender.rolling.policies.time.interval = 1
@@ -27,7 +27,7 @@ appender.rolling.strategy.type = DefaultRolloverStrategy
appender.rolling.strategy.action.type = Delete
appender.rolling.strategy.action.basepath = /var/log/logstash
appender.rolling.strategy.action.condition.type = IfFileName
appender.rolling.strategy.action.condition.glob = logstash-*.log
appender.rolling.strategy.action.condition.glob = *.gz
appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified
appender.rolling.strategy.action.condition.nested_condition.age = 7D
rootLogger.level = info

View File

@@ -0,0 +1,22 @@
{%- if grains['role'] == 'so-eval' -%}
{%- set ES = salt['pillar.get']('manager:mainip', '') -%}
{%- else %}
{%- set ES = salt['pillar.get']('elasticsearch:mainip', '') -%}
{%- endif %}
{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
{%- set ES_PASS = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}
output {
if [module] =~ "rita" and "import" not in [tags] {
elasticsearch {
pipeline => "%{module}.%{dataset}"
hosts => "{{ ES }}"
{% if salt['pillar.get']('elasticsearch:auth:enabled') is sameas true %}
user => "{{ ES_USER }}"
password => "{{ ES_PASS }}"
{% endif %}
index => "so-rita"
ssl => true
ssl_certificate_verification => false
}
}
}

View File

@@ -130,6 +130,8 @@ http {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
}
error_page 500 502 503 504 /50x.html;
location = /usr/share/nginx/html/50x.html {
@@ -330,34 +332,12 @@ http {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "Upgrade";
}
{%- endif %}
location /thehive/ {
proxy_pass http://{{ manager_ip }}:9000/thehive/;
proxy_read_timeout 90;
proxy_connect_timeout 90;
proxy_http_version 1.1; # this is essential for chunked responses to work
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}
location /cortex/ {
proxy_pass http://{{ manager_ip }}:9001/cortex/;
proxy_read_timeout 90;
proxy_connect_timeout 90;
proxy_http_version 1.1; # this is essential for chunked responses to work
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Proxy "";
proxy_set_header X-Forwarded-Proto $scheme;
}
location /soctopus/ {
auth_request /auth/sessions/whoami;

View File

@@ -1,27 +1,52 @@
{
"name": "Playbook",
"version": "3.0",
"domain": "mitre-enterprise",
"description": "Current Coverage of Playbook",
"name": "Playbook Coverage",
"versions": {
"attack": "11",
"navigator": "4.6.4",
"layer": "4.3"
},
"domain": "enterprise-attack",
"description": "",
"filters": {
"stages": ["act"],
"platforms": [
"windows",
"linux",
"mac"
"Linux",
"macOS",
"Windows",
"Azure AD",
"Office 365",
"SaaS",
"IaaS",
"Google Workspace",
"PRE",
"Network",
"Containers"
]
},
"sorting": 0,
"viewMode": 0,
"layout": {
"layout": "side",
"aggregateFunction": "average",
"showID": false,
"showName": true,
"showAggregateScores": false,
"countUnscored": false
},
"hideDisabled": false,
"techniques": [],
"gradient": {
"colors": ["#ff6666", "#ffe766", "#8ec843"],
"colors": [
"#ff6666ff",
"#ffe766ff",
"#8ec843ff"
],
"minValue": 0,
"maxValue": 100
},
"legendItems": [],
"metadata": [],
"links": [],
"showTacticRowBackground": false,
"tacticRowBackground": "#dddddd",
"selectTechniquesAcrossTactics": true
}
"selectTechniquesAcrossTactics": true,
"selectSubtechniquesWithParent": false
}

View File

@@ -1,58 +1,62 @@
{%- set URL_BASE = salt['pillar.get']('global:url_base', '') %}
{
"enterprise_attack_url": "assets/enterprise-attack.json",
"pre_attack_url": "assets/pre-attack.json",
"mobile_data_url": "assets/mobile-attack.json",
"taxii_server": {
"enabled": false,
"url": "https://cti-taxii.mitre.org/",
"collections": {
"enterprise_attack": "95ecc380-afe9-11e4-9b6c-751b66dd541e",
"pre_attack": "062767bd-02d2-4b72-84ba-56caef0f8658",
"mobile_attack": "2f669986-b40b-4423-b720-4396ca6a462b"
"versions": [
{
"name": "ATT&CK v11",
"version": "11",
"domains": [
{
"name": "Enterprise",
"identifier": "enterprise-attack",
"data": ["assets/so/enterprise-attack.json"]
}
]
}
],
"custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}],
"default_layers": {
"enabled": true,
"urls": ["assets/so/nav_layer_playbook.json"]
},
"domain": "mitre-enterprise",
"custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}],
"default_layers": {
"enabled": true,
"urls": [
"assets/playbook.json"
]
},
"comment_color": "yellow",
"link_color": "blue",
"banner": "",
"features": [
{"name": "leave_site_dialog", "enabled": true, "description": "Disable to remove the dialog prompt when leaving site."},
{"name": "tabs", "enabled": true, "description": "Disable to remove the ability to open new tabs."},
{"name": "selecting_techniques", "enabled": true, "description": "Disable to remove the ability to select techniques."},
{"name": "header", "enabled": true, "description": "Disable to remove the header containing 'MITRE ATT&CK Navigator' and the link to the help page. The help page can still be accessed from the new tab menu."},
{"name": "subtechniques", "enabled": true, "description": "Disable to remove all sub-technique features from the interface."},
{"name": "selection_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
{"name": "search", "enabled": true, "description": "Disable to remove the technique search panel from the interface."},
{"name": "multiselect", "enabled": true, "description": "Disable to remove the multiselect panel from interface."},
{"name": "deselect_all", "enabled": true, "description": "Disable to remove the deselect all button from the interface."}
]},
{"name": "layer_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
{"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and metadata) panel from the interface. Note that the layer can still be renamed in the tab."},
{"name": "layer_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [
{"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and layer metadata) panel from the interface. Note that the layer can still be renamed in the tab."},
{"name": "download_layer", "enabled": true, "description": "Disable to remove the button to download the layer."},
{"name": "export_render", "enabled": true, "description": "Disable to the remove the button to render the current layer."},
{"name": "export_excel", "enabled": true, "description": "Disable to the remove the button to export the current layer to MS Excel (.xlsx) format."},
{"name": "filters", "enabled": true, "description": "Disable to the remove the filters panel from interface."},
{"name": "sorting", "enabled": true, "description": "Disable to the remove the sorting button from the interface."},
{"name": "color_setup", "enabled": true, "description": "Disable to the remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."},
{"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to the remove the hide disabled techniques button from the interface."},
{"name": "toggle_view_mode", "enabled": true, "description": "Disable to the remove the toggle view mode button from interface."},
{"name": "legend", "enabled": true, "description": "Disable to the remove the legend panel from the interface."}
{"name": "export_render", "enabled": true, "description": "Disable to remove the button to render the current layer."},
{"name": "export_excel", "enabled": true, "description": "Disable to remove the button to export the current layer to MS Excel (.xlsx) format."},
{"name": "filters", "enabled": true, "description": "Disable to remove the filters panel from interface."},
{"name": "sorting", "enabled": true, "description": "Disable to remove the sorting button from the interface."},
{"name": "color_setup", "enabled": true, "description": "Disable to remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."},
{"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to remove the hide disabled techniques button from the interface."},
{"name": "layout_controls", "enabled": true, "description": "Disable to remove the ability to change the current matrix layout."},
{"name": "legend", "enabled": true, "description": "Disable to remove the legend panel from the interface."}
]},
{"name": "technique_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
{"name": "disable_techniques", "enabled": true, "description": "Disable to the remove the ability to disable techniques."},
{"name": "manual_color", "enabled": true, "description": "Disable to the remove the ability to assign manual colors to techniques."},
{"name": "scoring", "enabled": true, "description": "Disable to the remove the ability to score techniques."},
{"name": "comments", "enabled": true, "description": "Disable to the remove the ability to add comments to techniques."},
{"name": "technique_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [
{"name": "disable_techniques", "enabled": true, "description": "Disable to remove the ability to disable techniques."},
{"name": "manual_color", "enabled": true, "description": "Disable to remove the ability to assign manual colors to techniques."},
{"name": "scoring", "enabled": true, "description": "Disable to remove the ability to score techniques."},
{"name": "comments", "enabled": true, "description": "Disable to remove the ability to add comments to techniques."},
{"name": "comment_underline", "enabled": true, "description": "Disable to remove the comment underline effect on techniques."},
{"name": "links", "enabled": true, "description": "Disable to remove the ability to assign hyperlinks to techniques."},
{"name": "link_underline", "enabled": true, "description": "Disable to remove the hyperlink underline effect on techniques."},
{"name": "metadata", "enabled": true, "description": "Disable to remove the ability to add metadata to techniques."},
{"name": "clear_annotations", "enabled": true, "description": "Disable to remove the button to clear all annotations on the selected techniques."}
]}
]

View File

@@ -50,7 +50,7 @@ nginxtmp:
navigatorconfig:
file.managed:
- name: /opt/so/conf/navigator/navigator_config.json
- name: /opt/so/conf/navigator/config.json
- source: salt://nginx/files/navigator_config.json
- user: 939
- group: 939
@@ -59,7 +59,7 @@ navigatorconfig:
navigatordefaultlayer:
file.managed:
- name: /opt/so/conf/navigator/nav_layer_playbook.json
- name: /opt/so/conf/navigator/layers/nav_layer_playbook.json
- source: salt://nginx/files/nav_layer_playbook.json
- user: 939
- group: 939
@@ -69,7 +69,7 @@ navigatordefaultlayer:
navigatorpreattack:
file.managed:
- name: /opt/so/conf/navigator/pre-attack.json
- name: /opt/so/conf/navigator/layers/pre-attack.json
- source: salt://nginx/files/pre-attack.json
- user: 939
- group: 939
@@ -78,7 +78,7 @@ navigatorpreattack:
navigatorenterpriseattack:
file.managed:
- name: /opt/so/conf/navigator/enterprise-attack.json
- name: /opt/so/conf/navigator/layers/enterprise-attack.json
- source: salt://nginx/files/enterprise-attack.json
- user: 939
- group: 939
@@ -99,10 +99,8 @@ so-nginx:
- /etc/pki/managerssl.crt:/etc/pki/nginx/server.crt:ro
- /etc/pki/managerssl.key:/etc/pki/nginx/server.key:ro
# ATT&CK Navigator binds
- /opt/so/conf/navigator/navigator_config.json:/opt/socore/html/navigator/assets/config.json:ro
- /opt/so/conf/navigator/nav_layer_playbook.json:/opt/socore/html/navigator/assets/playbook.json:ro
- /opt/so/conf/navigator/enterprise-attack.json:/opt/socore/html/navigator/assets/enterprise-attack.json:ro
- /opt/so/conf/navigator/pre-attack.json:/opt/socore/html/navigator/assets/pre-attack.json:ro
- /opt/so/conf/navigator/layers/:/opt/socore/html/navigator/assets/so:ro
- /opt/so/conf/navigator/config.json:/opt/socore/html/navigator/assets/config.json:ro
{% endif %}
{% if ISAIRGAP is sameas true %}
- /nsm/repo:/opt/socore/html/repo:ro

View File

@@ -42,6 +42,15 @@ query_updatwebhooks:
- connection_user: root
- connection_pass: {{ MYSQLPASS }}
query_updatename:
mysql_query.run:
- database: playbook
- query: "update custom_fields set name = 'Custom Filter' where id = 21;"
- connection_host: {{ MAINIP }}
- connection_port: 3306
- connection_user: root
- connection_pass: {{ MYSQLPASS }}
query_updatepluginurls:
mysql_query.run:
- database: playbook

View File

@@ -1,5 +1,20 @@
# this removes the repo file left by bootstrap-salt.sh without -r
remove_salt.list:
file.absent:
- name: /etc/apt/sources.list.d/salt.list
saltstack.list:
file.managed:
- name: /etc/apt/sources.list.d/saltstack.list
- contents:
- deb https://repo.securityonion.net/file/securityonion-repo/ubuntu/{{grains.osrelease}}/amd64/salt/ {{grains.oscodename}} main
apt_update:
cmd.run:
- name: apt-get update
- onchanges:
- file: saltstack.list
- timeout: 30
- retry:
attempts: 5
interval: 30

View File

@@ -11,6 +11,7 @@
{% set PYTHON3INFLUX= 'influxdb == ' ~ PYTHONINFLUXVERSION %}
{% set PYTHON3INFLUXDEPS= ['certifi', 'chardet', 'python-dateutil', 'pytz', 'requests'] %}
{% set PYTHONINSTALLER = 'pip' %}
{% set SYSTEMD_UNIT_FILE = '/lib/systemd/system/salt-minion.service' %}
{% else %}
{% set SPLITCHAR = '-' %}
{% set SALTNOTHELD = salt['cmd.run']('yum versionlock list | grep -q salt ; echo $?', python_shell=True) %}
@@ -21,6 +22,7 @@
{% set PYTHON3INFLUX= 'securityonion-python3-influxdb' %}
{% set PYTHON3INFLUXDEPS= ['python36-certifi', 'python36-chardet', 'python36-dateutil', 'python36-pytz', 'python36-requests'] %}
{% set PYTHONINSTALLER = 'pkg' %}
{% set SYSTEMD_UNIT_FILE = '/usr/lib/systemd/system/salt-minion.service' %}
{% endif %}
{% set INSTALLEDSALTVERSION = salt['pkg.version']('salt-minion').split(SPLITCHAR)[0] %}

View File

@@ -3,6 +3,7 @@
{% from 'salt/map.jinja' import INSTALLEDSALTVERSION %}
{% from 'salt/map.jinja' import SALTNOTHELD %}
{% from 'salt/map.jinja' import SALTPACKAGES %}
{% from 'salt/map.jinja' import SYSTEMD_UNIT_FILE %}
{% import_yaml 'salt/minion.defaults.yaml' as SALTMINION %}
{% set service_start_delay = SALTMINION.salt.minion.service_start_delay %}
@@ -82,7 +83,7 @@ set_log_levels:
salt_minion_service_unit_file:
file.managed:
- name: /etc/systemd/system/multi-user.target.wants/salt-minion.service
- name: {{ SYSTEMD_UNIT_FILE }}
- source: salt://salt/service/salt-minion.service.jinja
- template: jinja
- defaults:
@@ -110,6 +111,7 @@ salt_minion_service:
- file: set_log_levels
- file: salt_minion_service_unit_file
{% endif %}
- order: last
patch_pkg:

View File

@@ -0,0 +1,269 @@
# Security Onion Analyzers
Security Onion provides a means for performing data analysis on varying inputs. The input can be any data of interest sourced from event logs, such as hostnames, IP addresses, file hashes, and URLs. The analysis is conducted by one or more analyzers that understand that type of input. A set of analyzers comes with the default installation of Security Onion, and additional analyzers can be added to extend the analysis to other areas or data types.
## Supported Observable Types
The built-in analyzers support the following observable types:
| Name | Domain | Hash | IP | JA3 | Mail | Other | URI | URL | User Agent |
| ------------------------ | ------ | ----- | ----- | ----- | ----- | ----- | ----- | ----- | ---------- |
| Alienvault OTX |&check; |&check;|&check;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|
| EmailRep |&cross; |&cross;|&cross;|&cross;|&check;|&cross;|&cross;|&cross;|&cross;|
| Greynoise |&cross; |&cross;|&check;|&cross;|&cross;|&cross;|&cross;|&cross;|&cross;|
| JA3er |&cross; |&cross;|&cross;|&check;|&cross;|&cross;|&cross;|&cross;|&cross;|
| LocalFile |&check; |&check;|&check;|&check;|&cross;|&check;|&cross;|&check;|&cross;|
| Malware Hash Registry |&cross; |&check;|&cross;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|
| Pulsedive |&check; |&check;|&check;|&cross;|&cross;|&cross;|&check;|&check;|&check;|
| Spamhaus |&cross; |&cross;|&check;|&cross;|&cross;|&cross;|&cross;|&cross;|&cross;|
| Urlhaus |&cross; |&cross;|&cross;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|
| Urlscan |&cross; |&cross;|&cross;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|
| Virustotal |&check; |&check;|&check;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|
| WhoisLookup |&check; |&cross;|&cross;|&cross;|&cross;|&cross;|&check;|&cross;|&cross;|
## Authentication
Many analyzers require authentication, via an API key or similar. The table below illustrates which analyzers require authentication.
| Name | Authn Req'd |
| ------------------------------------------------------------------- | ----------- |
| [AlienVault OTX](https://otx.alienvault.com/api) | &check; |
| [EmailRep](https://emailrep.io/key) | &check; |
| [GreyNoise](https://www.greynoise.io/plans/community) | &check; |
| [JA3er](https://ja3er.com/) | &cross; |
| LocalFile | &cross; |
| [Malware Hash Registry](https://hash.cymru.com/docs_whois) | &cross; |
| [Pulsedive](https://pulsedive.com/api/) | &check; |
| [Spamhaus](https://www.spamhaus.org/dbl/) | &cross; |
| [Urlhaus](https://urlhaus.abuse.ch/) | &cross; |
| [Urlscan](https://urlscan.io/docs/api/) | &check; |
| [VirusTotal](https://developers.virustotal.com/reference/overview) | &check; |
| [WhoisLookup](https://github.com/meeb/whoisit) | &cross; |
## Developer Guide
### Python
Analyzers are Python modules. A simpler analyzer can consist of a single .py script, while a more complex analyzer can be a set of scripts organized within nested directories.
The Python language was chosen because of its wide adoption in the security industry, its ease of development and testing, and the abundance of developers with Python skills.
Specifically, analyzers must be compatible with Python 3.10.
For more information about Python, see the [Python Documentation](https://docs.python.org).
### Development
Custom analyzers should be developed outside of the Security Onion cluster, in a proper software development environment, with version control or other backup mechanisms in place. The analyzer can be developed, unit tested, and integration tested without the need for a Security Onion installation. Once satisfied with the analyzer functionality, the analyzer directory should be copied to the Security Onion manager node.
Developing an analyzer directly on a Security Onion manager node is strongly discouraged, as loss of source code (and time and effort) can occur should the manager node suffer a catastrophic failure with disk storage loss.
For best results, avoid long, complicated functions in favor of short, discrete functions. This has several benefits:
- Easier to troubleshoot
- Easier to maintain
- Easier to unit test
- Easier for other developers to review
### Linting
Source code should adhere to the [PEP 8 - Style Guide for Python Code](https://peps.python.org/pep-0008/). Developers can use the default configuration of `flake8` to validate conformance, or run the included `build.sh` inside the analyzers directory. Note that linting conformance is mandatory for analyzers that are contributed back to the Security Onion project.
### Testing
Python's [unittest](https://docs.python.org/3/library/unittest.html) library can be used for covering analyzer code with unit tests. Unit tests are encouraged for custom analyzers, and mandatory for public analyzers submitted back to the Security Onion project.
If you are new to unit testing, please see the included `urlhaus_test.py` as an example.
Unit tests should be named following the pattern `<scriptname>_test.py`.
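For illustration, a minimal test module for a hypothetical `myanalyzer` analyzer might look like the following sketch; the module name and the `prepareResults` fields shown here are assumptions for demonstration, not part of the shipped analyzers:
```python
# myanalyzer_test.py - minimal unit test sketch for a hypothetical analyzer.
import unittest

from myanalyzer import myanalyzer  # hypothetical analyzer package


class TestMyAnalyzerMethods(unittest.TestCase):
    def test_prepareResults_harmless(self):
        raw = {"verdict": "clean"}  # example raw service response
        results = myanalyzer.prepareResults(raw)
        self.assertEqual(results["response"], raw)
        self.assertEqual(results["status"], "ok")
        self.assertEqual(results["summary"], "harmless")


if __name__ == "__main__":
    unittest.main()
```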
### Analyzer Package Structure
Deployment of a custom analyzer entails copying the analyzer source directory and dependency wheel archives to the Security Onion manager node. The destination locations can be found inside the `securityonion` salt source directory tree. Using the [Saltstack](https://github.com/saltstack/salt) directory pattern allows Security Onion developers to add their own analyzers with minimal additional effort needed to upgrade to newer versions of Security Onion. When the _sensoroni_ salt state executes, it will merge the default analyzers with any local analyzers and copy the merged analyzers into the `/opt/so/conf/sensoroni` directory.
Do not modify files in the `/opt/so/conf/sensoroni` directory! This is a generated directory, and any changes made inside it will be automatically erased at frequent intervals.
On a Security Onion manager, custom analyzers should be placed inside the `/opt/so/saltstack/local/salt/sensoroni` directory, as described in the next section.
#### Directory Tree
From within the default saltstack directory, the following files and directories exist:
```
salt
|- sensoroni
|- files
|- analyzers
|- urlhaus <- Example of an existing analyzer
| |- source-packages <- Contains wheel package bundles for this analyzer's dependencies
| |- site-packages <- Auto-generated site-packages directory (or used for custom dependencies)
| |- requirements.txt <- List of all dependencies needed for this analyzer
| |- urlhaus.py <- Source code for the analyzer
| |- urlhaus_test.py <- Unit tests for the analyzer source code
| |- urlhaus.json <- Metadata for the analyzer
| |- __init__.py <- Package initialization file, often empty
|
|- build.sh <- Simple CI tool for validating linting and unit tests
|- helpers.py <- Common functions shared by many analyzers
|- helpers_test.py <- Unit tests for the shared source code
|- pytest.ini             <- Configuration options for flake8 and pytest
|- README.md <- The file you are currently reading
```
Custom analyzers should conform to this same structure, but instead of being placed in the `/opt/so/saltstack/default` directory tree, they should be placed in the `/opt/so/saltstack/local` directory tree. This ensures future Security Onion upgrades will not overwrite customizations. Shared files like `build.sh` and `helpers.py` do not need to be duplicated. They can remain in the _default_ directory tree. Only new or modified files should exist in the _local_ directory tree.
#### Metadata
Each analyzer has metadata that describes the function of the analyzer, its required inputs, artifact compatibility, optional configuration options, version, and other important details. This is a static file and is not intended to be used for dynamic or custom configuration options; it should only be modified by the author of the analyzer.
The following example describes the urlhaus metadata content:
```
{
"name": "Urlhaus", <- Unique human-friendly name of this analyzer
"version": "0.1", <- The version of the analyzer
"author": "Security Onion Solutions", <- Author's name, and/or email or other contact information
"description": "This analyzer queries URLHaus...", <- A brief, concise description of the analyzer
"supportedTypes" : ["url"], <- List of types that must match the SOC observable types
"baseUrl": "https://urlhaus-api.abuse.ch/v1/url/" <- Optional hardcoded data used by the analyzer
}
```
The `supportedTypes` values should only contain the types that this analyzer can work with. In the case of the URLHaus analyzer, we know that it works with URLs, so adding "hash" to this list wouldn't make sense, since URLHaus doesn't provide information about file hashes. If an analyzer does not support a particular type, it will not show up in the analyzer results in SOC for that observable being analyzed. This is intentional, to eliminate unnecessary screen clutter in SOC. To find a list of available values for the `supportedTypes` field, log in to SOC and, inside a Case, click the + button on the Observables tab. You will see a list of types, and each of those can be used in this metadata field when applicable to the analyzer.
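As a rough sketch of how an analyzer can act on this metadata, the helper below approximates the shared `helpers.checkSupportedType` pattern used by the built-in analyzers; it is an illustration, not the exact implementation:
```python
import sys


def checkSupportedType(meta, artifact_type):
    # Decline input this analyzer does not understand; exit code 126 signals
    # "cannot/should not operate on this input" (see the Exit Code section).
    if artifact_type not in meta.get("supportedTypes", []):
        sys.exit(126)


meta = {"name": "Urlhaus", "supportedTypes": ["url"]}
checkSupportedType(meta, "url")    # proceeds
# checkSupportedType(meta, "hash") # would exit with code 126
```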
#### Dependencies
Analyzers will often require the use of third-party packages. For example, if an analyzer needs to make a request to a remote server via HTTPS, then the `requests` package will likely be used. Each analyzer will contain a `requirements.txt` file, in which all third-party dependencies can be specified, following the Python [Requirements File Specification](https://pip.pypa.io/en/stable/reference/requirements-file-format/).
Additionally, to support airgapped users, the dependency packages themselves, and any transitive dependencies, should be placed inside the `source-packages` directory. To obtain the full hierarchy of dependencies, execute the following command:
```bash
pip download -r <my-analyzer-path>/requirements.txt -d <my-analyzer-path>/source-packages
```
### Analyzer Architecture
The Sensoroni Docker container is responsible for executing analyzers. Only the manager's Sensoroni container will process analyzer jobs. Other nodes in the grid, such as sensors and search nodes, will not be assigned analyzer jobs.
When the Sensoroni Docker container starts, the `/opt/so/conf/sensoroni/analyzer` directory is mapped into the container. During initialization, the Sensoroni Analyze module will scan that directory for subdirectories; each valid subdirectory will be added as an available analyzer.
The analyzer itself will only run when a user in SOC enqueues an analyzer job, such as via the Cases -> Observables tab. When the Sensoroni node is ready to run the job, it will execute the Python interpreter separately for each loaded analyzer. The command line resembles the following:
```bash
python -m urlhaus '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
```
It is up to each analyzer to determine whether the provided input is compatible with that analyzer. This determination is assisted by the analyzer metadata, as described earlier in this document, via the `supportedTypes` list.
Once the analyzer completes its work, it must terminate promptly. See the following sections for more details on the expected internal behavior of an analyzer.
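Putting these pieces together, a bare-bones analyzer entry point might resemble the following sketch; `myanalyzer`-style hardcoded values and the placeholder `analyze` logic are illustrative only:
```python
import argparse
import json
import sys

SUPPORTED_TYPES = ["url"]  # would normally come from the metadata file


def analyze(artifact_json):
    data = json.loads(artifact_json)
    if data.get("artifactType") not in SUPPORTED_TYPES:
        sys.exit(126)  # decline incompatible input
    # ... query a remote service and build the results here ...
    return {"response": {}, "summary": "no_results", "status": "ok"}


def main():
    parser = argparse.ArgumentParser(description='Example analyzer skeleton')
    parser.add_argument('artifact', help='the artifact represented in JSON format')
    args = parser.parse_args()
    print(json.dumps(analyze(args.artifact)))  # JSON results go to stdout
    # returning from main() terminates the process promptly with exit code 0


if __name__ == "__main__":
    main()
```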
#### Configuration
Analyzers may need dynamic configuration data, such as credentials or other secrets, in order to complete their function. Optional configuration files can provide this information, and are expected to reside in the analyzer's directory. Configuration files are typically written in YAML syntax for ease of modification.
Configuration files for analyzers included with Security Onion will be pillarized, meaning they derive their custom values from the Saltstack pillar data. For example, an analyzer that requires a user-supplied credential might contain a config file resembling the following, where Jinja templating syntax is used to extract Salt pillar data:
```yaml
username: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:username', '') }}
password: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:password', '') }}
```
Sensoroni will not provide any inputs to the analyzer during execution, other than the artifact input in JSON format. However, developers will likely need to test the analyzer outside of Sensoroni and without Jinja templating; therefore, an alternate config file should normally be supplied as the configuration argument during testing. Analyzers should allow for this additional command line argument, but by default should automatically read a configuration file stored in the analyzer's directory.
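A sketch of this pattern follows, with `loadConfig` standing in for the shared `helpers.loadConfig` function and `myanalyzer.yaml` as a hypothetical default config file:
```python
import argparse
import os

import yaml  # provided by the pyyaml dependency


def loadConfig(path):
    # Stand-in for helpers.loadConfig; reads a YAML config file
    with open(path) as f:
        return yaml.safe_load(f)


def main():
    default_conf = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'myanalyzer.yaml')
    parser = argparse.ArgumentParser()
    parser.add_argument('artifact', help='the artifact represented in JSON format')
    parser.add_argument('-c', '--config', metavar='CONFIG_FILE', default=default_conf,
                        help='optional config file, useful for testing without pillarized values')
    args = parser.parse_args()
    conf = loadConfig(args.config)  # use conf['api_key'], etc. from here on


if __name__ == "__main__":
    main()
```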
#### Exit Code
If an analyzer determines it cannot or should not operate on the input then the analyzer should return an exit code of `126`.
If an analyzer does attempt to operate against the input, then the exit code should be `0`, regardless of the outcome. The outcome, be it an error, a confirmed threat detection, or perhaps an unknown outcome, should be noted in the output of the analyzer.
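The convention can be summarized in code; `query_service` below is a hypothetical remote lookup, not a real helper:
```python
import json
import sys


def query_service(conf, value):
    # Hypothetical remote lookup; a real analyzer would use requests here
    raise NotImplementedError


def run(conf, value):
    if 'api_key' not in conf:
        sys.exit(126)  # cannot operate: required configuration is missing
    try:
        raw = query_service(conf, value)
        result = {'response': raw, 'status': 'ok', 'summary': 'harmless'}
    except Exception as e:
        # Errors are reported in the JSON output, not via a nonzero exit code
        result = {'status': 'caution', 'summary': 'internal_failure', 'error': str(e)}
    print(json.dumps(result))
    sys.exit(0)  # the analyzer did operate on the input, so exit 0
```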
#### Output
The outcome of the analyzer is reflected in the analyzer's output to `stdout`. The output must be JSON formatted, and should contain the following fields.
`summary`: A very short summary of the outcome. It should be under 50 characters; otherwise, it will be truncated when displayed in the Analyzer job list.
`status`: Can be one of the following status values, which most appropriately reflects the outcome:
- `ok`: The analyzer has concluded that the provided input is not a known threat.
- `info`: This analyzer provides informative data, but does not attempt to conclude the input is a threat.
- `caution`: The data provided is inconclusive. Analysts should review this information further. This can be used in error scenarios, such as if the analyzer fails to complete, perhaps due to a remote service being offline.
- `threat`: The analyzer has detected that the input is likely related to a threat.
`error`: [Optional] If the analyzer encounters an unrecoverable error, those details, useful for administrators to troubleshoot the problem, should be placed in this field.
Additional fields are allowed, and should contain data that is specific to the analyzer.
Below is an example of a _urlhaus_ analyzer output. Note that the urlhaus raw JSON is added to a custom field called "response".
```json
{
"response": {
"blacklists": {
"spamhaus_dbl": "not listed",
"surbl": "not listed"
},
"date_added": "2022-04-07 12:39:14 UTC",
"host": "abeibaba.com",
"id": "2135795",
"larted": "false",
"last_online": null,
"payloads": null,
"query_status": "ok",
"reporter": "switchcert",
"tags": [
"Flubot"
],
"takedown_time_seconds": null,
"threat": "malware_download",
"url": "https://abeibaba.com/ian/?redacted",
"url_status": "offline",
"urlhaus_reference": "https://urlhaus.abuse.ch/url/2135795/"
},
"status": "threat",
"summary": "malware_download"
}
```
Users in SOC will be able to view the entire JSON output, therefore it is important that sensitive information, such as credentials or other secrets, is excluded from the output.
#### Internationalization
Some of the built-in analyzers use snake_case summary values instead of human-friendly words or phrases. These are identifiers that the SOC UI will use to look up a localized translation for the user. The use of these identifiers is not required for custom analyzers. In fact, in order for an identifier to be properly localized, the translations must exist in the SOC product, which is out of scope for this development guide. That said, the following generic translations might be useful for custom analyzers:
| Identifier | English |
| ------------------ | -------------------------- |
| `malicious` | Malicious |
| `suspicious` | Suspicious |
| `harmless` | Harmless |
| `internal_failure` | Analyzer Internal Failure |
| `timeout` | Remote Host Timed Out |
#### Timeout
It is expected that analyzers will finish quickly, but a timeout is in place that will abort the analyzer if it runs too long. That timeout defaults to 15 minutes (900000 milliseconds), but can be customized via the `sensoroni:analyze_timeout_ms` salt pillar.
## Contributing
Review the Security Onion project [contribution guidelines](https://github.com/Security-Onion-Solutions/securityonion/blob/master/CONTRIBUTING.md) if you are considering contributing an analyzer to the Security Onion project.
#### Procedure
In order to make a custom analyzer into a permanent Security Onion analyzer, the following steps need to be taken:
1. Fork the [securityonion GitHub repository](https://github.com/Security-Onion-Solutions/securityonion)
2. Copy your custom analyzer directory to the forked project, under the `securityonion/salt/sensoroni/files/analyzers` directory.
3. Ensure the contribution requirements in the following section are met.
4. Submit a [pull request](https://github.com/Security-Onion-Solutions/securityonion/pulls) to merge your GitHub fork back into the `securityonion` _dev_ branch.
#### Requirements
The following requirements must be satisfied in order for analyzer pull requests to be accepted into the Security Onion GitHub project:
- Analyzer contributions must not contain licensed dependencies or source code that is incompatible with the [GPLv2 licensing](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html).
- All source code must pass the `flake8` lint check. This ensures source code conforms to the same style guides as the other analyzers. The Security Onion project will automatically run the linter after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed lint checks will result in the submitter being sent an automated email message.
- All source code must include accompanying unit test coverage. The Security Onion project will automatically run the unit tests after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed unit tests, or insufficient unit test coverage, will result in the submitter being sent an automated email message.
- Documentation of the analyzer, its input requirements, conditions for operation, and other relevant information must be clearly written in an accompanying analyzer metadata file. This file is described in more detail earlier in this document.
- Source code must be well-written and be free of security defects that can put users or their data at unnecessary risk.

View File

@@ -0,0 +1,39 @@
#!/bin/bash
COMMAND=$1
SENSORONI_CONTAINER=${SENSORONI_CONTAINER:-so-sensoroni}
function ci() {
HOME_DIR=$(dirname "$0")
TARGET_DIR=${1:-.}
PATH=$PATH:/usr/local/bin
if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then
echo "Missing dependencies. Consider running the following command:"
echo " python -m pip install flake8 pytest pytest-cov"
exit 1
fi
flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini"
pytest "$TARGET_DIR" "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100
}
function download() {
ANALYZERS=$1
if [[ $ANALYZERS = "all" ]]; then
ANALYZERS="*/"
fi
for ANALYZER in $ANALYZERS; do
rm -fr $ANALYZER/site-packages
mkdir -p $ANALYZER/source-packages
rm -fr $ANALYZER/source-packages/*
docker exec -it $SENSORONI_CONTAINER pip download -r /opt/sensoroni/analyzers/$ANALYZER/requirements.txt -d /opt/sensoroni/analyzers/$ANALYZER/source-packages
done
}
if [[ "$COMMAND" == "download" ]]; then
download "$2"
else
ci
fi

View File

@@ -0,0 +1,17 @@
# EmailRep
## Description
Submit an email address to EmailRepIO for analysis.
## Configuration Requirements
``api_key`` - API key used for communication with the EmailRepIO API
This value should be set in the ``sensoroni`` pillar, like so:
```
sensoroni:
analyzers:
emailrep:
api_key: $yourapikey
```

View File

@@ -0,0 +1,7 @@
{
"name": "EmailRep",
"version": "0.1",
"author": "Security Onion Solutions",
"description": "This analyzer queries the EmailRep API for email address reputation information",
"supportedTypes" : ["email", "mail"]
}

View File

@@ -0,0 +1,67 @@
import json
import os
import sys
import requests
import helpers
import argparse
def checkConfigRequirements(conf):
if "api_key" not in conf:
sys.exit(126)
else:
return True
def sendReq(conf, meta, email):
url = conf['base_url'] + email
headers = {"Key": conf['api_key']}
response = requests.request('GET', url=url, headers=headers)
return response.json()
def prepareResults(raw):
if "suspicious" in raw:
if raw['suspicious'] is True:
status = "caution"
summary = "suspicious"
elif raw['suspicious'] is False:
status = "ok"
summary = "harmless"
elif "status" in raw:
if raw["reason"] == "invalid email":
status = "caution"
summary = "invalid_input"
if "exceeded daily limit" in raw["reason"]:
status = "caution"
summary = "excessive_usage"
else:
status = "caution"
summary = "internal_failure"
results = {'response': raw, 'summary': summary, 'status': status}
return results
def analyze(conf, input):
checkConfigRequirements(conf)
meta = helpers.loadMetadata(__file__)
data = helpers.parseArtifact(input)
helpers.checkSupportedType(meta, data["artifactType"])
response = sendReq(conf, meta, data["value"])
return prepareResults(response)
def main():
dir = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser(description='Search EmailRep for a given artifact')
parser.add_argument('artifact', help='the artifact represented in JSON format')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/emailrep.yaml", help='optional config file to use instead of the default config file')
args = parser.parse_args()
if args.artifact:
results = analyze(helpers.loadConfig(args.config), args.artifact)
print(json.dumps(results))
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,2 @@
base_url: https://emailrep.io/
api_key: "{{ salt['pillar.get']('sensoroni:analyzers:emailrep:api_key', '') }}"

View File

@@ -0,0 +1,85 @@
from io import StringIO
import sys
from unittest.mock import patch, MagicMock
from emailrep import emailrep
import unittest
class TestEmailRepMethods(unittest.TestCase):
def test_main_missing_input(self):
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stderr', new=StringIO()) as mock_stderr:
sys.argv = ["cmd"]
emailrep.main()
self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
sysmock.assert_called_once_with(2)
def test_main_success(self):
output = {"foo": "bar"}
with patch('sys.stdout', new=StringIO()) as mock_stdout:
with patch('emailrep.emailrep.analyze', new=MagicMock(return_value=output)) as mock:
sys.argv = ["cmd", "input"]
emailrep.main()
expected = '{"foo": "bar"}\n'
self.assertEqual(mock_stdout.getvalue(), expected)
mock.assert_called_once()
def test_checkConfigRequirements_not_present(self):
conf = {"not_a_file_path": "blahblah"}
with self.assertRaises(SystemExit) as cm:
emailrep.checkConfigRequirements(conf)
self.assertEqual(cm.exception.code, 126)
def test_sendReq(self):
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
meta = {}
conf = {"base_url": "https://myurl/", "api_key": "abcd1234"}
email = "test@abc.com"
response = emailrep.sendReq(conf=conf, meta=meta, email=email)
mock.assert_called_once_with("GET", headers={"Key": "abcd1234"}, url="https://myurl/test@abc.com")
self.assertIsNotNone(response)
def test_prepareResults_invalidEmail(self):
raw = {"status": "fail", "reason": "invalid email"}
results = emailrep.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "invalid_input")
self.assertEqual(results["status"], "caution")
def test_prepareResults_not_suspicious(self):
raw = {"email": "notsus@domain.com", "reputation": "high", "suspicious": False, "references": 21, "details": {"blacklisted": False, "malicious_activity": False, "profiles": ["twitter"]}}
results = emailrep.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "harmless")
self.assertEqual(results["status"], "ok")
def test_prepareResults_suspicious(self):
raw = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}}
results = emailrep.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "suspicious")
self.assertEqual(results["status"], "caution")
def test_prepareResults_exceeded_limit(self):
raw = {"status": "fail", "reason": "exceeded daily limit. please wait 24 hrs or visit emailrep.io/key for an api key."}
results = emailrep.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "excessive_usage")
self.assertEqual(results["status"], "caution")
def test_prepareResults_error(self):
raw = {}
results = emailrep.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "internal_failure")
self.assertEqual(results["status"], "caution")
def test_analyze(self):
output = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}}
artifactInput = '{"value":"sus@domain.com","artifactType":"email"}'
conf = {"base_url": "myurl/", "api_key": "abcd1234"}
with patch('emailrep.emailrep.sendReq', new=MagicMock(return_value=output)) as mock:
results = emailrep.analyze(conf, artifactInput)
self.assertEqual(results["summary"], "suspicious")
mock.assert_called_once()

View File

@@ -0,0 +1,2 @@
requests>=2.27.1
pyyaml>=6.0

View File

@@ -0,0 +1,19 @@
# Greynoise
## Description
Submit an IP address to Greynoise for analysis.
## Configuration Requirements
``api_key`` - API key used for communication with the Greynoise API
``api_version`` - Version of Greynoise API. Default is ``community``
These values should be set in the ``sensoroni`` pillar, like so:
```
sensoroni:
analyzers:
greynoise:
api_key: $yourapikey
```

View File

@@ -0,0 +1,7 @@
{
"name": "Greynoise IP Analyzer",
"version": "0.1",
"author": "Security Onion Solutions",
"description": "This analyzer queries Greynoise for context around an IP address",
"supportedTypes" : ["ip"]
}

View File

@@ -0,0 +1,78 @@
import json
import os
import sys
import requests
import helpers
import argparse
def checkConfigRequirements(conf):
if "api_key" not in conf or len(conf['api_key']) == 0:
sys.exit(126)
else:
return True
def sendReq(conf, meta, ip):
url = conf['base_url']
if conf['api_version'] == 'community':
url = url + 'v3/community/' + ip
elif conf['api_version'] in ('investigate', 'automate'):
url = url + 'v2/noise/context/' + ip
headers = {"key": conf['api_key']}
response = requests.request('GET', url=url, headers=headers)
return response.json()
def prepareResults(raw):
if "message" in raw:
if "Success" in raw["message"]:
if "classification" in raw:
if "benign" in raw['classification']:
status = "ok"
summary = "harmless"
elif "malicious" in raw['classification']:
status = "threat"
summary = "malicious"
elif "unknown" in raw['classification']:
status = "caution"
summary = "suspicious"
elif "IP not observed scanning the internet or contained in RIOT data set." in raw["message"]:
status = "ok"
summary = "no_results"
elif "Request is not a valid routable IPv4 address" in raw["message"]:
status = "caution"
summary = "invalid_input"
else:
status = "info"
summary = raw["message"]
else:
status = "caution"
summary = "internal_failure"
results = {'response': raw, 'summary': summary, 'status': status}
return results
def analyze(conf, input):
checkConfigRequirements(conf)
meta = helpers.loadMetadata(__file__)
data = helpers.parseArtifact(input)
helpers.checkSupportedType(meta, data["artifactType"])
response = sendReq(conf, meta, data["value"])
return prepareResults(response)
def main():
dir = os.path.dirname(os.path.realpath(__file__))
parser = argparse.ArgumentParser(description='Search Greynoise for a given artifact')
parser.add_argument('artifact', help='the artifact represented in JSON format')
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/greynoise.yaml", help='optional config file to use instead of the default config file')
args = parser.parse_args()
if args.artifact:
results = analyze(helpers.loadConfig(args.config), args.artifact)
print(json.dumps(results))
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,3 @@
base_url: https://api.greynoise.io/
api_key: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_key', '') }}"
api_version: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_version', 'community') }}"

View File

@@ -0,0 +1,117 @@
from io import StringIO
import sys
from unittest.mock import patch, MagicMock
from greynoise import greynoise
import unittest
class TestGreynoiseMethods(unittest.TestCase):
def test_main_missing_input(self):
with patch('sys.exit', new=MagicMock()) as sysmock:
with patch('sys.stderr', new=StringIO()) as mock_stderr:
sys.argv = ["cmd"]
greynoise.main()
self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
sysmock.assert_called_once_with(2)
def test_main_success(self):
output = {"foo": "bar"}
with patch('sys.stdout', new=StringIO()) as mock_stdout:
with patch('greynoise.greynoise.analyze', new=MagicMock(return_value=output)) as mock:
sys.argv = ["cmd", "input"]
greynoise.main()
expected = '{"foo": "bar"}\n'
self.assertEqual(mock_stdout.getvalue(), expected)
mock.assert_called_once()
def test_checkConfigRequirements_not_present(self):
conf = {"not_a_file_path": "blahblah"}
with self.assertRaises(SystemExit) as cm:
greynoise.checkConfigRequirements(conf)
self.assertEqual(cm.exception.code, 126)
def test_sendReq_community(self):
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
meta = {}
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "community"}
ip = "192.168.1.1"
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v3/community/192.168.1.1")
self.assertIsNotNone(response)
def test_sendReq_investigate(self):
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
meta = {}
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "investigate"}
ip = "192.168.1.1"
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1")
self.assertIsNotNone(response)
def test_sendReq_automate(self):
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
meta = {}
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "automate"}
ip = "192.168.1.1"
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1")
self.assertIsNotNone(response)
def test_prepareResults_invalidIP(self):
raw = {"message": "Request is not a valid routable IPv4 address"}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "invalid_input")
self.assertEqual(results["status"], "caution")
def test_prepareResults_not_found(self):
raw = {"ip": "192.190.1.1", "noise": "false", "riot": "false", "message": "IP not observed scanning the internet or contained in RIOT data set."}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "no_results")
self.assertEqual(results["status"], "ok")
def test_prepareResults_benign(self):
raw = {"ip": "8.8.8.8", "noise": "false", "riot": "true", "classification": "benign", "name": "Google Public DNS", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "harmless")
self.assertEqual(results["status"], "ok")
def test_prepareResults_malicious(self):
raw = {"ip": "121.142.87.218", "noise": "true", "riot": "false", "classification": "malicious", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "malicious")
self.assertEqual(results["status"], "threat")
def test_prepareResults_unknown(self):
raw = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "suspicious")
self.assertEqual(results["status"], "caution")
def test_prepareResults_unknown_message(self):
raw = {"message": "unknown"}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "unknown")
self.assertEqual(results["status"], "info")
def test_prepareResults_error(self):
raw = {}
results = greynoise.prepareResults(raw)
self.assertEqual(results["response"], raw)
self.assertEqual(results["summary"], "internal_failure")
self.assertEqual(results["status"], "caution")
def test_analyze(self):
output = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
artifactInput = '{"value":"221.4.62.149","artifactType":"ip"}'
conf = {"base_url": "myurl/", "api_key": "abcd1234", "api_version": "community"}
with patch('greynoise.greynoise.sendReq', new=MagicMock(return_value=output)) as mock:
results = greynoise.analyze(conf, artifactInput)
self.assertEqual(results["summary"], "suspicious")
mock.assert_called_once()

View File

@@ -0,0 +1,2 @@
requests>=2.27.1
pyyaml>=6.0

Some files were not shown because too many files have changed in this diff.