Mirror of https://github.com/Security-Onion-Solutions/securityonion.git, synced 2026-01-23 16:33:29 +01:00

Compare commits: 2.3.110-20 ... 2.3.130-20 (364 commits)
(364 commits are listed in this compare view, identified only by abbreviated SHA1 hashes, from 65165e52f4 through 6f15acd2f9; the author, date, and commit-message columns did not render in this mirror.)
.github/.gitleaks.toml (vendored, new file, 546 lines)
@@ -0,0 +1,546 @@
title = "gitleaks config"

# Gitleaks rules are defined by regular expressions and entropy ranges.
# Some secrets have unique signatures which make detecting those secrets easy.
# Examples of those secrets would be GitLab Personal Access Tokens, AWS keys, and GitHub Access Tokens.
# All these examples have defined prefixes like `glpat`, `AKIA`, `ghp_`, etc.
#
# Other secrets might just be a hash which means we need to write more complex rules to verify
# that what we are matching is a secret.
#
# Here is an example of a semi-generic secret
#
#   discord_client_secret = "8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ"
#
# We can write a regular expression to capture the variable name (identifier),
# the assignment symbol (like '=' or ':='), and finally the actual secret.
# The structure of a rule to match this example secret is below:
#
#                                                       Beginning string
#                                                           quotation
#                                                               │       End string quotation
#                                                               │                      │
#                                                               ▼                      ▼
#    (?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]
#
#                   ▲                             ▲                        ▲
#                   │                             │                        │
#                   │                             │                        │
#              identifier                  assignment symbol             Secret
#
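For illustration, here is a minimal Python sketch (not part of the repository) that applies the semi-generic pattern above to the example assignment and pulls out the captured secret group; the regex is copied verbatim from the rule structure shown:

```python
import re

# Semi-generic pattern from the comment above: identifier, assignment symbol,
# then a 32-character secret wrapped in quotes (captured as group 3).
pattern = re.compile(
    r"(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]"
)

line = 'discord_client_secret = "8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ"'
match = pattern.search(line)
if match:
    print("identifier:", match.group(1).strip())  # discord_client_secret
    print("secret:", match.group(3))              # 8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ
```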
[[rules]]
|
||||
id = "gitlab-pat"
|
||||
description = "GitLab Personal Access Token"
|
||||
regex = '''glpat-[0-9a-zA-Z\-\_]{20}'''
|
||||
|
||||
[[rules]]
|
||||
id = "aws-access-token"
|
||||
description = "AWS"
|
||||
regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'''
|
||||
|
||||
# Cryptographic keys
|
||||
[[rules]]
|
||||
id = "PKCS8-PK"
|
||||
description = "PKCS8 private key"
|
||||
regex = '''-----BEGIN PRIVATE KEY-----'''
|
||||
|
||||
[[rules]]
|
||||
id = "RSA-PK"
|
||||
description = "RSA private key"
|
||||
regex = '''-----BEGIN RSA PRIVATE KEY-----'''
|
||||
|
||||
[[rules]]
|
||||
id = "OPENSSH-PK"
|
||||
description = "SSH private key"
|
||||
regex = '''-----BEGIN OPENSSH PRIVATE KEY-----'''
|
||||
|
||||
[[rules]]
|
||||
id = "PGP-PK"
|
||||
description = "PGP private key"
|
||||
regex = '''-----BEGIN PGP PRIVATE KEY BLOCK-----'''
|
||||
|
||||
[[rules]]
|
||||
id = "github-pat"
|
||||
description = "GitHub Personal Access Token"
|
||||
regex = '''ghp_[0-9a-zA-Z]{36}'''
|
||||
|
||||
[[rules]]
|
||||
id = "github-oauth"
|
||||
description = "GitHub OAuth Access Token"
|
||||
regex = '''gho_[0-9a-zA-Z]{36}'''
|
||||
|
||||
[[rules]]
|
||||
id = "SSH-DSA-PK"
|
||||
description = "SSH (DSA) private key"
|
||||
regex = '''-----BEGIN DSA PRIVATE KEY-----'''
|
||||
|
||||
[[rules]]
|
||||
id = "SSH-EC-PK"
|
||||
description = "SSH (EC) private key"
|
||||
regex = '''-----BEGIN EC PRIVATE KEY-----'''
|
||||
|
||||
|
||||
[[rules]]
|
||||
id = "github-app-token"
|
||||
description = "GitHub App Token"
|
||||
regex = '''(ghu|ghs)_[0-9a-zA-Z]{36}'''
|
||||
|
||||
[[rules]]
|
||||
id = "github-refresh-token"
|
||||
description = "GitHub Refresh Token"
|
||||
regex = '''ghr_[0-9a-zA-Z]{76}'''
|
||||
|
||||
[[rules]]
|
||||
id = "shopify-shared-secret"
|
||||
description = "Shopify shared secret"
|
||||
regex = '''shpss_[a-fA-F0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "shopify-access-token"
|
||||
description = "Shopify access token"
|
||||
regex = '''shpat_[a-fA-F0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "shopify-custom-access-token"
|
||||
description = "Shopify custom app access token"
|
||||
regex = '''shpca_[a-fA-F0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "shopify-private-app-access-token"
|
||||
description = "Shopify private app access token"
|
||||
regex = '''shppa_[a-fA-F0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "slack-access-token"
|
||||
description = "Slack token"
|
||||
regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?'''
|
||||
|
||||
[[rules]]
|
||||
id = "stripe-access-token"
|
||||
description = "Stripe"
|
||||
regex = '''(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "pypi-upload-token"
|
||||
description = "PyPI upload token"
|
||||
regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}'''
|
||||
|
||||
[[rules]]
|
||||
id = "gcp-service-account"
|
||||
description = "Google (GCP) Service-account"
|
||||
regex = '''\"type\": \"service_account\"'''
|
||||
|
||||
[[rules]]
|
||||
id = "heroku-api-key"
|
||||
description = "Heroku API Key"
|
||||
regex = ''' (?i)(heroku[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "slack-web-hook"
|
||||
description = "Slack Webhook"
|
||||
regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8,12}/[a-zA-Z0-9_]{24}'''
|
||||
|
||||
[[rules]]
|
||||
id = "twilio-api-key"
|
||||
description = "Twilio API Key"
|
||||
regex = '''SK[0-9a-fA-F]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "age-secret-key"
|
||||
description = "Age secret key"
|
||||
regex = '''AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}'''
|
||||
|
||||
[[rules]]
|
||||
id = "facebook-token"
|
||||
description = "Facebook token"
|
||||
regex = '''(?i)(facebook[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "twitter-token"
|
||||
description = "Twitter token"
|
||||
regex = '''(?i)(twitter[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{35,44})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "adobe-client-id"
|
||||
description = "Adobe Client ID (Oauth Web)"
|
||||
regex = '''(?i)(adobe[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "adobe-client-secret"
|
||||
description = "Adobe Client Secret"
|
||||
regex = '''(p8e-)(?i)[a-z0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "alibaba-access-key-id"
|
||||
description = "Alibaba AccessKey ID"
|
||||
regex = '''(LTAI)(?i)[a-z0-9]{20}'''
|
||||
|
||||
[[rules]]
|
||||
id = "alibaba-secret-key"
|
||||
description = "Alibaba Secret Key"
|
||||
regex = '''(?i)(alibaba[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "asana-client-id"
|
||||
description = "Asana Client ID"
|
||||
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{16})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "asana-client-secret"
|
||||
description = "Asana Client Secret"
|
||||
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "atlassian-api-token"
|
||||
description = "Atlassian API token"
|
||||
regex = '''(?i)(atlassian[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{24})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "bitbucket-client-id"
|
||||
description = "Bitbucket client ID"
|
||||
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "bitbucket-client-secret"
|
||||
description = "Bitbucket client secret"
|
||||
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9_\-]{64})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "beamer-api-token"
|
||||
description = "Beamer API token"
|
||||
regex = '''(?i)(beamer[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](b_[a-z0-9=_\-]{44})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "clojars-api-token"
|
||||
description = "Clojars API token"
|
||||
regex = '''(CLOJARS_)(?i)[a-z0-9]{60}'''
|
||||
|
||||
[[rules]]
|
||||
id = "contentful-delivery-api-token"
|
||||
description = "Contentful delivery API token"
|
||||
regex = '''(?i)(contentful[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{43})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "databricks-api-token"
|
||||
description = "Databricks API token"
|
||||
regex = '''dapi[a-h0-9]{32}'''
|
||||
|
||||
[[rules]]
|
||||
id = "discord-api-token"
|
||||
description = "Discord API key"
|
||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "discord-client-id"
|
||||
description = "Discord client ID"
|
||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{18})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "discord-client-secret"
|
||||
description = "Discord client secret"
|
||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "doppler-api-token"
|
||||
description = "Doppler API token"
|
||||
regex = '''['\"](dp\.pt\.)(?i)[a-z0-9]{43}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "dropbox-api-secret"
|
||||
description = "Dropbox API secret/key"
|
||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "dropbox--api-key"
|
||||
description = "Dropbox API secret/key"
|
||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "dropbox-short-lived-api-token"
|
||||
description = "Dropbox short lived API token"
|
||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](sl\.[a-z0-9\-=_]{135})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "dropbox-long-lived-api-token"
|
||||
description = "Dropbox long lived API token"
|
||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"][a-z0-9]{11}(AAAAAAAAAA)[a-z0-9\-_=]{43}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "duffel-api-token"
|
||||
description = "Duffel API token"
|
||||
regex = '''['\"]duffel_(test|live)_(?i)[a-z0-9_-]{43}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "dynatrace-api-token"
|
||||
description = "Dynatrace API token"
|
||||
regex = '''['\"]dt0c01\.(?i)[a-z0-9]{24}\.[a-z0-9]{64}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "easypost-api-token"
|
||||
description = "EasyPost API token"
|
||||
regex = '''['\"]EZAK(?i)[a-z0-9]{54}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "easypost-test-api-token"
|
||||
description = "EasyPost test API token"
|
||||
regex = '''['\"]EZTK(?i)[a-z0-9]{54}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "fastly-api-token"
|
||||
description = "Fastly API token"
|
||||
regex = '''(?i)(fastly[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "finicity-client-secret"
|
||||
description = "Finicity client secret"
|
||||
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{20})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "finicity-api-token"
|
||||
description = "Finicity API token"
|
||||
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "flutterwave-public-key"
|
||||
description = "Flutterwave public key"
|
||||
regex = '''FLWPUBK_TEST-(?i)[a-h0-9]{32}-X'''
|
||||
|
||||
[[rules]]
|
||||
id = "flutterwave-secret-key"
|
||||
description = "Flutterwave secret key"
|
||||
regex = '''FLWSECK_TEST-(?i)[a-h0-9]{32}-X'''
|
||||
|
||||
[[rules]]
|
||||
id = "flutterwave-enc-key"
|
||||
description = "Flutterwave encrypted key"
|
||||
regex = '''FLWSECK_TEST[a-h0-9]{12}'''
|
||||
|
||||
[[rules]]
|
||||
id = "frameio-api-token"
|
||||
description = "Frame.io API token"
|
||||
regex = '''fio-u-(?i)[a-z0-9\-_=]{64}'''
|
||||
|
||||
[[rules]]
|
||||
id = "gocardless-api-token"
|
||||
description = "GoCardless API token"
|
||||
regex = '''['\"]live_(?i)[a-z0-9\-_=]{40}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "grafana-api-token"
|
||||
description = "Grafana API token"
|
||||
regex = '''['\"]eyJrIjoi(?i)[a-z0-9\-_=]{72,92}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "hashicorp-tf-api-token"
|
||||
description = "HashiCorp Terraform user/org API token"
|
||||
regex = '''['\"](?i)[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "hubspot-api-token"
|
||||
description = "HubSpot API token"
|
||||
regex = '''(?i)(hubspot[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "intercom-api-token"
|
||||
description = "Intercom API token"
|
||||
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_]{60})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "intercom-client-secret"
|
||||
description = "Intercom client secret/ID"
|
||||
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "ionic-api-token"
|
||||
description = "Ionic API token"
|
||||
regex = '''(?i)(ionic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](ion_[a-z0-9]{42})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "linear-api-token"
|
||||
description = "Linear API token"
|
||||
regex = '''lin_api_(?i)[a-z0-9]{40}'''
|
||||
|
||||
[[rules]]
|
||||
id = "linear-client-secret"
|
||||
description = "Linear client secret/ID"
|
||||
regex = '''(?i)(linear[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "lob-api-key"
|
||||
description = "Lob API Key"
|
||||
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((live|test)_[a-f0-9]{35})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "lob-pub-api-key"
|
||||
description = "Lob Publishable API Key"
|
||||
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((test|live)_pub_[a-f0-9]{31})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "mailchimp-api-key"
|
||||
description = "Mailchimp API key"
|
||||
regex = '''(?i)(mailchimp[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32}-us20)['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "mailgun-private-api-token"
|
||||
description = "Mailgun private API token"
|
||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](key-[a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "mailgun-pub-key"
|
||||
description = "Mailgun public validation key"
|
||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](pubkey-[a-f0-9]{32})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "mailgun-signing-key"
|
||||
description = "Mailgun webhook signing key"
|
||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{32}-[a-h0-9]{8}-[a-h0-9]{8})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "mapbox-api-token"
|
||||
description = "Mapbox API token"
|
||||
regex = '''(?i)(pk\.[a-z0-9]{60}\.[a-z0-9]{22})'''
|
||||
|
||||
[[rules]]
|
||||
id = "messagebird-api-token"
|
||||
description = "MessageBird API token"
|
||||
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{25})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "messagebird-client-id"
|
||||
description = "MessageBird API client ID"
|
||||
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "new-relic-user-api-key"
|
||||
description = "New Relic user API Key"
|
||||
regex = '''['\"](NRAK-[A-Z0-9]{27})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "new-relic-user-api-id"
|
||||
description = "New Relic user API ID"
|
||||
regex = '''(?i)(newrelic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([A-Z0-9]{64})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "new-relic-browser-api-token"
|
||||
description = "New Relic ingest browser API token"
|
||||
regex = '''['\"](NRJS-[a-f0-9]{19})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "npm-access-token"
|
||||
description = "npm access token"
|
||||
regex = '''['\"](npm_(?i)[a-z0-9]{36})['\"]'''
|
||||
|
||||
[[rules]]
|
||||
id = "planetscale-password"
|
||||
description = "PlanetScale password"
|
||||
regex = '''pscale_pw_(?i)[a-z0-9\-_\.]{43}'''
|
||||
|
||||
[[rules]]
|
||||
id = "planetscale-api-token"
|
||||
description = "PlanetScale API token"
|
||||
regex = '''pscale_tkn_(?i)[a-z0-9\-_\.]{43}'''
|
||||
|
||||
[[rules]]
|
||||
id = "postman-api-token"
|
||||
description = "Postman API token"
|
||||
regex = '''PMAK-(?i)[a-f0-9]{24}\-[a-f0-9]{34}'''
|
||||
|
||||
[[rules]]
|
||||
id = "pulumi-api-token"
|
||||
description = "Pulumi API token"
|
||||
regex = '''pul-[a-f0-9]{40}'''
|
||||
|
||||
[[rules]]
|
||||
id = "rubygems-api-token"
|
||||
description = "Rubygem API token"
|
||||
regex = '''rubygems_[a-f0-9]{48}'''
|
||||
|
||||
[[rules]]
|
||||
id = "sendgrid-api-token"
|
||||
description = "SendGrid API token"
|
||||
regex = '''SG\.(?i)[a-z0-9_\-\.]{66}'''
|
||||
|
||||
[[rules]]
|
||||
id = "sendinblue-api-token"
|
||||
description = "Sendinblue API token"
|
||||
regex = '''xkeysib-[a-f0-9]{64}\-(?i)[a-z0-9]{16}'''
|
||||
|
||||
[[rules]]
|
||||
id = "shippo-api-token"
|
||||
description = "Shippo API token"
|
||||
regex = '''shippo_(live|test)_[a-f0-9]{40}'''
|
||||
|
||||
[[rules]]
|
||||
id = "linkedin-client-secret"
|
||||
description = "LinkedIn Client secret"
|
||||
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z]{16})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "linkedin-client-id"
|
||||
description = "LinkedIn Client ID"
|
||||
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{14})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "twitch-api-token"
|
||||
description = "Twitch API token"
|
||||
regex = '''(?i)(twitch[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "typeform-api-token"
|
||||
description = "Typeform API token"
|
||||
regex = '''(?i)(typeform[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}(tfp_[a-z0-9\-_\.=]{59})'''
|
||||
secretGroup = 3
|
||||
|
||||
[[rules]]
|
||||
id = "generic-api-key"
|
||||
description = "Generic API Key"
|
||||
regex = '''(?i)((key|api[^Version]|token|secret|password)[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9a-zA-Z\-_=]{8,64})['\"]'''
|
||||
entropy = 3.7
|
||||
secretGroup = 4

[allowlist]
description = "global allow lists"
regexes = ['''219-09-9999''', '''078-05-1120''', '''(9[0-9]{2}|666)-\d{2}-\d{4}''']
paths = [
    '''gitleaks.toml''',
    '''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
    '''(go.mod|go.sum)$''',
    '''salt/nginx/files/enterprise-attack.json'''
]
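The generic-api-key rule earlier in this file pairs its regex with `entropy = 3.7`, so a match is only reported when the captured string looks random enough. A rough Python sketch of that Shannon-entropy check follows (an approximation for illustration, not gitleaks' exact implementation):

```python
import math
from collections import Counter

def shannon_entropy(s: str) -> float:
    """Bits of entropy per character of the candidate secret."""
    counts = Counter(s)
    n = len(s)
    return -sum((c / n) * math.log2(c / n) for c in counts.values())

# A random-looking token clears the 3.7 threshold; an ordinary word does not.
print(round(shannon_entropy("8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ"), 2))  # ~4.41
print(round(shannon_entropy("password"), 2))                          # 2.75
```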
.github/workflows/leaktest.yml (vendored, 2 lines added)
@@ -13,3 +13,5 @@ jobs:
      - name: Gitleaks
        uses: zricethezav/gitleaks-action@master
        with:
          config-path: .github/.gitleaks.toml
.github/workflows/pythontest.yml (vendored, new file, 31 lines)
@@ -0,0 +1,31 @@
name: python-test

on: [push, pull_request]

jobs:
  build:

    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.10"]
        python-code-path: ["salt/sensoroni/files/analyzers"]

    steps:
    - uses: actions/checkout@v3
    - name: Set up Python ${{ matrix.python-version }}
      uses: actions/setup-python@v3
      with:
        python-version: ${{ matrix.python-version }}
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
        python -m pip install flake8 pytest pytest-cov
        find . -name requirements.txt -exec pip install -r {} \;
    - name: Lint with flake8
      run: |
        flake8 ${{ matrix.python-code-path }} --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics
    - name: Test with pytest
      run: |
        pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=100 --cov-config=${{ matrix.python-code-path }}/pytest.ini
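Because the pytest step runs with `--doctest-modules` and `--cov-fail-under=100`, modules under the analyzer path are expected to carry executable examples of their own. A hypothetical helper in that style might look like this (illustrative only; not an actual module from the repo):

```python
def classify_score(score: int) -> str:
    """Map a numeric analyzer score to a verdict string.

    >>> classify_score(0)
    'harmless'
    >>> classify_score(75)
    'suspicious'
    >>> classify_score(100)
    'malicious'
    """
    if score >= 100:
        return 'malicious'
    if score >= 50:
        return 'suspicious'
    return 'harmless'
```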
.gitignore (vendored, 13 lines changed)
@@ -56,4 +56,15 @@ $RECYCLE.BIN/
# Windows shortcuts
*.lnk

# End of https://www.gitignore.io/api/macos,windows

# Pytest output
__pycache__
.pytest_cache
.coverage
*.pyc
.venv

# Analyzer dev/test config files
*_dev.yaml
site-packages
README.md (14 lines changed)
@@ -1,14 +1,20 @@
## Security Onion 2.3.110
## Security Onion 2.3.130

Security Onion 2.3.110 is here!
Security Onion 2.3.130 is here!

## Screenshots

Alerts


Dashboards


Hunt


Cases


### Release Notes
@@ -1,18 +1,18 @@
### 2.3.110-20220407 ISO image built on 2022/04/07
### 2.3.130-20220607 ISO image built on 2022/06/07


### Download and Verify

2.3.110-20220407 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220407.iso
2.3.130-20220607 ISO image:
https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso

MD5: 928D589709731EFE9942CA134A6F4C6B
SHA1: CA588A684586CC0D5BDE5E0E41C935FFB939B6C7
SHA256: CBF8743838AF2C7323E629FB6B28D5DD00AE6658B0E29E4D0916411D2D526BD2
MD5: 0034D6A9461C04357AFF512875408A4C
SHA1: BF80EEB101C583153CAD8E185A7DB3173FD5FFE8
SHA256: 15943623B96D8BB4A204A78668447F36B54A63ABA5F8467FBDF0B25C5E4E6078

Signature for ISO image:
https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220407.iso.sig
https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig

Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS

@@ -26,22 +26,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/ma

Download the signature file for the ISO:
```
wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.110-20220407.iso.sig
wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.130-20220607.iso.sig
```

Download the ISO image:
```
wget https://download.securityonion.net/file/securityonion/securityonion-2.3.110-20220407.iso
wget https://download.securityonion.net/file/securityonion/securityonion-2.3.130-20220607.iso
```

Verify the downloaded ISO image using the signature file:
```
gpg --verify securityonion-2.3.110-20220407.iso.sig securityonion-2.3.110-20220407.iso
gpg --verify securityonion-2.3.130-20220607.iso.sig securityonion-2.3.130-20220607.iso
```

The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
gpg: Signature made Thu 07 Apr 2022 03:30:03 PM EDT using RSA key ID FE507013
gpg: Signature made Tue 07 Jun 2022 01:27:20 PM EDT using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.
```
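In addition to the GPG check above, the downloaded image can be compared against the published SHA256 value. A small Python sketch of that check (file name and hash taken from the 2.3.130 release lines above):

```python
import hashlib

EXPECTED_SHA256 = "15943623B96D8BB4A204A78668447F36B54A63ABA5F8467FBDF0B25C5E4E6078"

def sha256_of(path: str) -> str:
    """Hash a large file in 1 MiB chunks and return the uppercase hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest().upper()

digest = sha256_of("securityonion-2.3.130-20220607.iso")
print("OK" if digest == EXPECTED_SHA256 else f"MISMATCH: {digest}")
```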
Binary file not shown. Before: 245 KiB
BIN assets/images/screenshots/alerts.png (new file). Binary file not shown. After: 186 KiB
BIN assets/images/screenshots/cases-comments.png (new file). Binary file not shown. After: 201 KiB
BIN assets/images/screenshots/dashboards.png (new file). Binary file not shown. After: 386 KiB
Binary file not shown. Before: 168 KiB
BIN assets/images/screenshots/hunt.png (new file). Binary file not shown. After: 191 KiB
pillar/elasticsearch/index_templates.sls (new file, 2 lines)
@@ -0,0 +1,2 @@
elasticsearch:
  index_settings:
@@ -2,7 +2,7 @@
|
||||
{% set cached_grains = salt.saltutil.runner('cache.grains', tgt='*') %}
|
||||
{% for minionid, ip in salt.saltutil.runner(
|
||||
'mine.get',
|
||||
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix ',
|
||||
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-node or G@role:so-heavynode or G@role:so-receiver or G@role:so-helix',
|
||||
fun='network.ip_addrs',
|
||||
tgt_type='compound') | dictsort()
|
||||
%}
|
||||
|
||||
@@ -13,4 +13,5 @@ logstash:
|
||||
- so/9600_output_ossec.conf.jinja
|
||||
- so/9700_output_strelka.conf.jinja
|
||||
- so/9800_output_logscan.conf.jinja
|
||||
- so/9801_output_rita.conf.jinja
|
||||
- so/9900_output_endgame.conf.jinja
|
||||
|
||||
@@ -15,12 +15,12 @@ base:
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- logstash.search
|
||||
- elasticsearch.search
|
||||
- elasticsearch.index_templates
|
||||
|
||||
'*_manager':
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- elasticsearch.manager
|
||||
- elasticsearch.index_templates
|
||||
|
||||
'*_manager or *_managersearch':
|
||||
- match: compound
|
||||
@@ -46,7 +46,7 @@ base:
|
||||
- zeeklogs
|
||||
- secrets
|
||||
- healthcheck.eval
|
||||
- elasticsearch.eval
|
||||
- elasticsearch.index_templates
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||
- elasticsearch.auth
|
||||
{% endif %}
|
||||
@@ -60,7 +60,7 @@ base:
|
||||
- logstash
|
||||
- logstash.manager
|
||||
- logstash.search
|
||||
- elasticsearch.search
|
||||
- elasticsearch.index_templates
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||
- elasticsearch.auth
|
||||
{% endif %}
|
||||
@@ -106,7 +106,7 @@ base:
|
||||
'*_searchnode':
|
||||
- logstash
|
||||
- logstash.search
|
||||
- elasticsearch.search
|
||||
- elasticsearch.index_templates
|
||||
- elasticsearch.auth
|
||||
- global
|
||||
- minions.{{ grains.id }}
|
||||
@@ -122,7 +122,7 @@ base:
|
||||
'*_import':
|
||||
- zeeklogs
|
||||
- secrets
|
||||
- elasticsearch.eval
|
||||
- elasticsearch.index_templates
|
||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||
- elasticsearch.auth
|
||||
{% endif %}
|
||||
@@ -131,3 +131,6 @@ base:
|
||||
{% endif %}
|
||||
- global
|
||||
- minions.{{ grains.id }}
|
||||
|
||||
'*_workstation':
|
||||
- minions.{{ grains.id }}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
|
||||
{% set WAZUH = salt['pillar.get']('global:wazuh', '0') %}
|
||||
{% set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
|
||||
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||
{% set FREQSERVER = salt['pillar.get']('manager:freq', '0') %}
|
||||
{% set DOMAINSTATS = salt['pillar.get']('manager:domainstats', '0') %}
|
||||
@@ -218,6 +217,8 @@
|
||||
'schedule',
|
||||
'docker_clean'
|
||||
],
|
||||
'so-workstation': [
|
||||
],
|
||||
}, grain='role') %}
|
||||
|
||||
{% if FILEBEAT and grains.role in ['so-helixsensor', 'so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-import', 'so-receiver'] %}
|
||||
@@ -273,10 +274,6 @@
|
||||
{% do allowed_states.append('elastalert') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (THEHIVE != 0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('thehive') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (PLAYBOOK !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('playbook') %}
|
||||
{% endif %}
|
||||
|
||||
@@ -300,8 +300,17 @@ sostatus_log:
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
|
||||
{% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
|
||||
# Install cron job to determine size of influxdb for telegraf
|
||||
'du -s -k /nsm/influxdb | cut -f1 > /opt/so/log/telegraf/influxdb_size.log 2>&1':
|
||||
cron.present:
|
||||
- user: root
|
||||
- minute: '*/1'
|
||||
- hour: '*'
|
||||
- daymonth: '*'
|
||||
- month: '*'
|
||||
- dayweek: '*'
|
||||
|
||||
# Lock permissions on the backup directory
|
||||
backupdir:
|
||||
file.directory:
|
||||
|
||||
@@ -15,295 +15,86 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo "This script must be run using sudo!"
|
||||
exit 1
|
||||
fi
|
||||
doc_workstation_url="https://docs.securityonion.net/en/2.3/analyst-vm.html"
|
||||
{# we only want the script to install the workstation if it is CentOS -#}
|
||||
{% if grains.os == 'CentOS' -%}
|
||||
{# if this is a manager -#}
|
||||
{% if grains.master == grains.id.split('_')|first -%}
|
||||
|
||||
INSTALL_LOG=/root/so-analyst-install.log
|
||||
exec &> >(tee -a "$INSTALL_LOG")
|
||||
source /usr/sbin/so-common
|
||||
pillar_file="/opt/so/saltstack/local/pillar/minions/{{grains.id}}.sls"
|
||||
|
||||
log() {
|
||||
msg=$1
|
||||
level=${2:-I}
|
||||
now=$(TZ=GMT date +"%Y-%m-%dT%H:%M:%SZ")
|
||||
echo -e "$now | $level | $msg" >> "$INSTALL_LOG" 2>&1
|
||||
}
|
||||
if [ -f "$pillar_file" ]; then
|
||||
if ! grep -q "^workstation:$" "$pillar_file"; then
|
||||
|
||||
error() {
|
||||
log "$1" "E"
|
||||
}
|
||||
|
||||
info() {
|
||||
log "$1" "I"
|
||||
}
|
||||
|
||||
title() {
|
||||
echo -e "\n-----------------------------\n $1\n-----------------------------\n" >> "$INSTALL_LOG" 2>&1
|
||||
}
|
||||
|
||||
logCmd() {
|
||||
cmd=$1
|
||||
info "Executing command: $cmd"
|
||||
$cmd >> "$INSTALL_LOG" 2>&1
|
||||
}
|
||||
|
||||
analyze_system() {
|
||||
title "System Characteristics"
|
||||
logCmd "uptime"
|
||||
logCmd "uname -a"
|
||||
logCmd "free -h"
|
||||
logCmd "lscpu"
|
||||
logCmd "df -h"
|
||||
logCmd "ip a"
|
||||
}
|
||||
|
||||
analyze_system
|
||||
|
||||
OS=$(grep PRETTY_NAME /etc/os-release | grep 'CentOS Linux 7')
|
||||
if [ $? -ne 0 ]; then
|
||||
echo "This is an unsupported OS. Please use CentOS 7 to install the analyst node."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ "$manufacturer" == "Security Onion Solutions" && "$family" == "Automated" ]]; then
|
||||
INSTALL=yes
|
||||
CURLCONTINUE=no
|
||||
else
|
||||
INSTALL=''
|
||||
CURLCONTINUE=''
|
||||
fi
|
||||
|
||||
FIRSTPASS=yes
|
||||
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
|
||||
if [[ "$FIRSTPASS" == "yes" ]]; then
|
||||
clear
|
||||
echo "###########################################"
|
||||
echo "## ** W A R N I N G ** ##"
|
||||
echo "## _______________________________ ##"
|
||||
echo "## ##"
|
||||
echo "## Installing the Security Onion ##"
|
||||
echo "## analyst node on this device will ##"
|
||||
echo "## make permanent changes to ##"
|
||||
echo "## the system. ##"
|
||||
echo "## ##"
|
||||
echo "###########################################"
|
||||
echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)"
|
||||
FIRSTPASS=no
|
||||
else
|
||||
echo "Please type 'yes' to continue or 'no' to exit."
|
||||
fi
|
||||
read INSTALL
|
||||
done
|
||||
|
||||
if [[ $INSTALL == "no" ]]; then
|
||||
echo "Exiting analyst node installation."
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Testing for internet connection with curl https://securityonionsolutions.com/"
|
||||
CANCURL=$(curl -sI https://securityonionsolutions.com/ | grep "200 OK")
|
||||
if [ $? -ne 0 ]; then
|
||||
FIRSTPASS=yes
|
||||
while [[ $CURLCONTINUE != "yes" ]] && [[ $CURLCONTINUE != "no" ]]; do
|
||||
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
|
||||
if [[ "$FIRSTPASS" == "yes" ]]; then
|
||||
echo "We could not access https://securityonionsolutions.com/."
|
||||
echo "Since packages are downloaded from the internet, internet access is required."
|
||||
echo "If you would like to ignore this warning and continue anyway, please type 'yes'."
|
||||
echo "Otherwise, type 'no' to exit."
|
||||
echo "###########################################"
|
||||
echo "## ** W A R N I N G ** ##"
|
||||
echo "## _______________________________ ##"
|
||||
echo "## ##"
|
||||
echo "## Installing the Security Onion ##"
|
||||
echo "## analyst node on this device will ##"
|
||||
echo "## make permanent changes to ##"
|
||||
echo "## the system. ##"
|
||||
echo "## A system reboot will be required ##"
|
||||
echo "## to complete the install. ##"
|
||||
echo "## ##"
|
||||
echo "###########################################"
|
||||
echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)"
|
||||
FIRSTPASS=no
|
||||
else
|
||||
echo "Please type 'yes' to continue or 'no' to exit."
|
||||
fi
|
||||
read CURLCONTINUE
|
||||
fi
|
||||
read INSTALL
|
||||
done
|
||||
if [[ "$CURLCONTINUE" == "no" ]]; then
|
||||
|
||||
if [[ $INSTALL == "no" ]]; then
|
||||
echo "Exiting analyst node installation."
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
echo "We were able to curl https://securityonionsolutions.com/."
|
||||
sleep 3
|
||||
|
||||
# Add workstation pillar to the minion's pillar file
|
||||
printf '%s\n'\
|
||||
"workstation:"\
|
||||
" gui:"\
|
||||
" enabled: true"\
|
||||
"" >> "$pillar_file"
|
||||
echo "Applying the workstation state. This could take some time since there are many packages that need to be installed."
|
||||
if salt-call state.apply workstation -linfo queue=True; then # make sure the state ran successfully
|
||||
echo ""
|
||||
echo "Analyst workstation has been installed!"
|
||||
echo "Press ENTER to reboot or Ctrl-C to cancel."
|
||||
read pause
|
||||
|
||||
reboot;
|
||||
else
|
||||
echo "There was an issue applying the workstation state. Please review the log above or at /opt/so/logs/salt/minion."
|
||||
fi
|
||||
else # workstation is already added
|
||||
echo "The workstation pillar already exists in $pillar_file."
|
||||
echo "To enable/disable the gui, set 'workstation:gui:enabled' to true or false in $pillar_file."
|
||||
echo "Additional documentation can be found at $doc_workstation_url."
|
||||
fi
|
||||
|
||||
# Install a GUI text editor
|
||||
yum -y install gedit
|
||||
|
||||
# Install misc utils
|
||||
yum -y install wget curl unzip epel-release yum-plugin-versionlock;
|
||||
|
||||
# Install xWindows
|
||||
yum -y groupinstall "X Window System";
|
||||
yum -y install gnome-classic-session gnome-terminal nautilus-open-terminal control-center liberation-mono-fonts;
|
||||
unlink /etc/systemd/system/default.target;
|
||||
ln -sf /lib/systemd/system/graphical.target /etc/systemd/system/default.target;
|
||||
yum -y install file-roller
|
||||
|
||||
# Install Mono - prereq for NetworkMiner
|
||||
yum -y install mono-core mono-basic mono-winforms expect
|
||||
|
||||
# Install NetworkMiner
|
||||
yum -y install libcanberra-gtk2;
|
||||
wget https://www.netresec.com/?download=NetworkMiner -O /tmp/nm.zip;
|
||||
mkdir -p /opt/networkminer/
|
||||
unzip /tmp/nm.zip -d /opt/networkminer/;
|
||||
rm /tmp/nm.zip;
|
||||
mv /opt/networkminer/NetworkMiner_*/* /opt/networkminer/
|
||||
chmod +x /opt/networkminer/NetworkMiner.exe;
|
||||
chmod -R go+w /opt/networkminer/AssembledFiles/;
|
||||
chmod -R go+w /opt/networkminer/Captures/;
|
||||
# Create networkminer shim
|
||||
cat << EOF >> /bin/networkminer
|
||||
#!/bin/bash
|
||||
/bin/mono /opt/networkminer/NetworkMiner.exe --noupdatecheck "\$@"
|
||||
EOF
|
||||
chmod +x /bin/networkminer
|
||||
# Convert networkminer ico file to png format
|
||||
yum -y install ImageMagick
|
||||
convert /opt/networkminer/networkminericon.ico /opt/networkminer/networkminericon.png
|
||||
# Create menu entry
|
||||
cat << EOF >> /usr/share/applications/networkminer.desktop
|
||||
[Desktop Entry]
|
||||
Name=NetworkMiner
|
||||
Comment=NetworkMiner
|
||||
Encoding=UTF-8
|
||||
Exec=/bin/networkminer %f
|
||||
Icon=/opt/networkminer/networkminericon-4.png
|
||||
StartupNotify=true
|
||||
Terminal=false
|
||||
X-MultipleArgs=false
|
||||
Type=Application
|
||||
MimeType=application/x-pcap;
|
||||
Categories=Network;
|
||||
EOF
|
||||
|
||||
# Set default monospace font to Liberation
|
||||
cat << EOF >> /etc/fonts/local.conf
|
||||
<match target="pattern">
|
||||
<test name="family" qual="any">
|
||||
<string>monospace</string>
|
||||
</test>
|
||||
<edit binding="strong" mode="prepend" name="family">
|
||||
<string>Liberation Mono</string>
|
||||
</edit>
|
||||
</match>
|
||||
EOF
|
||||
|
||||
# Install Wireshark for Gnome
|
||||
yum -y install wireshark-gnome;
|
||||
|
||||
# Install dsniff
|
||||
yum -y install dsniff;
|
||||
|
||||
# Install hping3
|
||||
yum -y install hping3;
|
||||
|
||||
# Install netsed
|
||||
yum -y install netsed;
|
||||
|
||||
# Install ngrep
|
||||
yum -y install ngrep;
|
||||
|
||||
# Install scapy
|
||||
yum -y install python36-scapy;
|
||||
|
||||
# Install ssldump
|
||||
yum -y install ssldump;
|
||||
|
||||
# Install tcpdump
|
||||
yum -y install tcpdump;
|
||||
|
||||
# Install tcpflow
|
||||
yum -y install tcpflow;
|
||||
|
||||
# Install tcpxtract
|
||||
yum -y install tcpxtract;
|
||||
|
||||
# Install whois
|
||||
yum -y install whois;
|
||||
|
||||
# Install foremost
|
||||
yum -y install https://forensics.cert.org/centos/cert/7/x86_64//foremost-1.5.7-13.1.el7.x86_64.rpm;
|
||||
|
||||
# Install chromium
|
||||
yum -y install chromium;
|
||||
|
||||
# Install tcpstat
|
||||
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcpstat-1.5.0/securityonion-tcpstat-1.5.0.rpm;
|
||||
|
||||
# Install tcptrace
|
||||
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcptrace-6.6.7/securityonion-tcptrace-6.6.7.rpm;
|
||||
|
||||
# Install sslsplit
|
||||
yum -y install libevent;
|
||||
yum -y install sslsplit;
|
||||
|
||||
# Install Bit-Twist
|
||||
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-bittwist-2.0.0/securityonion-bittwist-2.0.0.rpm;
|
||||
|
||||
# Install chaosreader
|
||||
yum -y install perl-IO-Compress perl-Net-DNS;
|
||||
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-chaosreader-0.95.10/securityonion-chaosreader-0.95.10.rpm;
|
||||
chmod +x /bin/chaosreader;
|
||||
|
||||
if [ -f ../../files/analyst/README ]; then
|
||||
cp ../../files/analyst/README /;
|
||||
cp ../../files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
|
||||
cp ../../files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
|
||||
cp ../../files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
|
||||
else
|
||||
cp /opt/so/saltstack/default/salt/common/files/analyst/README /;
|
||||
cp /opt/so/saltstack/default/salt/common/files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
|
||||
cp /opt/so/saltstack/default/salt/common/files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
|
||||
cp /opt/so/saltstack/default/salt/common/files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
|
||||
else # if the pillar file doesn't exist
|
||||
echo "Could not find $pillar_file and add the workstation pillar."
|
||||
fi
|
||||
|
||||
# Set background wallpaper
|
||||
cat << EOF >> /etc/dconf/db/local.d/00-background
|
||||
# Specify the dconf path
|
||||
[org/gnome/desktop/background]
|
||||
{#- if this is not a manager #}
|
||||
{% else -%}
|
||||
|
||||
# Specify the path to the desktop background image file
|
||||
picture-uri='file:///usr/share/backgrounds/so-wallpaper.jpg'
|
||||
# Specify one of the rendering options for the background image:
|
||||
# 'none', 'wallpaper', 'centered', 'scaled', 'stretched', 'zoom', 'spanned'
|
||||
picture-options='zoom'
|
||||
# Specify the left or top color when drawing gradients or the solid color
|
||||
primary-color='000000'
|
||||
# Specify the right or bottom color when drawing gradients
|
||||
secondary-color='FFFFFF'
|
||||
EOF
|
||||
echo "Since this is not a manager, the pillar values to enable analyst workstation must be set manually. Please view the documentation at $doc_workstation_url."
|
||||
|
||||
# Set lock screen
|
||||
cat << EOF >> /etc/dconf/db/local.d/00-screensaver
|
||||
[org/gnome/desktop/session]
|
||||
idle-delay=uint32 180
|
||||
{#- endif if this is a manager #}
|
||||
{% endif -%}
|
||||
|
||||
[org/gnome/desktop/screensaver]
|
||||
lock-enabled=true
|
||||
lock-delay=uint32 120
|
||||
picture-options='zoom'
|
||||
picture-uri='file:///usr/share/backgrounds/so-lockscreen.jpg'
|
||||
EOF
|
||||
{#- if not CentOS #}
|
||||
{%- else %}
|
||||
|
||||
cat << EOF >> /etc/dconf/db/local.d/locks/screensaver
|
||||
/org/gnome/desktop/session/idle-delay
|
||||
/org/gnome/desktop/screensaver/lock-enabled
|
||||
/org/gnome/desktop/screensaver/lock-delay
|
||||
EOF
|
||||
echo "The Analyst Workstation can only be installed on CentOS. Please view the documentation at $doc_workstation_url."
|
||||
|
||||
# Do not show the user list at login screen
|
||||
cat << EOF >> /etc/dconf/db/local.d/00-login-screen
|
||||
[org/gnome/login-screen]
|
||||
logo='/usr/share/pixmaps/so-login-logo-dark.svg'
|
||||
disable-user-list=true
|
||||
EOF
|
||||
{#- endif grains.os == CentOS #}
|
||||
{% endif -%}
|
||||
|
||||
dconf update;
|
||||
|
||||
echo
|
||||
echo "Analyst workstation has been installed!"
|
||||
echo "Press ENTER to reboot or Ctrl-C to cancel."
|
||||
read pause
|
||||
|
||||
reboot;
|
||||
exit 0
|
||||
|
||||
@@ -120,6 +120,30 @@ check_elastic_license() {
|
||||
fi
|
||||
}
|
||||
|
||||
check_salt_master_status() {
|
||||
local timeout=$1
|
||||
echo "Checking if we can talk to the salt master"
|
||||
salt-call state.show_top concurrent=true
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
check_salt_minion_status() {
|
||||
local timeout=$1
|
||||
echo "Checking if the salt minion will respond to jobs" >> "$setup_log" 2>&1
|
||||
salt "$MINION_ID" test.ping -t $timeout > /dev/null 2>&1
|
||||
local status=$?
|
||||
if [ $status -gt 0 ]; then
|
||||
echo " Minion did not respond" >> "$setup_log" 2>&1
|
||||
else
|
||||
echo " Received job response from salt minion" >> "$setup_log" 2>&1
|
||||
fi
|
||||
|
||||
return $status
|
||||
}
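The minion check above shells out to `salt "$MINION_ID" test.ping -t $timeout` and branches on the exit code. For illustration, a rough Python equivalent of that polling logic (assuming the same salt CLI is on the PATH; this is a sketch, not part of the repo):

```python
import subprocess

def salt_minion_responds(minion_id: str, timeout: int = 30) -> bool:
    """Return True if the salt minion answers a test.ping within the timeout."""
    result = subprocess.run(
        ["salt", minion_id, "test.ping", "-t", str(timeout)],
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )
    return result.returncode == 0

# "so-manager_standalone" is a hypothetical minion id used only for this example.
if salt_minion_responds("so-manager_standalone"):
    print("Received job response from salt minion")
else:
    print("Minion did not respond")
```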
|
||||
|
||||
|
||||
|
||||
copy_new_files() {
|
||||
# Copy new files over to the salt dir
|
||||
cd $UPDATE_DIR
|
||||
@@ -367,6 +391,7 @@ run_check_net_err() {
|
||||
exit $exit_code
|
||||
fi
|
||||
}
|
||||
|
||||
set_cron_service_name() {
|
||||
if [[ "$OS" == "centos" ]]; then
|
||||
cron_service_name="crond"
|
||||
|
||||
@@ -17,5 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop cortex $1
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop cortex $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
|
||||
@@ -17,38 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <new-user-name>"
|
||||
echo ""
|
||||
echo "Adds a new user to Cortex. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||
CORTEX_ORG_NAME=$(lookup_pillar cortexorgname)
|
||||
CORTEX_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs CORTEX_PASS
|
||||
|
||||
# Create new user in Cortex
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user" -d "{\"name\": \"$CORTEX_USER\",\"roles\": [\"read\",\"analyze\",\"orgadmin\"],\"organization\": \"$CORTEX_ORG_NAME\",\"login\": \"$CORTEX_USER\",\"password\" : \"$CORTEX_PASS\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully added user to Cortex."
|
||||
else
|
||||
echo "Unable to add user to Cortex; user might already exist."
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name> <true|false>"
|
||||
echo ""
|
||||
echo "Enables or disables a user in Cortex."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||
CORTEX_USER=$USER
|
||||
|
||||
case "${2^^}" in
|
||||
FALSE | NO | 0)
|
||||
CORTEX_STATUS=Locked
|
||||
;;
|
||||
TRUE | YES | 1)
|
||||
CORTEX_STATUS=Ok
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
|
||||
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user/${CORTEX_USER}" -d "{\"status\":\"${CORTEX_STATUS}\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully updated user in Cortex."
|
||||
else
|
||||
echo "Failed to update user in Cortex."
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,9 +17,7 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('manager:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
|
||||
ESPORT=9200
|
||||
THEHIVEESPORT=9400
|
||||
|
||||
echo "Removing read only attributes for indices..."
|
||||
echo
|
||||
{{ ELASTICCURL }} -s -k -XPUT -H "Content-Type: application/json" -L https://$IP:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
|
||||
{{ ELASTICCURL }} -XPUT -H "Content-Type: application/json" -L http://$IP:9400/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
|
||||
|
||||
@@ -75,9 +75,6 @@ container_list() {
|
||||
"so-strelka-manager"
|
||||
"so-suricata"
|
||||
"so-telegraf"
|
||||
"so-thehive"
|
||||
"so-thehive-cortex"
|
||||
"so-thehive-es"
|
||||
"so-wazuh"
|
||||
"so-zeek"
|
||||
)
|
||||
|
||||
@@ -18,7 +18,7 @@
|
||||
. /usr/sbin/so-common
|
||||
|
||||
# Check to see if we are already running
|
||||
IS_RUNNING=$(ps aux | pgrep -f "so-playbook-sync" | wc -l)
|
||||
[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - Multiple Playbook Sync processes already running...exiting." && exit 0
|
||||
NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-playbook-sync")
|
||||
[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING Playbook sync processes running...exiting." && exit 0
|
||||
|
||||
docker exec so-soctopus python3 playbook_play-sync.py
|
||||
|
||||
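# The swap above from "ps aux | pgrep ... | wc -l" to pgrep -cf narrows the match to the actual
# script invocation; an illustrative comparison (counts are examples only):
pgrep -f "so-playbook-sync" | wc -l                    # also matches this check's own child processes
pgrep -cf "/bin/bash /usr/sbin/so-playbook-sync"       # counts only running instances of the script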
@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Usage: so-restart filebeat | kibana | playbook | thehive
# Usage: so-restart filebeat | kibana | playbook

. /usr/sbin/so-common

@@ -31,7 +31,6 @@ if [ $# -ge 1 ]; then
fi

case $1 in
  "cortex") docker stop so-thehive-cortex so-thehive && docker rm so-thehive-cortex so-thehive && salt-call state.apply hive queue=True;;
  "steno") docker stop so-steno && docker rm so-steno && salt-call state.apply pcap queue=True;;
  *) docker stop so-$1 ; docker rm so-$1 ; salt-call state.apply $1 queue=True;;
esac

@@ -32,11 +32,17 @@ copy_new_files() {
  # Copy new files over to the salt dir
  cd /tmp/sogh/securityonion
  git checkout $BRANCH
  VERSION=$(cat VERSION)
  # We need to overwrite if there is a repo file
  if [ -d /opt/so/repo ]; then
    tar -czf /opt/so/repo/"$VERSION".tar.gz -C "$(pwd)/.." .
  fi
  rsync -a salt $default_salt_dir/
  rsync -a pillar $default_salt_dir/
  chown -R socore:socore $default_salt_dir/salt
  chown -R socore:socore $default_salt_dir/pillar
  chmod 755 $default_salt_dir/pillar/firewall/addfirewall.sh

  rm -rf /tmp/sogh
}

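# Hedged example: when /opt/so/repo exists (airgap repo layout assumed), the checkout is archived
# per release and can be inspected afterwards; the version in the filename is illustrative and
# comes from the VERSION file read above.
tar -tzf /opt/so/repo/2.3.130.tar.gz | head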
@@ -115,8 +115,8 @@ clean() {
}

# Check to see if we are already running
IS_RUNNING=$(ps aux | pgrep -f "so-sensor-clean" | wc -l)
[ "$IS_RUNNING" -gt 3 ] && echo "$(date) - $IS_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0
NUM_RUNNING=$(pgrep -cf "/bin/bash /usr/sbin/so-sensor-clean")
[ "$NUM_RUNNING" -gt 1 ] && echo "$(date) - $NUM_RUNNING sensor clean script processes running...exiting." >>$LOG && exit 0

if [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; then
  while [ "$CUR_USAGE" -gt "$CRIT_DISK_USAGE" ]; do

@@ -15,7 +15,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

# Usage: so-start all | filebeat | kibana | playbook | thehive
# Usage: so-start all | filebeat | kibana | playbook

. /usr/sbin/so-common

@@ -17,5 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-stop thehive-es $1
/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"

@@ -17,4 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"
@@ -17,4 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-stop thehive-es $1
echo "TheHive and its components are no longer part of Security Onion"
@@ -17,4 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-restart thehive $1
echo "TheHive and its components are no longer part of Security Onion"
@@ -17,4 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-start thehive $1
echo "TheHive and its components are no longer part of Security Onion"
@@ -17,4 +17,4 @@

. /usr/sbin/so-common

/usr/sbin/so-stop thehive $1
echo "TheHive and its components are no longer part of Security Onion"
@@ -17,38 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <new-user-name>"
|
||||
echo ""
|
||||
echo "Adds a new user to TheHive. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs THEHIVE_PASS
|
||||
|
||||
check_password_and_exit "$THEHIVE_PASS"
|
||||
|
||||
# Create new user in TheHive
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user" -d "{\"login\" : \"$THEHIVE_USER\",\"name\" : \"$THEHIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$THEHIVE_PASS\"}")
|
||||
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully added user to TheHive"
|
||||
else
|
||||
echo "Unable to add user to TheHive; user might already exist"
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name> <true|false>"
|
||||
echo ""
|
||||
echo "Enables or disables a user in TheHive."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
case "${2^^}" in
|
||||
FALSE | NO | 0)
|
||||
THEHIVE_STATUS=Locked
|
||||
;;
|
||||
TRUE | YES | 1)
|
||||
THEHIVE_STATUS=Ok
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
|
||||
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user/${THEHIVE_USER}" -d "{\"status\":\"${THEHIVE_STATUS}\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully updated user in TheHive"
|
||||
else
|
||||
echo "Failed to update user in TheHive"
|
||||
echo "$resp"
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name>"
|
||||
echo ""
|
||||
echo "Update password for an existing TheHive user. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHVIE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs THEHIVE_PASS
|
||||
|
||||
if ! check_password "$THEHIVE_PASS"; then
|
||||
echo "Password is invalid. Please exclude single quotes, double quotes, dollar signs, and backslashes from the password."
|
||||
exit 2
|
||||
fi
|
||||
|
||||
# Change password for user in TheHive
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHVIE_API_URL/user/${THEHIVE_USER}/password/set" -d "{\"password\" : \"$THEHIVE_PASS\"}")
|
||||
if [[ -z "$resp" ]]; then
|
||||
echo "Successfully updated TheHive user password"
|
||||
else
|
||||
echo "Unable to update TheHive user password"
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -44,7 +44,7 @@ operation=$1
|
||||
email=$2
|
||||
role=$3
|
||||
|
||||
kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434}
|
||||
kratosUrl=${KRATOS_URL:-http://127.0.0.1:4434/admin}
|
||||
databasePath=${KRATOS_DB_PATH:-/opt/so/conf/kratos/db/db.sqlite}
|
||||
databaseTimeout=${KRATOS_DB_TIMEOUT:-5000}
|
||||
bcryptRounds=${BCRYPT_ROUNDS:-12}
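# Hedged aside: newer Kratos releases serve the admin API under the /admin prefix, which is why the
# default URL above gains "/admin"; an illustrative identity listing against that base URL, mirroring
# the migrateLockedUsers query further down:
curl -s "${kratosUrl}/identities" | jq -r '.[].traits.email'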
|
||||
@@ -408,7 +408,7 @@ function migrateLockedUsers() {
|
||||
# This is a migration function to convert locked users from prior to 2.3.90
|
||||
# to inactive users using the newer Kratos functionality. This should only
|
||||
# find locked users once.
|
||||
lockedEmails=$(curl -s http://localhost:4434/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email')
|
||||
lockedEmails=$(curl -s ${kratosUrl}/identities | jq -r '.[] | select(.traits.status == "locked") | .traits.email')
|
||||
if [[ -n "$lockedEmails" ]]; then
|
||||
echo "Disabling locked users..."
|
||||
for email in $lockedEmails; do
|
||||
@@ -476,7 +476,6 @@ case "${operation}" in
|
||||
createUser "$email" "${role:-$DEFAULT_ROLE}"
|
||||
syncAll
|
||||
echo "Successfully added new user to SOC"
|
||||
check_container thehive && echo "$password" | so-thehive-user-add "$email"
|
||||
check_container fleet && echo "$password" | so-fleet-user-add "$email"
|
||||
;;
|
||||
|
||||
@@ -528,7 +527,6 @@ case "${operation}" in
|
||||
updateStatus "$email" 'active'
|
||||
syncAll
|
||||
echo "Successfully enabled user"
|
||||
check_container thehive && so-thehive-user-enable "$email" true
|
||||
echo "Fleet user will need to be recreated manually with so-fleet-user-add"
|
||||
;;
|
||||
|
||||
@@ -540,7 +538,6 @@ case "${operation}" in
|
||||
updateStatus "$email" 'locked'
|
||||
syncAll
|
||||
echo "Successfully disabled user"
|
||||
check_container thehive && so-thehive-user-enable "$email" false
|
||||
check_container fleet && so-fleet-user-delete "$email"
|
||||
;;
|
||||
|
||||
@@ -552,7 +549,6 @@ case "${operation}" in
|
||||
deleteUser "$email"
|
||||
syncAll
|
||||
echo "Successfully deleted user"
|
||||
check_container thehive && so-thehive-user-enable "$email" false
|
||||
check_container fleet && so-fleet-user-delete "$email"
|
||||
;;
|
||||
|
||||
|
||||
@@ -34,7 +34,15 @@ check_err() {
|
||||
local err_msg="Unhandled error occurred, please check $SOUP_LOG for details."
|
||||
|
||||
[[ $ERR_HANDLED == true ]] && exit $exit_code
|
||||
|
||||
if [[ $exit_code -ne 0 ]]; then
|
||||
|
||||
set +e
|
||||
systemctl_func "start" "$cron_service_name"
|
||||
systemctl_func "start" "salt-master"
|
||||
systemctl_func "start" "salt-minion"
|
||||
enable_highstate
|
||||
|
||||
printf '%s' "Soup failed with error $exit_code: "
|
||||
case $exit_code in
|
||||
2)
|
||||
@@ -91,9 +99,7 @@ check_err() {
|
||||
if [[ $exit_code -ge 64 && $exit_code -le 113 ]]; then
|
||||
echo "$err_msg"
|
||||
fi
|
||||
set +e
|
||||
systemctl_func "start" "$cron_service_name"
|
||||
enable_highstate
|
||||
|
||||
exit $exit_code
|
||||
fi
|
||||
|
||||
@@ -416,6 +422,8 @@ preupgrade_changes() {
|
||||
[[ "$INSTALLEDVERSION" == 2.3.80 ]] && up_to_2.3.90
|
||||
[[ "$INSTALLEDVERSION" == 2.3.90 || "$INSTALLEDVERSION" == 2.3.91 ]] && up_to_2.3.100
|
||||
[[ "$INSTALLEDVERSION" == 2.3.100 ]] && up_to_2.3.110
|
||||
[[ "$INSTALLEDVERISON" == 2.3.110 ]] && up_to_2.3.120
|
||||
[[ "$INSTALLEDVERISON" == 2.3.120 ]] && up_to_2.3.130
|
||||
true
|
||||
}
|
||||
|
||||
@@ -429,6 +437,10 @@ postupgrade_changes() {
|
||||
[[ "$POSTVERSION" == 2.3.60 || "$POSTVERSION" == 2.3.61 || "$POSTVERSION" == 2.3.70 || "$POSTVERSION" == 2.3.80 ]] && post_to_2.3.90
|
||||
[[ "$POSTVERSION" == 2.3.90 || "$POSTVERSION" == 2.3.91 ]] && post_to_2.3.100
|
||||
[[ "$POSTVERSION" == 2.3.100 ]] && post_to_2.3.110
|
||||
[[ "$POSTVERSION" == 2.3.110 ]] && post_to_2.3.120
|
||||
[[ "$POSTVERSION" == 2.3.120 ]] && post_to_2.3.130
|
||||
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
@@ -492,6 +504,19 @@ post_to_2.3.110() {
|
||||
POSTVERSION=2.3.110
|
||||
}
|
||||
|
||||
post_to_2.3.120() {
|
||||
echo "Post Processing for 2.3.120"
|
||||
POSTVERSION=2.3.120
|
||||
sed -i '/so-thehive-es/d;/so-thehive/d;/so-cortex/d' /opt/so/conf/so-status/so-status.conf
|
||||
}
|
||||
|
||||
post_to_2.3.130() {
|
||||
echo "Post Processing for 2.3.130"
|
||||
POSTVERSION=2.3.130
|
||||
}
|
||||
|
||||
|
||||
|
||||
stop_salt_master() {
|
||||
# kill all salt jobs across the grid because they hang indefinitely if they are queued and salt-master restarts
|
||||
set +e
|
||||
@@ -728,9 +753,6 @@ up_to_2.3.90() {
|
||||
up_to_2.3.100() {
|
||||
fix_wazuh
|
||||
|
||||
echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again."
|
||||
rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched
|
||||
|
||||
echo "Adding receiver hostgroup with so-firewall"
|
||||
if so-firewall addhostgroup receiver 2>&1 | grep -q 'Already exists'; then
|
||||
echo 'receiver hostgroup already exists'
|
||||
@@ -743,11 +765,21 @@ up_to_2.3.100() {
|
||||
}
|
||||
|
||||
up_to_2.3.110() {
|
||||
echo "Updating to Security Onion 2.3.110"
|
||||
echo "Updating shard settings for Elasticsearch index templates"
|
||||
sed -i 's|shards|index_template:\n template:\n settings:\n index:\n number_of_shards|g' /opt/so/saltstack/local/pillar/global.sls
|
||||
}
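# Sketch of what the sed rewrite in up_to_2.3.110 does to a pillar entry (index name and value illustrative):
#   before:  so-zeek:
#              shards: 5
#   after:   so-zeek:
#              index_template:
#               template:
#                settings:
#                 index:
#                  number_of_shards: 5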
|
||||
|
||||
up_to_2.3.120() {
|
||||
# Stop thehive services since these will be broken in .120
|
||||
so-thehive-stop
|
||||
so-thehive-es-stop
|
||||
so-cortex-stop
|
||||
}
|
||||
|
||||
up_to_2.3.130() {
|
||||
# Remove file for nav update
|
||||
rm -f /opt/so/conf/navigator/layers/nav_layer_playbook.json
|
||||
}
|
||||
|
||||
verify_upgradespace() {
|
||||
CURRENTSPACE=$(df -BG / | grep -v Avail | awk '{print $4}' | sed 's/.$//')
|
||||
if [ "$CURRENTSPACE" -lt "10" ]; then
|
||||
@@ -770,29 +802,6 @@ upgrade_space() {
|
||||
fi
|
||||
}
|
||||
|
||||
thehive_maint() {
|
||||
echo -n "Waiting for TheHive..."
|
||||
COUNT=0
|
||||
THEHIVE_CONNECTED="no"
|
||||
while [[ "$COUNT" -le 240 ]]; do
|
||||
curl --output /dev/null --silent --head --fail -k "https://localhost/thehive/api/alert"
|
||||
if [ $? -eq 0 ]; then
|
||||
THEHIVE_CONNECTED="yes"
|
||||
echo "connected!"
|
||||
break
|
||||
else
|
||||
((COUNT+=1))
|
||||
sleep 1
|
||||
echo -n "."
|
||||
fi
|
||||
done
|
||||
if [ "$THEHIVE_CONNECTED" == "yes" ]; then
|
||||
echo "Migrating thehive databases if needed."
|
||||
curl -v -k -XPOST -L "https://localhost/thehive/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
|
||||
curl -v -k -XPOST -L "https://localhost/cortex/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
|
||||
fi
|
||||
}
|
||||
|
||||
unmount_update() {
|
||||
cd /tmp
|
||||
umount /tmp/soagupdate
|
||||
@@ -908,6 +917,8 @@ upgrade_salt() {
|
||||
else
|
||||
echo "Salt upgrade success."
|
||||
echo ""
|
||||
echo "Removing /opt/so/state files for patched Salt InfluxDB module and state. This is due to Salt being upgraded and needing to patch the files again."
|
||||
rm -vrf /opt/so/state/influxdb_continuous_query.py.patched /opt/so/state/influxdb_retention_policy.py.patched /opt/so/state/influxdbmod.py.patched
|
||||
fi
|
||||
|
||||
}
|
||||
@@ -1037,6 +1048,17 @@ main() {
|
||||
echo "### Preparing soup at $(date) ###"
|
||||
echo ""
|
||||
|
||||
set_os
|
||||
set_cron_service_name
|
||||
if ! check_salt_master_status; then
|
||||
echo "Could not talk to salt master"
|
||||
echo "Please run 'systemctl status salt-master' to ensure the salt-master service is running and check the log at /opt/so/log/salt/master."
|
||||
echo "SOUP will now attempt to start the salt-master service and exit."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "This node can communicate with the salt-master."
|
||||
|
||||
echo "Checking to see if this is a manager."
|
||||
echo ""
|
||||
require_manager
|
||||
@@ -1072,8 +1094,6 @@ main() {
|
||||
echo "Verifying we have the latest soup script."
|
||||
verify_latest_update_script
|
||||
echo ""
|
||||
set_os
|
||||
set_cron_service_name
|
||||
set_palette
|
||||
check_elastic_license
|
||||
echo ""
|
||||
@@ -1222,7 +1242,6 @@ main() {
|
||||
salt-call state.highstate -l info queue=True
|
||||
postupgrade_changes
|
||||
[[ $is_airgap -eq 0 ]] && unmount_update
|
||||
thehive_maint
|
||||
|
||||
echo ""
|
||||
echo "Upgrade to $NEWVERSION complete."
|
||||
|
||||
@@ -4067,7 +4067,7 @@ elasticsearch:
|
||||
field: "@timestamp"
|
||||
order: desc
|
||||
refresh_interval: 30s
|
||||
number_of_shards: 1
|
||||
number_of_shards: 2
|
||||
number_of_replicas: 0
|
||||
composed_of:
|
||||
- agent-mappings
|
||||
|
||||
127
salt/elasticsearch/files/ingest/rita.beacon
Normal file
@@ -0,0 +1,127 @@
|
||||
{
|
||||
"description": "RITA Beacons",
|
||||
"processors": [
|
||||
{
|
||||
"set": {
|
||||
"field": "_index",
|
||||
"value": "so-rita",
|
||||
"override": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"csv": {
|
||||
"field": "message",
|
||||
"target_fields": [
|
||||
"beacon.score",
|
||||
"source.ip",
|
||||
"destination.ip",
|
||||
"network.connections",
|
||||
"network.average_bytes",
|
||||
"beacon.interval.range",
|
||||
"beacon.size.range",
|
||||
"beacon.interval.top",
|
||||
"beacon.size.top",
|
||||
"beacon.interval.top_count",
|
||||
"beacon.size.top_count",
|
||||
"beacon.interval.skew",
|
||||
"beacon.size.skew",
|
||||
"beacon.interval.dispersion",
|
||||
"beacon.size.dispersion",
|
||||
"network.bytes"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.score",
|
||||
"type": "float"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "network.connections",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "network.average_bytes",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.interval.range",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.size.range",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.interval.top",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.size.top",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.interval.top_count",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.size.top_count",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.interval.skew",
|
||||
"type": "float"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.size.skew",
|
||||
"type": "float"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.interval.dispersion",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "beacon.size.dispersion",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "network.bytes",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{ "set": { "if": "ctx.beacon?.score == 1", "field": "dataset", "value": "alert", "override": true }},
|
||||
{ "set": { "if": "ctx.beacon?.score == 1", "field": "rule.name", "value": "Potential C2 Beacon Activity", "override": true }},
|
||||
{ "set": { "if": "ctx.beacon?.score == 1", "field": "event.severity", "value": 3, "override": true }},
|
||||
{
|
||||
"pipeline": {
|
||||
"name": "common"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
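# Hedged sketch: this pipeline can be dry-run with the ingest simulate API once loaded (assuming it
# is registered under the file's name, rita.beacon, as referenced by the Filebeat and Logstash
# configuration later in this diff). The CSV values below are made up but follow the 16-column
# target_fields order defined above.
so-elasticsearch-query _ingest/pipeline/rita.beacon/_simulate -XPOST -d '{"docs":[{"_source":{"message":"1,10.0.0.5,192.0.2.10,4000,512,10,20,60,128,3500,3200,0.01,0.02,2,4,2048000"}}]}'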
|
||||
36
salt/elasticsearch/files/ingest/rita.connection
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"description": "RITA Connections",
|
||||
"processors": [
|
||||
{
|
||||
"set": {
|
||||
"field": "_index",
|
||||
"value": "so-rita",
|
||||
"override": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"dissect": {
|
||||
"field": "message",
|
||||
"pattern": "%{source.ip},%{destination.ip},%{network.port}:%{network.protocol}:%{network.service},%{connection.duration},%{connection.state}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "connection.duration",
|
||||
"type": "float"
|
||||
}
|
||||
},
|
||||
{
|
||||
"set": {
|
||||
"field": "event.duration",
|
||||
"value": "{{ connection.duration }}",
|
||||
"override": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"pipeline": {
|
||||
"name": "common"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
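# Illustrative mapping for the dissect pattern above (sample values made up):
#   "10.0.0.5,192.0.2.10,443:tcp:ssl,3600.5,OpenConnection"
# -> source.ip=10.0.0.5, destination.ip=192.0.2.10, network.port=443, network.protocol=tcp,
#    network.service=ssl, connection.duration=3600.5, connection.state=OpenConnection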
|
||||
39
salt/elasticsearch/files/ingest/rita.dns
Normal file
@@ -0,0 +1,39 @@
|
||||
{
|
||||
"description": "RITA DNS",
|
||||
"processors": [
|
||||
{
|
||||
"set": {
|
||||
"field": "_index",
|
||||
"value": "so-rita",
|
||||
"override": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"csv": {
|
||||
"field": "message",
|
||||
"target_fields": [
|
||||
"dns.question.name",
|
||||
"dns.question.subdomain_count",
|
||||
"dns.question.count"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "dns.question.subdomain_count",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"convert": {
|
||||
"field": "dns.question.count",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"pipeline": {
|
||||
"name": "common"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -1,36 +1,157 @@
|
||||
{
|
||||
"description" : "syslog",
|
||||
"description" : "syslog pipeline",
|
||||
"processors" : [
|
||||
{
|
||||
"dissect": {
|
||||
"field": "message",
|
||||
"pattern" : "%{message}",
|
||||
"on_failure": [ { "drop" : { } } ]
|
||||
},
|
||||
"remove": {
|
||||
"field": [ "type", "agent" ],
|
||||
"ignore_failure": true
|
||||
}
|
||||
"dissect": {
|
||||
"field": "message",
|
||||
"pattern" : "%{message}",
|
||||
"on_failure": [ { "drop" : { } } ]
|
||||
},
|
||||
"remove": {
|
||||
"field": [ "type", "agent" ],
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"grok": {
|
||||
"field": "message",
|
||||
"patterns": [
|
||||
"^<%{INT:syslog.priority:int}>%{TIMESTAMP_ISO8601:syslog.timestamp} +%{IPORHOST:syslog.host} +%{PROG:syslog.program}(?:\\[%{POSINT:syslog.pid:int}\\])?: %{GREEDYDATA:real_message}$",
|
||||
|
||||
"^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$",
|
||||
|
||||
"^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$"
|
||||
],
|
||||
"ignore_failure": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"grok":
|
||||
{
|
||||
"field": "message",
|
||||
"patterns": [
|
||||
"^<%{INT:syslog.priority}>%{DATA:syslog.timestamp} %{WORD:source.application}(\\[%{DATA:pid}\\])?: %{GREEDYDATA:real_message}$",
|
||||
"^%{SYSLOGTIMESTAMP:syslog.timestamp} %{SYSLOGHOST:syslog.host} %{SYSLOGPROG:syslog.program}: CEF:0\\|%{DATA:vendor}\\|%{DATA:product}\\|%{GREEDYDATA:message2}$"
|
||||
],
|
||||
"ignore_failure": true
|
||||
}
|
||||
"convert" : {
|
||||
"if": "ctx?.syslog?.priority != null",
|
||||
"field" : "syslog.priority",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
{ "set": { "if": "ctx.source?.application == 'filterlog'", "field": "dataset", "value": "firewall", "ignore_failure": true } },
|
||||
{ "set": { "if": "ctx.vendor != null", "field": "module", "value": "{{ vendor }}", "ignore_failure": true } },
|
||||
{ "set": { "if": "ctx.product != null", "field": "dataset", "value": "{{ product }}", "ignore_failure": true } },
|
||||
{ "set": { "field": "event.ingested", "value": "{{ @timestamp }}" } },
|
||||
{ "date": { "if": "ctx.syslog?.timestamp != null", "field": "syslog.timestamp", "target_field": "@timestamp", "formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"], "ignore_failure": true } },
|
||||
{ "remove": { "field": ["pid", "program"], "ignore_missing": true, "ignore_failure": true } },
|
||||
{ "pipeline": { "if": "ctx.vendor != null && ctx.product != null", "name": "{{ vendor }}.{{ product }}", "ignore_failure": true } },
|
||||
{ "pipeline": { "if": "ctx.dataset == 'firewall'", "name": "filterlog", "ignore_failure": true } },
|
||||
{ "pipeline": { "name": "common" } }
|
||||
{
|
||||
"script": {
|
||||
"description": "Map syslog priority into facility and level",
|
||||
"lang": "painless",
|
||||
"params" : {
|
||||
"level": [
|
||||
"emerg",
|
||||
"alert",
|
||||
"crit",
|
||||
"err",
|
||||
"warn",
|
||||
"notice",
|
||||
"info",
|
||||
"debug"
|
||||
],
|
||||
"facility" : [
|
||||
"kern",
|
||||
"user",
|
||||
"mail",
|
||||
"daemon",
|
||||
"auth",
|
||||
"syslog",
|
||||
"lpr",
|
||||
"news",
|
||||
"uucp",
|
||||
"cron",
|
||||
"authpriv",
|
||||
"ftp",
|
||||
"ntp",
|
||||
"security",
|
||||
"console",
|
||||
"solaris-cron",
|
||||
"local0",
|
||||
"local1",
|
||||
"local2",
|
||||
"local3",
|
||||
"local4",
|
||||
"local5",
|
||||
"local6",
|
||||
"local7"
|
||||
]
|
||||
},
|
||||
"source": "if (ctx['syslog'] != null && ctx['syslog']['priority'] != null) { int p = ctx['syslog']['priority']; int f = p / 8; int l = p - (f * 8); ctx['syslog']['facility_label'] = [ : ]; ctx['syslog']['severity_label'] = [ : ]; ctx['syslog'].put('severity', l); ctx['syslog'].put('severity_label', params.level[l].toUpperCase()); ctx['syslog'].put('facility', f); ctx['syslog'].put('facility_label', params.facility[f].toUpperCase()); }"
|
||||
|
||||
}
|
||||
},
|
||||
{
|
||||
"set": {
|
||||
"if": "ctx.syslog?.host != null",
|
||||
"field": "host.name",
|
||||
"value": "{{ syslog.host }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"if": "ctx.syslog?.program != null",
|
||||
"field": "process.name",
|
||||
"value": "{{ syslog.program }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"if": "ctx.syslog?.pid != null",
|
||||
"field": "process.id",
|
||||
"value": "{{ syslog.pid }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"if": "ctx.source?.application == 'filterlog'",
|
||||
"field": "dataset",
|
||||
"value": "firewall",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"if": "ctx.vendor != null",
|
||||
"field": "module",
|
||||
"value": "{{ vendor }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"if": "ctx.product != null",
|
||||
"field": "dataset",
|
||||
"value": "{{ product }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"set": {
|
||||
"field": "ingest.timestamp",
|
||||
"value": "{{ @timestamp }}"
|
||||
}
|
||||
}, {
|
||||
"date": {
|
||||
"if": "ctx.syslog?.timestamp != null",
|
||||
"field": "syslog.timestamp",
|
||||
"target_field": "@timestamp",
|
||||
"formats": ["MMM d HH:mm:ss", "MMM dd HH:mm:ss", "ISO8601", "UNIX"],
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"remove": {
|
||||
"field": ["pid", "program"],
|
||||
"ignore_missing": true,
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"pipeline": {
|
||||
"if": "ctx.vendor != null && ctx.product != null",
|
||||
"name": "{{ vendor }}.{{ product }}",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"pipeline": {
|
||||
"if": "ctx.dataset == 'firewall'",
|
||||
"name": "filterlog",
|
||||
"ignore_failure": true
|
||||
}
|
||||
}, {
|
||||
"pipeline": { "name": "common" }
|
||||
}
|
||||
]
|
||||
}
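# Quick check of the priority math used by the script processor above (facility = priority / 8,
# severity = priority % 8); for example, priority 86 maps to authpriv/info:
p=86; echo "facility=$((p / 8)) severity=$((p % 8))"    # -> facility=10 (authpriv), severity=6 (info)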
|
||||
|
||||
@@ -11,10 +11,17 @@ appender.rolling.name = rolling
|
||||
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
|
||||
appender.rolling.layout.type = PatternLayout
|
||||
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
|
||||
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log
|
||||
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log.gz
|
||||
appender.rolling.policies.type = Policies
|
||||
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.rolling.policies.time.interval = 1
|
||||
appender.rolling.policies.time.modulate = true
|
||||
appender.rolling.strategy.type = DefaultRolloverStrategy
|
||||
appender.rolling.strategy.action.type = Delete
|
||||
appender.rolling.strategy.action.basepath = /var/log/elasticsearch
|
||||
appender.rolling.strategy.action.condition.type = IfFileName
|
||||
appender.rolling.strategy.action.condition.glob = *.gz
|
||||
appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified
|
||||
appender.rolling.strategy.action.condition.nested_condition.age = 7D
|
||||
rootLogger.level = info
|
||||
rootLogger.appenderRef.rolling.ref = rolling
|
||||
|
||||
@@ -207,6 +207,7 @@ escomponenttemplates:
|
||||
|
||||
# Auto-generate templates from defaults file
|
||||
{% for index, settings in ES_INDEX_SETTINGS.items() %}
|
||||
{% if settings.index_template is defined %}
|
||||
es_index_template_{{index}}:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/elasticsearch/templates/index/{{ index }}-template.json
|
||||
@@ -216,6 +217,7 @@ es_index_template_{{index}}:
|
||||
- template: jinja
|
||||
- onchanges_in:
|
||||
- cmd: so-elasticsearch-templates
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
{% if TEMPLATES %}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
{% import_yaml 'elasticsearch/defaults.yaml' as ESCONFIG with context %}
|
||||
{%- set ES_INDEX_SETTINGS = salt['pillar.get']('elasticsearch:index_settings', default=ESCONFIG.elasticsearch.index_settings, merge=True) %}
|
||||
{% for index, settings in ES_INDEX_SETTINGS.items() %}
|
||||
{% if settings.index_sorting, False %}
|
||||
{% do settings.index_template.template.settings.index.pop('sort') %}
|
||||
{% if settings.index_template is defined %}
|
||||
{% if not settings.get('index_sorting', False) | to_bool and settings.index_template.template.settings.index.sort is defined %}
|
||||
{% do settings.index_template.template.settings.index.pop('sort') %}
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
|
||||
@@ -60,6 +60,32 @@
|
||||
},
|
||||
"type": "wildcard"
|
||||
},
|
||||
"entity_id": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"executable": {
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
},
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"name": {
|
||||
"fields": {
|
||||
"keyword": {
|
||||
@@ -73,6 +99,133 @@
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
},
|
||||
"parent": {
|
||||
"properties": {
|
||||
"command_line": {
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"text": {
|
||||
"type": "match_only_text"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
},
|
||||
"type": "wildcard"
|
||||
},
|
||||
"entity_id": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"executable": {
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
},
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"pe": {
|
||||
"properties": {
|
||||
"architecture": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"company": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"file_version": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"original_file_name": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"product": {
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword",
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"pid": {
|
||||
"type": "long",
|
||||
"fields": {
|
||||
@@ -88,6 +241,19 @@
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"working_directory": {
|
||||
"fields": {
|
||||
"security": {
|
||||
"type": "text",
|
||||
"analyzer": "es_security_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
},
|
||||
"ignore_above": 1024,
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -33,6 +33,8 @@ while [[ "$COUNT" -le 240 ]]; do
|
||||
if [ $? -eq 0 ]; then
|
||||
ELASTICSEARCH_CONNECTED="yes"
|
||||
echo "connected!"
|
||||
# Check cluster health once connected
|
||||
so-elasticsearch-query _cluster/health?wait_for_status=yellow > /dev/null 2>&1
|
||||
break
|
||||
else
|
||||
((COUNT+=1))
|
||||
@@ -48,7 +50,7 @@ fi
|
||||
|
||||
cd ${ELASTICSEARCH_ROLES}
|
||||
|
||||
echo "Loading templates..."
|
||||
echo "Loading roles..."
|
||||
for role in *; do
|
||||
name=$(echo "$role" | cut -d. -f1)
|
||||
so-elasticsearch-query _security/role/$name -XPUT -d @"$role"
|
||||
|
||||
@@ -30,7 +30,7 @@ echo -n "Waiting for ElasticSearch..."
|
||||
COUNT=0
|
||||
ELASTICSEARCH_CONNECTED="no"
|
||||
while [[ "$COUNT" -le 240 ]]; do
|
||||
{{ ELASTICCURL }} -k --output /dev/null --silent --head --fail -L https://"$ELASTICSEARCH_HOST":"$ELASTICSEARCH_PORT"
|
||||
so-elasticsearch-query -k --output /dev/null --silent --head --fail
|
||||
if [ $? -eq 0 ]; then
|
||||
ELASTICSEARCH_CONNECTED="yes"
|
||||
echo "connected!"
|
||||
@@ -50,21 +50,20 @@ fi
|
||||
cd ${ELASTICSEARCH_TEMPLATES}/component/ecs
|
||||
|
||||
echo "Loading ECS component templates..."
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE-mappings"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_component_template/$TEMPLATE-mappings -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
|
||||
echo
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE-mappings"; so-elasticsearch-query _component_template/$TEMPLATE-mappings -d@$i -XPUT 2>/dev/null; echo; done
|
||||
|
||||
# Load SO-specific component templates
|
||||
cd ${ELASTICSEARCH_TEMPLATES}/component/so
|
||||
|
||||
echo "Loading Security Onion component templates..."
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_component_template/$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '.' -f1); echo "$TEMPLATE"; so-elasticsearch-query _component_template/$TEMPLATE -d@$i -XPUT 2>/dev/null; echo; done
|
||||
echo
|
||||
|
||||
# Load SO index templates
|
||||
cd ${ELASTICSEARCH_TEMPLATES}/index
|
||||
|
||||
echo "Loading Security Onion index templates..."
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; {{ ELASTICCURL }} -k ${ELASTICSEARCH_AUTH} -s -XPUT -L https://${ELASTICSEARCH_HOST}:${ELASTICSEARCH_PORT}/_index_template/so-$TEMPLATE -H 'Content-Type: application/json' -d@$i 2>/dev/null; echo; done
|
||||
for i in *; do TEMPLATE=$(echo $i | cut -d '-' -f2); echo "so-$TEMPLATE"; so-elasticsearch-query _index_template/so-$TEMPLATE -d@$i -XPUT 2>/dev/null; echo; done
|
||||
echo
|
||||
|
||||
cd - >/dev/null
|
||||
|
||||
@@ -10,6 +10,7 @@
|
||||
{%- set ZEEKVER = salt['pillar.get']('global:mdengine', 'COMMUNITY') %}
|
||||
{%- set WAZUHENABLED = salt['pillar.get']('global:wazuh', '0') %}
|
||||
{%- set STRELKAENABLED = salt['pillar.get']('strelka:enabled', '0') %}
|
||||
{%- set RITAENABLED = salt['pillar.get']('rita:enabled', False) -%}
|
||||
{%- set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) -%}
|
||||
{%- set FLEETNODE = salt['pillar.get']('global:fleet_node', False) -%}
|
||||
{%- set FBMEMEVENTS = salt['pillar.get']('filebeat:mem_events', 2048) -%}
|
||||
@@ -264,6 +265,57 @@ filebeat.inputs:
|
||||
|
||||
{%- endif %}
|
||||
|
||||
{%- if RITAENABLED %}
|
||||
- type: filestream
|
||||
id: rita-beacon
|
||||
paths:
|
||||
- /nsm/rita/beacons.csv
|
||||
exclude_lines: ['^Score', '^Source', '^Domain', '^No results']
|
||||
fields:
|
||||
module: rita
|
||||
dataset: beacon
|
||||
category: network
|
||||
processors:
|
||||
- drop_fields:
|
||||
fields: ["source", "prospector", "input", "offset", "beat"]
|
||||
fields_under_root: true
|
||||
pipeline: "rita.beacon"
|
||||
index: "so-rita"
|
||||
|
||||
- type: filestream
|
||||
id: rita-connection
|
||||
paths:
|
||||
- /nsm/rita/long-connections.csv
|
||||
- /nsm/rita/open-connections.csv
|
||||
exclude_lines: ['^Source', '^No results']
|
||||
fields:
|
||||
module: rita
|
||||
dataset: connection
|
||||
category: network
|
||||
processors:
|
||||
- drop_fields:
|
||||
fields: ["source", "prospector", "input", "offset", "beat"]
|
||||
fields_under_root: true
|
||||
pipeline: "rita.connection"
|
||||
index: "so-rita"
|
||||
|
||||
- type: filestream
|
||||
id: rita-dns
|
||||
paths:
|
||||
- /nsm/rita/exploded-dns.csv
|
||||
exclude_lines: ['^Domain', '^No results']
|
||||
fields:
|
||||
module: rita
|
||||
dataset: dns
|
||||
category: network
|
||||
processors:
|
||||
- drop_fields:
|
||||
fields: ["source", "prospector", "input", "offset", "beat"]
|
||||
fields_under_root: true
|
||||
pipeline: "rita.dns"
|
||||
index: "so-rita"
|
||||
{%- endif %}
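# Hedged note: the three RITA filestream inputs above only render when the rita:enabled pillar is
# truthy, matching the RITAENABLED lookup at the top of this template; an illustrative pillar snippet
# (exact pillar file location is deployment-specific):
# rita:
#   enabled: True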
|
||||
|
||||
{%- if grains['role'] in ['so-eval', 'so-standalone', 'so-manager', 'so-managersearch', 'so-import'] %}
|
||||
- type: log
|
||||
paths:
|
||||
@@ -394,6 +446,13 @@ output.logstash:
|
||||
|
||||
# The Logstash hosts
|
||||
hosts:
|
||||
{# dont let filebeat send to a node designated as dmz #}
|
||||
{% import_yaml 'logstash/dmz_nodes.yaml' as dmz_nodes -%}
|
||||
{% if dmz_nodes.logstash.dmz_nodes -%}
|
||||
{% set dmz_nodes = dmz_nodes.logstash.dmz_nodes -%}
|
||||
{% else -%}
|
||||
{% set dmz_nodes = [] -%}
|
||||
{% endif -%}
|
||||
{%- if grains.role in ['so-sensor', 'so-fleet', 'so-node', 'so-idh'] %}
|
||||
{%- set LOGSTASH = namespace() %}
|
||||
{%- set LOGSTASH.count = 0 %}
|
||||
@@ -402,8 +461,10 @@ output.logstash:
|
||||
{%- for node_type, node_details in node_data.items() | sort -%}
|
||||
{%- if node_type in ['manager', 'managersearch', 'standalone', 'receiver' ] %}
|
||||
{%- for hostname in node_data[node_type].keys() %}
|
||||
{%- set LOGSTASH.count = LOGSTASH.count + 1 %}
|
||||
{%- if hostname not in dmz_nodes %}
|
||||
{%- set LOGSTASH.count = LOGSTASH.count + 1 %}
|
||||
- "{{ hostname }}:5644" #{{ node_details[hostname].ip }}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
{%- endif %}
|
||||
{%- if LOGSTASH.count > 1 %}
|
||||
|
||||
@@ -349,6 +349,9 @@ role:
|
||||
osquery_endpoint:
|
||||
portgroups:
|
||||
- {{ portgroups.fleet_api }}
|
||||
strelka_frontend:
|
||||
portgroups:
|
||||
- {{ portgroups.strelka_frontend }}
|
||||
syslog:
|
||||
portgroups:
|
||||
- {{ portgroups.syslog }}
|
||||
@@ -482,6 +485,9 @@ role:
|
||||
self:
|
||||
portgroups:
|
||||
- {{ portgroups.syslog}}
|
||||
strelka_frontend:
|
||||
portgroups:
|
||||
- {{ portgroups.strelka_frontend }}
|
||||
INPUT:
|
||||
hostgroups:
|
||||
anywhere:
|
||||
@@ -511,6 +517,9 @@ role:
|
||||
self:
|
||||
portgroups:
|
||||
- {{ portgroups.syslog}}
|
||||
strelka_frontend:
|
||||
portgroups:
|
||||
- {{ portgroups.strelka_frontend }}
|
||||
INPUT:
|
||||
hostgroups:
|
||||
anywhere:
|
||||
|
||||
@@ -19,11 +19,37 @@
|
||||
{% set VERSION = salt['pillar.get']('global:soversion', 'HH1.2.2') %}
|
||||
{% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
|
||||
{% set MANAGER = salt['grains.get']('master') %}
|
||||
{% set MAININT = salt['pillar.get']('host:mainint') %}
|
||||
{% set MAINIP = salt['grains.get']('ip_interfaces').get(MAININT)[0] %}
|
||||
{% set RESTRICTIDHSERVICES = salt['pillar.get']('idh:restrict_management_ip', False) %}
|
||||
|
||||
include:
|
||||
- idh.openssh.config
|
||||
- firewall
|
||||
|
||||
# IDH State
|
||||
|
||||
# If True, block IDH Services from accepting connections on Management IP
|
||||
{% if RESTRICTIDHSERVICES %}
|
||||
{% from 'idh/opencanary_config.map.jinja' import OPENCANARYCONFIG %}
|
||||
{% set idh_services = salt['pillar.get']('idh:services', []) %}
|
||||
|
||||
{% for service in idh_services %}
|
||||
{% if service in ["smnp","ntp", "tftp"] %}
|
||||
{% set proto = 'udp' %}
|
||||
{% else %}
|
||||
{% set proto = 'tcp' %}
|
||||
{% endif %}
|
||||
block_mgt_ip_idh_services_{{ proto }}_{{ OPENCANARYCONFIG[service~'.port'] }} :
|
||||
iptables.insert:
|
||||
- table: filter
|
||||
- chain: INPUT
|
||||
- jump: DROP
|
||||
- position: 1
|
||||
- proto: {{ proto }}
|
||||
- dport: {{ OPENCANARYCONFIG[service~'.port'] }}
|
||||
- destination: {{ MAINIP }}
|
||||
{% endfor %}
|
||||
{% endif %}
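# Illustrative effect of the loop above for a single service (port and destination address are
# hypothetical): an INPUT rule equivalent to the following is inserted at position 1 so the honeypot
# service cannot be reached on the management IP.
#   iptables -t filter -I INPUT 1 -p tcp --dport 21 -d 10.0.0.10 -j DROP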
|
||||
|
||||
# Create a config directory
|
||||
temp:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log') %}
|
||||
{% set measurements = salt['cmd.shell']('docker exec -t so-influxdb influx -format json -ssl -unsafeSsl -database telegraf -execute "show measurements" 2> /root/measurement_query.log | jq -r .results[0].series[0].values[]?[0] 2>> /root/measurement_query.log', shell='/bin/bash') %}
|
||||
|
||||
influxdb:
|
||||
retention_policies:
|
||||
|
||||
@@ -59,7 +59,7 @@ update() {
|
||||
|
||||
IFS=$'\r\n' GLOBIGNORE='*' command eval 'LINES=($(cat $1))'
|
||||
for i in "${LINES[@]}"; do
|
||||
RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.17.1" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ")
|
||||
RESPONSE=$({{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.17.4" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i ")
|
||||
echo $RESPONSE; if [[ "$RESPONSE" != *"\"success\":true"* ]] && [[ "$RESPONSE" != *"updated_at"* ]] ; then RETURN_CODE=1;fi
|
||||
done
|
||||
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.17.1","id": "7.17.1","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}
|
||||
{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.17.4","id": "7.17.4","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}
|
||||
|
||||
@@ -37,7 +37,7 @@ selfservice:
|
||||
ui_url: https://{{ WEBACCESS }}/login/
|
||||
|
||||
default_browser_return_url: https://{{ WEBACCESS }}/
|
||||
whitelisted_return_urls:
|
||||
allowed_return_urls:
|
||||
- http://127.0.0.1
|
||||
|
||||
log:
|
||||
@@ -59,7 +59,10 @@ hashers:
|
||||
cost: 12
|
||||
|
||||
identity:
|
||||
default_schema_url: file:///kratos-conf/schema.json
|
||||
default_schema_id: default
|
||||
schemas:
|
||||
- id: default
|
||||
url: file:///kratos-conf/schema.json
|
||||
|
||||
courier:
|
||||
smtp:
|
||||
|
||||
9
salt/logstash/dmz_nodes.yaml
Normal file
@@ -0,0 +1,9 @@
|
||||
# Do not edit this file. Copy it to /opt/so/saltstack/local/salt/logstash/ and make changes there. It should be formatted as a list.
|
||||
# logstash:
|
||||
# dmz_nodes:
|
||||
# - mydmznodehostname1
|
||||
# - mydmznodehostname2
|
||||
# - mydmznodehostname3
|
||||
|
||||
logstash:
|
||||
dmz_nodes:
|
||||
@@ -18,7 +18,7 @@ appender.rolling.name = rolling
|
||||
appender.rolling.fileName = /var/log/logstash/logstash.log
|
||||
appender.rolling.layout.type = PatternLayout
|
||||
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
|
||||
appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log
|
||||
appender.rolling.filePattern = /var/log/logstash/logstash-%d{yyyy-MM-dd}.log.gz
|
||||
appender.rolling.policies.type = Policies
|
||||
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.rolling.policies.time.interval = 1
|
||||
@@ -27,7 +27,7 @@ appender.rolling.strategy.type = DefaultRolloverStrategy
|
||||
appender.rolling.strategy.action.type = Delete
|
||||
appender.rolling.strategy.action.basepath = /var/log/logstash
|
||||
appender.rolling.strategy.action.condition.type = IfFileName
|
||||
appender.rolling.strategy.action.condition.glob = logstash-*.log
|
||||
appender.rolling.strategy.action.condition.glob = *.gz
|
||||
appender.rolling.strategy.action.condition.nested_condition.type = IfLastModified
|
||||
appender.rolling.strategy.action.condition.nested_condition.age = 7D
|
||||
rootLogger.level = info
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
{%- if grains['role'] == 'so-eval' -%}
|
||||
{%- set ES = salt['pillar.get']('manager:mainip', '') -%}
|
||||
{%- else %}
|
||||
{%- set ES = salt['pillar.get']('elasticsearch:mainip', '') -%}
|
||||
{%- endif %}
|
||||
{%- set ES_USER = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:user', '') %}
|
||||
{%- set ES_PASS = salt['pillar.get']('elasticsearch:auth:users:so_elastic_user:pass', '') %}
|
||||
output {
|
||||
if [module] =~ "rita" and "import" not in [tags] {
|
||||
elasticsearch {
|
||||
pipeline => "%{module}.%{dataset}"
|
||||
hosts => "{{ ES }}"
|
||||
{% if salt['pillar.get']('elasticsearch:auth:enabled') is sameas true %}
|
||||
user => "{{ ES_USER }}"
|
||||
password => "{{ ES_PASS }}"
|
||||
{% endif %}
|
||||
index => "so-rita"
|
||||
ssl => true
|
||||
ssl_certificate_verification => false
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -130,6 +130,8 @@ http {
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "Upgrade";
|
||||
}
|
||||
error_page 500 502 503 504 /50x.html;
|
||||
location = /usr/share/nginx/html/50x.html {
|
||||
@@ -330,34 +332,12 @@ http {
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection "Upgrade";
|
||||
}
|
||||
|
||||
{%- endif %}
|
||||
|
||||
location /thehive/ {
|
||||
proxy_pass http://{{ manager_ip }}:9000/thehive/;
|
||||
proxy_read_timeout 90;
|
||||
proxy_connect_timeout 90;
|
||||
proxy_http_version 1.1; # this is essential for chunked responses to work
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /cortex/ {
|
||||
proxy_pass http://{{ manager_ip }}:9001/cortex/;
|
||||
proxy_read_timeout 90;
|
||||
proxy_connect_timeout 90;
|
||||
proxy_http_version 1.1; # this is essential for chunked responses to work
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /soctopus/ {
|
||||
auth_request /auth/sessions/whoami;
|
||||
|
||||
@@ -1,27 +1,52 @@
|
||||
{
|
||||
"name": "Playbook",
|
||||
"version": "3.0",
|
||||
"domain": "mitre-enterprise",
|
||||
"description": "Current Coverage of Playbook",
|
||||
"name": "Playbook Coverage",
|
||||
"versions": {
|
||||
"attack": "11",
|
||||
"navigator": "4.6.4",
|
||||
"layer": "4.3"
|
||||
},
|
||||
"domain": "enterprise-attack",
|
||||
"description": "",
|
||||
"filters": {
|
||||
"stages": ["act"],
|
||||
"platforms": [
|
||||
"windows",
|
||||
"linux",
|
||||
"mac"
|
||||
"Linux",
|
||||
"macOS",
|
||||
"Windows",
|
||||
"Azure AD",
|
||||
"Office 365",
|
||||
"SaaS",
|
||||
"IaaS",
|
||||
"Google Workspace",
|
||||
"PRE",
|
||||
"Network",
|
||||
"Containers"
|
||||
]
|
||||
},
|
||||
"sorting": 0,
|
||||
"viewMode": 0,
|
||||
"layout": {
|
||||
"layout": "side",
|
||||
"aggregateFunction": "average",
|
||||
"showID": false,
|
||||
"showName": true,
|
||||
"showAggregateScores": false,
|
||||
"countUnscored": false
|
||||
},
|
||||
"hideDisabled": false,
|
||||
"techniques": [],
|
||||
"gradient": {
|
||||
"colors": ["#ff6666", "#ffe766", "#8ec843"],
|
||||
"colors": [
|
||||
"#ff6666ff",
|
||||
"#ffe766ff",
|
||||
"#8ec843ff"
|
||||
],
|
||||
"minValue": 0,
|
||||
"maxValue": 100
|
||||
},
|
||||
"legendItems": [],
|
||||
"metadata": [],
|
||||
"links": [],
|
||||
"showTacticRowBackground": false,
|
||||
"tacticRowBackground": "#dddddd",
|
||||
"selectTechniquesAcrossTactics": true
|
||||
}
|
||||
"selectTechniquesAcrossTactics": true,
|
||||
"selectSubtechniquesWithParent": false
|
||||
}
|
||||
@@ -1,58 +1,62 @@
|
||||
{%- set URL_BASE = salt['pillar.get']('global:url_base', '') %}
|
||||
|
||||
{
|
||||
"enterprise_attack_url": "assets/enterprise-attack.json",
|
||||
"pre_attack_url": "assets/pre-attack.json",
|
||||
"mobile_data_url": "assets/mobile-attack.json",
|
||||
"taxii_server": {
|
||||
"enabled": false,
|
||||
"url": "https://cti-taxii.mitre.org/",
|
||||
"collections": {
|
||||
"enterprise_attack": "95ecc380-afe9-11e4-9b6c-751b66dd541e",
|
||||
"pre_attack": "062767bd-02d2-4b72-84ba-56caef0f8658",
|
||||
"mobile_attack": "2f669986-b40b-4423-b720-4396ca6a462b"
|
||||
"versions": [
|
||||
{
|
||||
"name": "ATT&CK v11",
|
||||
"version": "11",
|
||||
"domains": [
|
||||
{
|
||||
"name": "Enterprise",
|
||||
"identifier": "enterprise-attack",
|
||||
"data": ["assets/so/enterprise-attack.json"]
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
|
||||
"custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}],
|
||||
|
||||
"default_layers": {
|
||||
"enabled": true,
|
||||
"urls": ["assets/so/nav_layer_playbook.json"]
|
||||
},
|
||||
|
||||
"domain": "mitre-enterprise",
|
||||
|
||||
"custom_context_menu_items": [ {"label": "view related plays","url": " https://{{URL_BASE}}/playbook/projects/detection-playbooks/issues?utf8=%E2%9C%93&set_filter=1&sort=id%3Adesc&f%5B%5D=cf_15&op%5Bcf_15%5D=%3D&f%5B%5D=&c%5B%5D=status&c%5B%5D=cf_10&c%5B%5D=cf_13&c%5B%5D=cf_18&c%5B%5D=cf_19&c%5B%5D=cf_1&c%5B%5D=updated_on&v%5Bcf_15%5D%5B%5D=~Technique_ID~"}],
|
||||
|
||||
"default_layers": {
|
||||
"enabled": true,
|
||||
"urls": [
|
||||
"assets/playbook.json"
|
||||
]
|
||||
},
|
||||
|
||||
"comment_color": "yellow",
|
||||
|
||||
"link_color": "blue",
|
||||
"banner": "",
|
||||
"features": [
|
||||
{"name": "leave_site_dialog", "enabled": true, "description": "Disable to remove the dialog prompt when leaving site."},
|
||||
{"name": "tabs", "enabled": true, "description": "Disable to remove the ability to open new tabs."},
|
||||
{"name": "selecting_techniques", "enabled": true, "description": "Disable to remove the ability to select techniques."},
|
||||
{"name": "header", "enabled": true, "description": "Disable to remove the header containing 'MITRE ATT&CK Navigator' and the link to the help page. The help page can still be accessed from the new tab menu."},
|
||||
{"name": "subtechniques", "enabled": true, "description": "Disable to remove all sub-technique features from the interface."},
|
||||
{"name": "selection_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
|
||||
{"name": "search", "enabled": true, "description": "Disable to remove the technique search panel from the interface."},
|
||||
{"name": "multiselect", "enabled": true, "description": "Disable to remove the multiselect panel from interface."},
|
||||
{"name": "deselect_all", "enabled": true, "description": "Disable to remove the deselect all button from the interface."}
|
||||
]},
|
||||
{"name": "layer_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
|
||||
{"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and metadata) panel from the interface. Note that the layer can still be renamed in the tab."},
|
||||
{"name": "layer_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [
|
||||
{"name": "layer_info", "enabled": true, "description": "Disable to remove the layer info (name, description and layer metadata) panel from the interface. Note that the layer can still be renamed in the tab."},
|
||||
{"name": "download_layer", "enabled": true, "description": "Disable to remove the button to download the layer."},
|
||||
{"name": "export_render", "enabled": true, "description": "Disable to the remove the button to render the current layer."},
|
||||
{"name": "export_excel", "enabled": true, "description": "Disable to the remove the button to export the current layer to MS Excel (.xlsx) format."},
|
||||
{"name": "filters", "enabled": true, "description": "Disable to the remove the filters panel from interface."},
|
||||
{"name": "sorting", "enabled": true, "description": "Disable to the remove the sorting button from the interface."},
|
||||
{"name": "color_setup", "enabled": true, "description": "Disable to the remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."},
|
||||
{"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to the remove the hide disabled techniques button from the interface."},
|
||||
{"name": "toggle_view_mode", "enabled": true, "description": "Disable to the remove the toggle view mode button from interface."},
|
||||
{"name": "legend", "enabled": true, "description": "Disable to the remove the legend panel from the interface."}
|
||||
{"name": "export_render", "enabled": true, "description": "Disable to remove the button to render the current layer."},
|
||||
{"name": "export_excel", "enabled": true, "description": "Disable to remove the button to export the current layer to MS Excel (.xlsx) format."},
|
||||
{"name": "filters", "enabled": true, "description": "Disable to remove the filters panel from interface."},
|
||||
{"name": "sorting", "enabled": true, "description": "Disable to remove the sorting button from the interface."},
|
||||
{"name": "color_setup", "enabled": true, "description": "Disable to remove the color setup panel from interface, containing customization controls for scoring gradient and tactic row color."},
|
||||
{"name": "toggle_hide_disabled", "enabled": true, "description": "Disable to remove the hide disabled techniques button from the interface."},
|
||||
{"name": "layout_controls", "enabled": true, "description": "Disable to remove the ability to change the current matrix layout."},
|
||||
{"name": "legend", "enabled": true, "description": "Disable to remove the legend panel from the interface."}
|
||||
]},
|
||||
{"name": "technique_controls", "enabled": true, "description": "Disable to to disable all subfeatures", "subfeatures": [
|
||||
{"name": "disable_techniques", "enabled": true, "description": "Disable to the remove the ability to disable techniques."},
|
||||
{"name": "manual_color", "enabled": true, "description": "Disable to the remove the ability to assign manual colors to techniques."},
|
||||
{"name": "scoring", "enabled": true, "description": "Disable to the remove the ability to score techniques."},
|
||||
{"name": "comments", "enabled": true, "description": "Disable to the remove the ability to add comments to techniques."},
|
||||
{"name": "technique_controls", "enabled": true, "description": "Disable to disable all subfeatures", "subfeatures": [
|
||||
{"name": "disable_techniques", "enabled": true, "description": "Disable to remove the ability to disable techniques."},
|
||||
{"name": "manual_color", "enabled": true, "description": "Disable to remove the ability to assign manual colors to techniques."},
|
||||
{"name": "scoring", "enabled": true, "description": "Disable to remove the ability to score techniques."},
|
||||
{"name": "comments", "enabled": true, "description": "Disable to remove the ability to add comments to techniques."},
|
||||
{"name": "comment_underline", "enabled": true, "description": "Disable to remove the comment underline effect on techniques."},
|
||||
{"name": "links", "enabled": true, "description": "Disable to remove the ability to assign hyperlinks to techniques."},
|
||||
{"name": "link_underline", "enabled": true, "description": "Disable to remove the hyperlink underline effect on techniques."},
|
||||
{"name": "metadata", "enabled": true, "description": "Disable to remove the ability to add metadata to techniques."},
|
||||
{"name": "clear_annotations", "enabled": true, "description": "Disable to remove the button to clear all annotations on the selected techniques."}
|
||||
]}
|
||||
]
|
||||
|
||||
@@ -50,7 +50,7 @@ nginxtmp:
|
||||
|
||||
navigatorconfig:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/navigator/navigator_config.json
|
||||
- name: /opt/so/conf/navigator/config.json
|
||||
- source: salt://nginx/files/navigator_config.json
|
||||
- user: 939
|
||||
- group: 939
|
||||
@@ -59,7 +59,7 @@ navigatorconfig:
|
||||
|
||||
navigatordefaultlayer:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/navigator/nav_layer_playbook.json
|
||||
- name: /opt/so/conf/navigator/layers/nav_layer_playbook.json
|
||||
- source: salt://nginx/files/nav_layer_playbook.json
|
||||
- user: 939
|
||||
- group: 939
|
||||
@@ -69,7 +69,7 @@ navigatordefaultlayer:
|
||||
|
||||
navigatorpreattack:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/navigator/pre-attack.json
|
||||
- name: /opt/so/conf/navigator/layers/pre-attack.json
|
||||
- source: salt://nginx/files/pre-attack.json
|
||||
- user: 939
|
||||
- group: 939
|
||||
@@ -78,7 +78,7 @@ navigatorpreattack:
|
||||
|
||||
navigatorenterpriseattack:
|
||||
file.managed:
|
||||
- name: /opt/so/conf/navigator/enterprise-attack.json
|
||||
- name: /opt/so/conf/navigator/layers/enterprise-attack.json
|
||||
- source: salt://nginx/files/enterprise-attack.json
|
||||
- user: 939
|
||||
- group: 939
|
||||
@@ -99,10 +99,8 @@ so-nginx:
|
||||
- /etc/pki/managerssl.crt:/etc/pki/nginx/server.crt:ro
|
||||
- /etc/pki/managerssl.key:/etc/pki/nginx/server.key:ro
|
||||
# ATT&CK Navigator binds
|
||||
- /opt/so/conf/navigator/navigator_config.json:/opt/socore/html/navigator/assets/config.json:ro
|
||||
- /opt/so/conf/navigator/nav_layer_playbook.json:/opt/socore/html/navigator/assets/playbook.json:ro
|
||||
- /opt/so/conf/navigator/enterprise-attack.json:/opt/socore/html/navigator/assets/enterprise-attack.json:ro
|
||||
- /opt/so/conf/navigator/pre-attack.json:/opt/socore/html/navigator/assets/pre-attack.json:ro
|
||||
- /opt/so/conf/navigator/layers/:/opt/socore/html/navigator/assets/so:ro
|
||||
- /opt/so/conf/navigator/config.json:/opt/socore/html/navigator/assets/config.json:ro
|
||||
{% endif %}
|
||||
{% if ISAIRGAP is sameas true %}
|
||||
- /nsm/repo:/opt/socore/html/repo:ro
|
||||
|
||||
@@ -42,6 +42,15 @@ query_updatwebhooks:
|
||||
- connection_user: root
|
||||
- connection_pass: {{ MYSQLPASS }}
|
||||
|
||||
query_updatename:
|
||||
mysql_query.run:
|
||||
- database: playbook
|
||||
- query: "update custom_fields set name = 'Custom Filter' where id = 21;"
|
||||
- connection_host: {{ MAINIP }}
|
||||
- connection_port: 3306
|
||||
- connection_user: root
|
||||
- connection_pass: {{ MYSQLPASS }}
|
||||
|
||||
query_updatepluginurls:
|
||||
mysql_query.run:
|
||||
- database: playbook
|
||||
|
||||
@@ -11,6 +11,7 @@
|
||||
{% set PYTHON3INFLUX= 'influxdb == ' ~ PYTHONINFLUXVERSION %}
|
||||
{% set PYTHON3INFLUXDEPS= ['certifi', 'chardet', 'python-dateutil', 'pytz', 'requests'] %}
|
||||
{% set PYTHONINSTALLER = 'pip' %}
|
||||
{% set SYSTEMD_UNIT_FILE = '/lib/systemd/system/salt-minion.service' %}
|
||||
{% else %}
|
||||
{% set SPLITCHAR = '-' %}
|
||||
{% set SALTNOTHELD = salt['cmd.run']('yum versionlock list | grep -q salt ; echo $?', python_shell=True) %}
|
||||
@@ -21,6 +22,7 @@
|
||||
{% set PYTHON3INFLUX= 'securityonion-python3-influxdb' %}
|
||||
{% set PYTHON3INFLUXDEPS= ['python36-certifi', 'python36-chardet', 'python36-dateutil', 'python36-pytz', 'python36-requests'] %}
|
||||
{% set PYTHONINSTALLER = 'pkg' %}
|
||||
{% set SYSTEMD_UNIT_FILE = '/usr/lib/systemd/system/salt-minion.service' %}
|
||||
{% endif %}
|
||||
|
||||
{% set INSTALLEDSALTVERSION = salt['pkg.version']('salt-minion').split(SPLITCHAR)[0] %}
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
{% from 'salt/map.jinja' import INSTALLEDSALTVERSION %}
|
||||
{% from 'salt/map.jinja' import SALTNOTHELD %}
|
||||
{% from 'salt/map.jinja' import SALTPACKAGES %}
|
||||
{% from 'salt/map.jinja' import SYSTEMD_UNIT_FILE %}
|
||||
{% import_yaml 'salt/minion.defaults.yaml' as SALTMINION %}
|
||||
{% set service_start_delay = SALTMINION.salt.minion.service_start_delay %}
|
||||
|
||||
@@ -82,7 +83,7 @@ set_log_levels:
|
||||
|
||||
salt_minion_service_unit_file:
|
||||
file.managed:
|
||||
- name: /etc/systemd/system/multi-user.target.wants/salt-minion.service
|
||||
- name: {{ SYSTEMD_UNIT_FILE }}
|
||||
- source: salt://salt/service/salt-minion.service.jinja
|
||||
- template: jinja
|
||||
- defaults:
|
||||
@@ -110,6 +111,7 @@ salt_minion_service:
|
||||
- file: set_log_levels
|
||||
- file: salt_minion_service_unit_file
|
||||
{% endif %}
|
||||
- order: last
|
||||
|
||||
|
||||
patch_pkg:
|
||||
|
||||
269
salt/sensoroni/files/analyzers/README.md
Normal file
@@ -0,0 +1,269 @@
|
||||
# Security Onion Analyzers
|
||||
|
||||
Security Onion provides a means for performing data analysis on varying inputs. This data can be any data of interest sourced from event logs. Examples include hostnames, IP addresses, file hashes, URLs, etc. The analysis is conducted by one or more analyzers that understand that type of input. Analyzers come with the default installation of Security Onion. However, it is also possible to add custom analyzers to extend the analysis to other areas or data types.
|
||||
|
||||
## Supported Observable Types
|
||||
The built-in analyzers support the following observable types:
|
||||
|
||||
| Name | Domain | Hash | IP | JA3 | Mail | Other | URI | URL | User Agent |
|
||||
| ------------------------|--------|-------|-------|-------|-------|-------|-------|-------|------------
|
||||
| Alienvault OTX |✓ |✓|✓|✗|✗|✗|✗|✓|✗|
|
||||
| EmailRep |✗ |✗|✗|✗|✓|✗|✗|✗|✗|
|
||||
| Greynoise |✗ |✗|✓|✗|✗|✗|✗|✗|✗|
|
||||
| JA3er |✗ |✗|✗|✓|✗|✗|✗|✗|✗|
|
||||
| LocalFile |✓ |✓|✓|✓|✗|✓|✗|✓|✗|
|
||||
| Malware Hash Registry |✗ |✓|✗|✗|✗|✗|✗|✓|✗|
|
||||
| Pulsedive |✓ |✓|✓|✗|✗|✗|✓|✓|✓|
|
||||
| Spamhaus |✗ |✗|✓|✗|✗|✗|✗|✗|✗|
|
||||
| Urlhaus |✗ |✗|✗|✗|✗|✗|✗|✓|✗|
|
||||
| Urlscan |✗ |✗|✗|✗|✗|✗|✗|✓|✗|
|
||||
| Virustotal |✓ |✓|✓|✗|✗|✗|✗|✓|✗|
|
||||
| WhoisLookup |✓ |✗|✗|✗|✗|✗|✓|✗|✗|
|
||||
|
||||
## Authentication
|
||||
Many analyzers require authentication, via an API key or similar. The table below indicates which analyzers require it.
|
||||
|
||||
| Name | Authn Req'd|
|
||||
--------------------------|------------|
|
||||
[AlienVault OTX](https://otx.alienvault.com/api) |✓|
|
||||
[EmailRep](https://emailrep.io/key) |✓|
|
||||
[GreyNoise](https://www.greynoise.io/plans/community) |✓|
|
||||
[JA3er](https://ja3er.com/) |✗|
|
||||
LocalFile |✗|
|
||||
[Malware Hash Registry](https://hash.cymru.com/docs_whois) |✗|
|
||||
[Pulsedive](https://pulsedive.com/api/) |✓|
|
||||
[Spamhaus](https://www.spamhaus.org/dbl/) |✗|
|
||||
[Urlhaus](https://urlhaus.abuse.ch/) |✗|
|
||||
[Urlscan](https://urlscan.io/docs/api/) |✓|
|
||||
[VirusTotal](https://developers.virustotal.com/reference/overview) |✓|
|
||||
[WhoisLookup](https://github.com/meeb/whoisit) |✗|
|
||||
|
||||
|
||||
## Developer Guide
|
||||
|
||||
### Python
|
||||
|
||||
Analyzers are Python modules, and can be made up of a single .py script, for simpler analyzers, or a complex set of scripts organized within nested directories.
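
As a rough illustration, a minimal single-file analyzer might be structured like the sketch below. It mirrors the bundled analyzers shown later in this document (such as `emailrep.py`) and relies on the shared `helpers` module; the file and field names here are illustrative only.

```python
# myanalyzer.py - illustrative skeleton of a single-file analyzer.
import argparse
import json
import os

import helpers  # shared functions provided alongside the built-in analyzers


def analyze(conf, input):
    meta = helpers.loadMetadata(__file__)      # reads the analyzer's .json metadata
    data = helpers.parseArtifact(input)        # parses the artifact JSON argument
    helpers.checkSupportedType(meta, data["artifactType"])
    # ... query a service or local data source using data["value"] ...
    return {"response": {}, "summary": "no_results", "status": "info"}


def main():
    dir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser(description='Example analyzer')
    parser.add_argument('artifact', help='the artifact represented in JSON format')
    parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/myanalyzer.yaml",
                        help='optional config file to use instead of the default config file')
    args = parser.parse_args()
    if args.artifact:
        results = analyze(helpers.loadConfig(args.config), args.artifact)
        print(json.dumps(results))


if __name__ == "__main__":
    main()
```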
|
||||
|
||||
The Python language was chosen because of its wide adoption in the security industry, its ease of development and testing, and the abundance of developers with Python skills.
|
||||
|
||||
Specifically, analyzers must be compatible with Python 3.10.
|
||||
|
||||
For more information about Python, see the [Python Documentation](https://docs.python.org).
|
||||
|
||||
### Development
|
||||
|
||||
Custom analyzers should be developed outside of the Security Onion cluster, in a proper software development environment, with version control or other backup mechanisms in place. The analyzer can be developed, unit tested, and integration tested without the need for a Security Onion installation. Once satisfied with the analyzer's functionality, copy the analyzer directory to the Security Onion manager node.
|
||||
|
||||
Developing an analyzer directly on a Security Onion manager node is strongly discouraged, as loss of source code (and time and effort) can occur should the manager node suffer a catastrophic failure with disk storage loss.
|
||||
|
||||
For best results, avoid long, complicated functions in favor of short, discrete functions. This has several benefits:
|
||||
|
||||
- Easier to troubleshoot
|
||||
- Easier to maintain
|
||||
- Easier to unit test
|
||||
- Easier for other developers to review
|
||||
|
||||
### Linting
|
||||
|
||||
Source code should adhere to the [PEP 8 - Style Guide for Python Code](https://peps.python.org/pep-0008/). Developers can use the default configuration of `flake8` to validate conformance, or run the included `build.sh` inside the analyzers directory. Note that linting conformance is mandatory for analyzers that are contributed back to the Security Onion project.
|
||||
|
||||
### Testing
|
||||
|
||||
Python's [unittest](https://docs.python.org/3/library/unittest.html) library can be used for covering analyzer code with unit tests. Unit tests are encouraged for custom analyzers, and mandatory for public analyzers submitted back to the Security Onion project.
|
||||
|
||||
If you are new to unit testing, please see the included `urlhaus_test.py` as an example.
|
||||
|
||||
Unit tests should be named following the pattern `<scriptname>_test.py`.
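
For example, a minimal test module for a hypothetical `myanalyzer` analyzer might look like the following sketch. The module, function, and field names are illustrative and simply follow the conventions used by the bundled tests (such as `emailrep_test.py`).

```python
# myanalyzer_test.py - minimal unit test sketch for a hypothetical analyzer.
from unittest.mock import patch, MagicMock
import unittest

from myanalyzer import myanalyzer  # hypothetical analyzer package/module


class TestMyAnalyzerMethods(unittest.TestCase):

    def test_prepareResults_threat(self):
        # prepareResults() is assumed to convert a raw API response into
        # the summary/status structure expected by Sensoroni.
        raw = {"classification": "malicious"}
        results = myanalyzer.prepareResults(raw)
        self.assertEqual(results["response"], raw)
        self.assertEqual(results["status"], "threat")

    def test_analyze(self):
        # Stub out the network call so the test runs offline.
        output = {"classification": "malicious"}
        artifact = '{"value":"192.0.2.1","artifactType":"ip"}'
        conf = {"base_url": "https://example.invalid/", "api_key": "abcd1234"}
        with patch('myanalyzer.myanalyzer.sendReq', new=MagicMock(return_value=output)) as mock:
            results = myanalyzer.analyze(conf, artifact)
            self.assertEqual(results["status"], "threat")
            mock.assert_called_once()


if __name__ == "__main__":
    unittest.main()
```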
|
||||
|
||||
|
||||
### Analyzer Package Structure
|
||||
|
||||
Deployment of a custom analyzer entails copying the analyzer source directory and dependency wheel archives to the Security Onion manager node. The destination locations can be found inside the `securityonion` salt source directory tree. Using the [Saltstack](https://github.com/saltstack/salt) directory pattern allows developers to add their own analyzers with minimal additional effort needed to upgrade to newer versions of Security Onion. When the _sensoroni_ salt state executes, it will merge the default analyzers with any local analyzers, and copy the merged analyzers into the `/opt/so/conf/sensoroni` directory.
|
||||
|
||||
Do not modify files in the `/opt/so/conf/sensoroni` directory! This is a generated directory, and any changes made inside it will be automatically erased at frequent intervals.
|
||||
|
||||
On a Security Onion manager, custom analyzers should be placed inside the `/opt/so/saltstack/local/salt/sensoroni` directory, as described in the next section.
|
||||
|
||||
#### Directory Tree
|
||||
|
||||
From within the default saltstack directory, the following files and directories exist:
|
||||
|
||||
```
|
||||
salt
|
||||
|- sensoroni
|
||||
|- files
|
||||
|- analyzers
|
||||
|- urlhaus <- Example of an existing analyzer
|
||||
| |- source-packages <- Contains wheel package bundles for this analyzer's dependencies
|
||||
| |- site-packages <- Auto-generated site-packages directory (or used for custom dependencies)
|
||||
| |- requirements.txt <- List of all dependencies needed for this analyzer
|
||||
| |- urlhaus.py <- Source code for the analyzer
|
||||
| |- urlhaus_test.py <- Unit tests for the analyzer source code
|
||||
| |- urlhaus.json <- Metadata for the analyzer
|
||||
| |- __init__.py <- Package initialization file, often empty
|
||||
|
|
||||
|- build.sh <- Simple CI tool for validating linting and unit tests
|
||||
|- helpers.py <- Common functions shared by many analyzers
|
||||
|- helpers_test.py <- Unit tests for the shared source code
|
||||
|- pytest.ini <- Configuration options for flake8 and pytest
|
||||
|- README.md <- The file you are currently reading
|
||||
```
|
||||
|
||||
Custom analyzers should conform to this same structure, but instead of being placed in the `/opt/so/saltstack/default` directory tree, they should be placed in the `/opt/so/saltstack/local` directory tree. This ensures future Security Onion upgrades will not overwrite customizations. Shared files like `build.sh` and `helpers.py` do not need to be duplicated. They can remain in the _default_ directory tree. Only new or modified files should exist in the _local_ directory tree.
|
||||
|
||||
#### Metadata
|
||||
|
||||
Each analyzer includes a metadata file that describes the function of the analyzer, required inputs, artifact compatibility, optional configuration options, the analyzer version, and other important details. This metadata file is static and is not intended to be used for dynamic or custom configuration options. It should only be modified by the author of the analyzer.
|
||||
|
||||
The following example describes the urlhaus metadata content:
|
||||
|
||||
```
|
||||
{
|
||||
"name": "Urlhaus", <- Unique human-friendly name of this analyzer
|
||||
"version": "0.1", <- The version of the analyzer
|
||||
"author": "Security Onion Solutions", <- Author's name, and/or email or other contact information
|
||||
"description": "This analyzer queries URLHaus...", <- A brief, concise description of the analyzer
|
||||
"supportedTypes" : ["url"], <- List of types that must match the SOC observable types
|
||||
"baseUrl": "https://urlhaus-api.abuse.ch/v1/url/" <- Optional hardcoded data used by the analyzer
|
||||
}
|
||||
```
|
||||
|
||||
The `supportedTypes` list should only contain the types that this analyzer can work with. In the case of the URLHaus analyzer, we know that it works with URLs, so adding "hash" to this list wouldn't make sense, since URLHaus doesn't provide information about file hashes. If an analyzer does not support a particular type, it will not show up in the SOC analyzer results for that observable. This is intentional, to eliminate unnecessary screen clutter in SOC. To find the list of available values for the `supportedTypes` field, log in to SOC and, inside of a Case, click the + button on the Observables tab. You will see a list of types, and each of those can be used in this metadata field when applicable to the analyzer.
|
||||
|
||||
#### Dependencies
|
||||
|
||||
Analyzers will often require the use of third-party packages. For example, if an analyzer needs to make a request to a remote server via HTTPS, then the `requests` package will likely be used. Each analyzer will contain a `requirements.txt` file, in which all third-party dependencies can be specified, following the Python [Requirements File Specification](https://pip.pypa.io/en/stable/reference/requirements-file-format/).
|
||||
|
||||
Additionally, to support airgapped users, the dependency packages themselves, and any transitive dependencies, should be placed inside the `source-packages` directory. To obtain the full hierarchy of dependencies, execute the following command:
|
||||
|
||||
```bash
|
||||
pip download -r <my-analyzer-path>/requirements.txt -d <my-analyzer-path>/source-packages
|
||||
```
|
||||
|
||||
|
||||
### Analyzer Architecture
|
||||
|
||||
The Sensoroni Docker container is responsible for executing analyzers. Only the manager's Sensoroni container will process analyzer jobs. Other nodes in the grid, such as sensors and search nodes, will not be assigned analyzer jobs.
|
||||
|
||||
When the Sensoroni Docker container starts, the `/opt/so/conf/sensoroni/analyzer` directory is mapped into the container. The initialization of the Sensoroni Analyze module will scan that directory for any subdirectories. Each valid subdirectory will be added as an available analyzer.
|
||||
|
||||
The analyzer itself will only run when a user in SOC enqueues an analyzer job, such as via the Cases -> Observables tab. When the Sensoroni node is ready to run the job, it will execute the Python interpreter separately for each loaded analyzer. The command line resembles the following:
|
||||
|
||||
```bash
|
||||
python -m urlhaus '{"artifactType":"url","value":"https://bigbadbotnet.invalid",...}'
|
||||
```
|
||||
|
||||
It is up to each analyzer to determine whether the provided input is compatible. This is assisted by the analyzer metadata, as described earlier in this document, via the `supportedTypes` list.
|
||||
|
||||
Once the analyzer completes its work, it must terminate promptly. See the following sections for more details on the expected internal behavior of the analyzer.
|
||||
|
||||
#### Configuration
|
||||
|
||||
Analyzers may need dynamic configuration data, such as credentials or other secrets, in order to complete their function. Optional configuration files can provide this information, and are expected to reside in the analyzer's directory. Configuration files are typically written in YAML syntax for ease of modification.
|
||||
|
||||
Configuration files for analyzers included with Security Onion will be pillarized, meaning they derive their custom values from the Saltstack pillar data. For example, an analyzer that requires a user-supplied credential might contain a config file resembling the following, where Jinja templating syntax is used to extract Salt pillar data:
|
||||
|
||||
```yaml
|
||||
username: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:username', '') }}
|
||||
password: {{ salt['pillar.get']('sensoroni:analyzers:myanalyzer:password', '') }}
|
||||
```
|
||||
|
||||
Sensoroni will not provide any inputs to the analyzer during execution, other than the artifact input in JSON format. However, developers will likely need to test the analyzer outside of Sensoroni and without Jinja templating; therefore, an alternate config file should normally be supplied as the configuration argument during testing. Analyzers should allow for this additional command line argument, but by default should automatically read a configuration file stored in the analyzer's directory.
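
For instance, a hypothetical analyzer might handle configuration as in the sketch below: the YAML file shipped in the analyzer's directory is read by default, while a different file can be supplied with `-c`/`--config` for local testing. The file and function names are illustrative, and PyYAML is assumed to be declared in `requirements.txt`.

```python
# Sketch of configuration handling for a hypothetical analyzer.
import argparse
import os

import yaml  # PyYAML, assumed to be listed in requirements.txt


def loadConfig(path):
    # Read the YAML configuration; on a deployed grid this file is rendered
    # from pillar data, while during development it can be a plain YAML file.
    with open(path) as f:
        return yaml.safe_load(f)


def parseArgs():
    dir = os.path.dirname(os.path.realpath(__file__))
    parser = argparse.ArgumentParser(description='Example analyzer')
    parser.add_argument('artifact', help='the artifact represented in JSON format')
    parser.add_argument('-c', '--config', metavar="CONFIG_FILE",
                        default=dir + "/myanalyzer.yaml",
                        help='optional config file to use instead of the default config file')
    return parser.parse_args()
```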
|
||||
|
||||
#### Exit Code
|
||||
|
||||
If an analyzer determines it cannot or should not operate on the input then the analyzer should return an exit code of `126`.
|
||||
|
||||
If an analyzer does attempt to operate against the input, then the exit code should be `0`, regardless of the outcome. The outcome, be it an error, a confirmed threat detection, or an unknown result, should be noted in the output of the analyzer.
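
As a rough sketch of this contract (with illustrative names), the decision might look like the following, where the analyzer declines unsupported input types and otherwise reports its outcome on stdout:

```python
# Illustrative sketch of the exit-code contract; meta holds the analyzer's
# metadata and data holds the parsed artifact.
import json
import sys


def finish(meta, data, raw_results):
    if data["artifactType"] not in meta.get("supportedTypes", []):
        sys.exit(126)  # analyzer cannot (or should not) operate on this input

    # The analyzer did run: exit 0 and describe the outcome on stdout,
    # even when that outcome is an error or an unknown result.
    print(json.dumps({"response": raw_results, "summary": "no_results", "status": "info"}))
    sys.exit(0)
```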
|
||||
|
||||
#### Output
|
||||
|
||||
The outcome of the analyzer is reflected in the analyzer's output to `stdout`. The output must be JSON formatted, and should contain the following fields.
|
||||
|
||||
`summary`: A very short summary of the outcome. This should be under 50 characters; otherwise it will be truncated when displayed on the Analyzer job list.
|
||||
|
||||
`status`: Can be one of the following status values, which most appropriately reflects the outcome:
|
||||
- `ok`: The analyzer has concluded that the provided input is not a known threat.
|
||||
- `info`: This analyzer provides informative data, but does not attempt to conclude the input is a threat.
|
||||
- `caution`: The data provided is inconclusive. Analysts should review this information further. This can be used in error scenarios, such as if the analyzer fails to complete, perhaps due to a remote service being offline.
|
||||
- `threat`: The analyzer has detected that the input is likely related to a threat.
|
||||
|
||||
`error`: [Optional] If the analyzer encounters an unrecoverable error, those details, useful for administrators to troubleshoot the problem, should be placed in this field.
|
||||
|
||||
Additional fields are allowed, and should contain data that is specific to the analyzer.
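
A common pattern, used by the bundled analyzers' `prepareResults()` functions, is to wrap the raw service response together with the summary and status before printing the JSON. A simplified sketch, with illustrative classification logic:

```python
# Simplified sketch of assembling analyzer output; the classification logic
# is illustrative and will differ per analyzer.
import json


def prepareResults(raw):
    if raw.get("classification") == "malicious":
        status, summary = "threat", "malicious"
    else:
        status, summary = "info", "no_results"
    return {"response": raw, "summary": summary, "status": status}


print(json.dumps(prepareResults({"classification": "malicious"})))
```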
|
||||
|
||||
Below is an example of a _urlhaus_ analyzer output. Note that the urlhaus raw JSON is added to a custom field called "response".
|
||||
|
||||
```json
|
||||
{
|
||||
"response": {
|
||||
"blacklists": {
|
||||
"spamhaus_dbl": "not listed",
|
||||
"surbl": "not listed"
|
||||
},
|
||||
"date_added": "2022-04-07 12:39:14 UTC",
|
||||
"host": "abeibaba.com",
|
||||
"id": "2135795",
|
||||
"larted": "false",
|
||||
"last_online": null,
|
||||
"payloads": null,
|
||||
"query_status": "ok",
|
||||
"reporter": "switchcert",
|
||||
"tags": [
|
||||
"Flubot"
|
||||
],
|
||||
"takedown_time_seconds": null,
|
||||
"threat": "malware_download",
|
||||
"url": "https://abeibaba.com/ian/?redacted",
|
||||
"url_status": "offline",
|
||||
"urlhaus_reference": "https://urlhaus.abuse.ch/url/2135795/"
|
||||
},
|
||||
"status": "threat",
|
||||
"summary": "malware_download"
|
||||
}
|
||||
```
|
||||
|
||||
Users in SOC will be able to view the entire JSON output; therefore, it is important that sensitive information, such as credentials or other secrets, is excluded from the output.
|
||||
|
||||
#### Internationalization
|
||||
|
||||
Some of the built-in analyzers use snake_case summary values instead of human-friendly words or phrases. These are identifiers that the SOC UI will use to look up a localized translation for the user. The use of these identifiers is not required for custom analyzers. In fact, in order for an identifier to be properly localized, the translations must exist in the SOC product, which is out of scope of this development guide. That said, the following generic translations might be useful for custom analyzers:
|
||||
|
||||
| Identifier | English |
|
||||
| ------------------ | -------------------------- |
|
||||
| `malicious` | Malicious |
|
||||
| `suspicious` | Suspicious |
|
||||
| `harmless` | Harmless |
|
||||
| `internal_failure` | Analyzer Internal Failure |
|
||||
| `timeout` | Remote Host Timed Out |
|
||||
|
||||
#### Timeout
|
||||
|
||||
It is expected that analyzers will finish quickly, but a timeout is in place that will abort the analyzer if it is exceeded. The default timeout is 15 minutes (900000 milliseconds), and it can be customized via the `sensoroni:analyze_timeout_ms` salt pillar.
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
Review the Security Onion project [contribution guidelines](https://github.com/Security-Onion-Solutions/securityonion/blob/master/CONTRIBUTING.md) if you are considering contributing an analyzer back to the project.
|
||||
|
||||
### Procedure
|
||||
|
||||
In order to make a custom analyzer into a permanent Security Onion analyzer, the following steps need to be taken:
|
||||
|
||||
1. Fork the [securityonion GitHub repository](https://github.com/Security-Onion-Solutions/securityonion)
|
||||
2. Copy your custom analyzer directory to the forked project, under the `securityonion/salt/sensoroni/files/analyzers` directory.
|
||||
3. Ensure the contribution requirements in the following section are met.
|
||||
4. Submit a [pull request](https://github.com/Security-Onion-Solutions/securityonion/pulls) to merge your GitHub fork back into the `securityonion` _dev_ branch.
|
||||
|
||||
### Requirements
|
||||
|
||||
The following requirements must be satisfied in order for analyzer pull requests to be accepted into the Security Onion GitHub project:
|
||||
|
||||
- Analyzer contributions must not contain licensed dependencies or source code that is incompatible with the [GPLv2 license](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html).
|
||||
- All source code must pass the `flake8` lint check. This ensures source code conforms to the same style guides as the other analyzers. The Security Onion project will automatically run the linter after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed lint checks will result in the submitter being sent an automated email message.
|
||||
- All source code must include accompanying unit test coverage. The Security Onion project will automatically run the unit tests after each push to a `securityonion` repository fork, and again when submitting a pull request. Failed unit tests, or insufficient unit test coverage, will result in the submitter being sent an automated email message.
|
||||
- Documentation of the analyzer, its input requirements, conditions for operation, and other relevant information must be clearly written in an accompanying analyzer metadata file. This file is described in more detail earlier in this document.
|
||||
- Source code must be well-written and be free of security defects that can put users or their data at unnecessary risk.
|
||||
|
||||
|
||||
39
salt/sensoroni/files/analyzers/build.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/bin/bash
|
||||
|
||||
COMMAND=$1
|
||||
SENSORONI_CONTAINER=${SENSORONI_CONTAINER:-so-sensoroni}
|
||||
|
||||
function ci() {
|
||||
HOME_DIR=$(dirname "$0")
|
||||
TARGET_DIR=${1:-.}
|
||||
|
||||
PATH=$PATH:/usr/local/bin
|
||||
|
||||
if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then
|
||||
echo "Missing dependencies. Consider running the following command:"
|
||||
echo " python -m pip install flake8 pytest pytest-cov"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini"
|
||||
pytest "$TARGET_DIR" "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100
|
||||
}
|
||||
|
||||
function download() {
|
||||
ANALYZERS=$1
|
||||
if [[ $ANALYZERS = "all" ]]; then
|
||||
ANALYZERS="*/"
|
||||
fi
|
||||
for ANALYZER in $ANALYZERS; do
|
||||
rm -fr $ANALYZER/site-packages
|
||||
mkdir -p $ANALYZER/source-packages
|
||||
rm -fr $ANALYZER/source-packages/*
|
||||
docker exec -it $SENSORONI_CONTAINER pip download -r /opt/sensoroni/analyzers/$ANALYZER/requirements.txt -d /opt/sensoroni/analyzers/$ANALYZER/source-packages
|
||||
done
|
||||
}
|
||||
|
||||
if [[ "$COMMAND" == "download" ]]; then
|
||||
download "$2"
|
||||
else
|
||||
ci
|
||||
fi
|
||||
17
salt/sensoroni/files/analyzers/emailrep/README.md
Normal file
@@ -0,0 +1,17 @@
|
||||
# EmailRep
|
||||
|
||||
## Description
|
||||
Submit an email address to EmailRepIO for analysis.
|
||||
|
||||
## Configuration Requirements
|
||||
|
||||
``api_key`` - API key used for communication with the EmailRepIO API
|
||||
|
||||
This value should be set in the ``sensoroni`` pillar, like so:
|
||||
|
||||
```
|
||||
sensoroni:
|
||||
analyzers:
|
||||
emailrep:
|
||||
api_key: $yourapikey
|
||||
```
|
||||
0
salt/sensoroni/files/analyzers/emailrep/__init__.py
Normal file
7
salt/sensoroni/files/analyzers/emailrep/emailrep.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"name": "EmailRep",
|
||||
"version": "0.1",
|
||||
"author": "Security Onion Solutions",
|
||||
"description": "This analyzer queries the EmailRep API for email address reputation information",
|
||||
"supportedTypes" : ["email", "mail"]
|
||||
}
|
||||
67
salt/sensoroni/files/analyzers/emailrep/emailrep.py
Executable file
@@ -0,0 +1,67 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import requests
|
||||
import helpers
|
||||
import argparse
|
||||
|
||||
|
||||
def checkConfigRequirements(conf):
|
||||
if "api_key" not in conf:
|
||||
sys.exit(126)
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def sendReq(conf, meta, email):
|
||||
url = conf['base_url'] + email
|
||||
headers = {"Key": conf['api_key']}
|
||||
response = requests.request('GET', url=url, headers=headers)
|
||||
return response.json()
|
||||
|
||||
|
||||
def prepareResults(raw):
|
||||
if "suspicious" in raw:
|
||||
if raw['suspicious'] is True:
|
||||
status = "caution"
|
||||
summary = "suspicious"
|
||||
elif raw['suspicious'] is False:
|
||||
status = "ok"
|
||||
summary = "harmless"
|
||||
elif "status" in raw:
|
||||
if raw["reason"] == "invalid email":
|
||||
status = "caution"
|
||||
summary = "invalid_input"
|
||||
if "exceeded daily limit" in raw["reason"]:
|
||||
status = "caution"
|
||||
summary = "excessive_usage"
|
||||
else:
|
||||
status = "caution"
|
||||
summary = "internal_failure"
|
||||
results = {'response': raw, 'summary': summary, 'status': status}
|
||||
return results
|
||||
|
||||
|
||||
def analyze(conf, input):
|
||||
checkConfigRequirements(conf)
|
||||
meta = helpers.loadMetadata(__file__)
|
||||
data = helpers.parseArtifact(input)
|
||||
helpers.checkSupportedType(meta, data["artifactType"])
|
||||
response = sendReq(conf, meta, data["value"])
|
||||
return prepareResults(response)
|
||||
|
||||
|
||||
def main():
|
||||
dir = os.path.dirname(os.path.realpath(__file__))
|
||||
parser = argparse.ArgumentParser(description='Search EmailRep for a given artifact')
|
||||
parser.add_argument('artifact', help='the artifact represented in JSON format')
|
||||
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/emailrep.yaml", help='optional config file to use instead of the default config file')
|
||||
|
||||
args = parser.parse_args()
|
||||
if args.artifact:
|
||||
results = analyze(helpers.loadConfig(args.config), args.artifact)
|
||||
print(json.dumps(results))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
2
salt/sensoroni/files/analyzers/emailrep/emailrep.yaml
Normal file
@@ -0,0 +1,2 @@
|
||||
base_url: https://emailrep.io/
|
||||
api_key: "{{ salt['pillar.get']('sensoroni:analyzers:emailrep:api_key', '') }}"
|
||||
85
salt/sensoroni/files/analyzers/emailrep/emailrep_test.py
Normal file
@@ -0,0 +1,85 @@
|
||||
from io import StringIO
|
||||
import sys
|
||||
from unittest.mock import patch, MagicMock
|
||||
from emailrep import emailrep
|
||||
import unittest
|
||||
|
||||
|
||||
class TestEmailRepMethods(unittest.TestCase):
|
||||
|
||||
def test_main_missing_input(self):
|
||||
with patch('sys.exit', new=MagicMock()) as sysmock:
|
||||
with patch('sys.stderr', new=StringIO()) as mock_stderr:
|
||||
sys.argv = ["cmd"]
|
||||
emailrep.main()
|
||||
self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
|
||||
sysmock.assert_called_once_with(2)
|
||||
|
||||
def test_main_success(self):
|
||||
output = {"foo": "bar"}
|
||||
with patch('sys.stdout', new=StringIO()) as mock_stdout:
|
||||
with patch('emailrep.emailrep.analyze', new=MagicMock(return_value=output)) as mock:
|
||||
sys.argv = ["cmd", "input"]
|
||||
emailrep.main()
|
||||
expected = '{"foo": "bar"}\n'
|
||||
self.assertEqual(mock_stdout.getvalue(), expected)
|
||||
mock.assert_called_once()
|
||||
|
||||
def test_checkConfigRequirements_not_present(self):
|
||||
conf = {"not_a_file_path": "blahblah"}
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
emailrep.checkConfigRequirements(conf)
|
||||
self.assertEqual(cm.exception.code, 126)
|
||||
|
||||
def test_sendReq(self):
|
||||
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
|
||||
meta = {}
|
||||
conf = {"base_url": "https://myurl/", "api_key": "abcd1234"}
|
||||
email = "test@abc.com"
|
||||
response = emailrep.sendReq(conf=conf, meta=meta, email=email)
|
||||
mock.assert_called_once_with("GET", headers={"Key": "abcd1234"}, url="https://myurl/test@abc.com")
|
||||
self.assertIsNotNone(response)
|
||||
|
||||
def test_prepareResults_invalidEmail(self):
|
||||
raw = {"status": "fail", "reason": "invalid email"}
|
||||
results = emailrep.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "invalid_input")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_prepareResults_not_suspicious(self):
|
||||
raw = {"email": "notsus@domain.com", "reputation": "high", "suspicious": False, "references": 21, "details": {"blacklisted": False, "malicious_activity": False, "profiles": ["twitter"]}}
|
||||
results = emailrep.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "harmless")
|
||||
self.assertEqual(results["status"], "ok")
|
||||
|
||||
def test_prepareResults_suspicious(self):
|
||||
raw = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}}
|
||||
results = emailrep.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "suspicious")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_prepareResults_exceeded_limit(self):
|
||||
raw = {"status": "fail", "reason": "exceeded daily limit. please wait 24 hrs or visit emailrep.io/key for an api key."}
|
||||
results = emailrep.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "excessive_usage")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_prepareResults_error(self):
|
||||
raw = {}
|
||||
results = emailrep.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "internal_failure")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_analyze(self):
|
||||
output = {"email": "sus@domain.com", "reputation": "none", "suspicious": True, "references": 0, "details": {"blacklisted": False, "malicious_activity": False, "profiles": []}}
|
||||
artifactInput = '{"value":"sus@domain.com","artifactType":"email"}'
|
||||
conf = {"base_url": "myurl/", "api_key": "abcd1234"}
|
||||
with patch('emailrep.emailrep.sendReq', new=MagicMock(return_value=output)) as mock:
|
||||
results = emailrep.analyze(conf, artifactInput)
|
||||
self.assertEqual(results["summary"], "suspicious")
|
||||
mock.assert_called_once()
|
||||
2
salt/sensoroni/files/analyzers/emailrep/requirements.txt
Normal file
@@ -0,0 +1,2 @@
|
||||
requests>=2.27.1
|
||||
pyyaml>=6.0
|
||||
19
salt/sensoroni/files/analyzers/greynoise/README.md
Normal file
@@ -0,0 +1,19 @@
|
||||
# Greynoise
|
||||
|
||||
## Description
|
||||
Submit an IP address to Greynoise for analysis.
|
||||
|
||||
## Configuration Requirements
|
||||
|
||||
``api_key`` - API key used for communication with the Greynoise API
|
||||
``api_version`` - Version of the Greynoise API. Defaults to ``community``
|
||||
|
||||
|
||||
This value should be set in the ``sensoroni`` pillar, like so:
|
||||
|
||||
```
|
||||
sensoroni:
|
||||
analyzers:
|
||||
greynoise:
|
||||
api_key: $yourapikey
|
||||
```
|
||||
7
salt/sensoroni/files/analyzers/greynoise/greynoise.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"name": "Greynoise IP Analyzer",
|
||||
"version": "0.1",
|
||||
"author": "Security Onion Solutions",
|
||||
"description": "This analyzer queries Greynoise for context around an IP address",
|
||||
"supportedTypes" : ["ip"]
|
||||
}
|
||||
78
salt/sensoroni/files/analyzers/greynoise/greynoise.py
Executable file
@@ -0,0 +1,78 @@
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import requests
|
||||
import helpers
|
||||
import argparse
|
||||
|
||||
|
||||
def checkConfigRequirements(conf):
|
||||
if "api_key" not in conf or len(conf['api_key']) == 0:
|
||||
sys.exit(126)
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def sendReq(conf, meta, ip):
|
||||
url = conf['base_url']
|
||||
if conf['api_version'] == 'community':
|
||||
url = url + 'v3/community/' + ip
|
||||
elif conf['api_version'] in ('investigate', 'automate'):
|
||||
url = url + 'v2/noise/context/' + ip
|
||||
headers = {"key": conf['api_key']}
|
||||
response = requests.request('GET', url=url, headers=headers)
|
||||
return response.json()
|
||||
|
||||
|
||||
def prepareResults(raw):
|
||||
if "message" in raw:
|
||||
if "Success" in raw["message"]:
|
||||
if "classification" in raw:
|
||||
if "benign" in raw['classification']:
|
||||
status = "ok"
|
||||
summary = "harmless"
|
||||
elif "malicious" in raw['classification']:
|
||||
status = "threat"
|
||||
summary = "malicious"
|
||||
elif "unknown" in raw['classification']:
|
||||
status = "caution"
|
||||
summary = "suspicious"
|
||||
elif "IP not observed scanning the internet or contained in RIOT data set." in raw["message"]:
|
||||
status = "ok"
|
||||
summary = "no_results"
|
||||
elif "Request is not a valid routable IPv4 address" in raw["message"]:
|
||||
status = "caution"
|
||||
summary = "invalid_input"
|
||||
else:
|
||||
status = "info"
|
||||
summary = raw["message"]
|
||||
else:
|
||||
status = "caution"
|
||||
summary = "internal_failure"
|
||||
results = {'response': raw, 'summary': summary, 'status': status}
|
||||
return results
|
||||
|
||||
|
||||
def analyze(conf, input):
|
||||
checkConfigRequirements(conf)
|
||||
meta = helpers.loadMetadata(__file__)
|
||||
data = helpers.parseArtifact(input)
|
||||
helpers.checkSupportedType(meta, data["artifactType"])
|
||||
response = sendReq(conf, meta, data["value"])
|
||||
return prepareResults(response)
|
||||
|
||||
|
||||
def main():
|
||||
dir = os.path.dirname(os.path.realpath(__file__))
|
||||
parser = argparse.ArgumentParser(description='Search Greynoise for a given artifact')
|
||||
parser.add_argument('artifact', help='the artifact represented in JSON format')
|
||||
parser.add_argument('-c', '--config', metavar="CONFIG_FILE", default=dir + "/greynoise.yaml", help='optional config file to use instead of the default config file')
|
||||
|
||||
args = parser.parse_args()
|
||||
if args.artifact:
|
||||
results = analyze(helpers.loadConfig(args.config), args.artifact)
|
||||
print(json.dumps(results))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
3
salt/sensoroni/files/analyzers/greynoise/greynoise.yaml
Normal file
@@ -0,0 +1,3 @@
|
||||
base_url: https://api.greynoise.io/
|
||||
api_key: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_key', '') }}"
|
||||
api_version: "{{ salt['pillar.get']('sensoroni:analyzers:greynoise:api_version', 'community') }}"
|
||||
117
salt/sensoroni/files/analyzers/greynoise/greynoise_test.py
Normal file
@@ -0,0 +1,117 @@
|
||||
from io import StringIO
|
||||
import sys
|
||||
from unittest.mock import patch, MagicMock
|
||||
from greynoise import greynoise
|
||||
import unittest
|
||||
|
||||
|
||||
class TestGreynoiseMethods(unittest.TestCase):
|
||||
|
||||
def test_main_missing_input(self):
|
||||
with patch('sys.exit', new=MagicMock()) as sysmock:
|
||||
with patch('sys.stderr', new=StringIO()) as mock_stderr:
|
||||
sys.argv = ["cmd"]
|
||||
greynoise.main()
|
||||
self.assertEqual(mock_stderr.getvalue(), "usage: cmd [-h] [-c CONFIG_FILE] artifact\ncmd: error: the following arguments are required: artifact\n")
|
||||
sysmock.assert_called_once_with(2)
|
||||
|
||||
def test_main_success(self):
|
||||
output = {"foo": "bar"}
|
||||
with patch('sys.stdout', new=StringIO()) as mock_stdout:
|
||||
with patch('greynoise.greynoise.analyze', new=MagicMock(return_value=output)) as mock:
|
||||
sys.argv = ["cmd", "input"]
|
||||
greynoise.main()
|
||||
expected = '{"foo": "bar"}\n'
|
||||
self.assertEqual(mock_stdout.getvalue(), expected)
|
||||
mock.assert_called_once()
|
||||
|
||||
def test_checkConfigRequirements_not_present(self):
|
||||
conf = {"not_a_file_path": "blahblah"}
|
||||
with self.assertRaises(SystemExit) as cm:
|
||||
greynoise.checkConfigRequirements(conf)
|
||||
self.assertEqual(cm.exception.code, 126)
|
||||
|
||||
def test_sendReq_community(self):
|
||||
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
|
||||
meta = {}
|
||||
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "community"}
|
||||
ip = "192.168.1.1"
|
||||
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
|
||||
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v3/community/192.168.1.1")
|
||||
self.assertIsNotNone(response)
|
||||
|
||||
def test_sendReq_investigate(self):
|
||||
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
|
||||
meta = {}
|
||||
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "investigate"}
|
||||
ip = "192.168.1.1"
|
||||
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
|
||||
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1")
|
||||
self.assertIsNotNone(response)
|
||||
|
||||
def test_sendReq_automate(self):
|
||||
with patch('requests.request', new=MagicMock(return_value=MagicMock())) as mock:
|
||||
meta = {}
|
||||
conf = {"base_url": "https://myurl/", "api_key": "abcd1234", "api_version": "automate"}
|
||||
ip = "192.168.1.1"
|
||||
response = greynoise.sendReq(conf=conf, meta=meta, ip=ip)
|
||||
mock.assert_called_once_with("GET", headers={'key': 'abcd1234'}, url="https://myurl/v2/noise/context/192.168.1.1")
|
||||
self.assertIsNotNone(response)
|
||||
|
||||
def test_prepareResults_invalidIP(self):
|
||||
raw = {"message": "Request is not a valid routable IPv4 address"}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "invalid_input")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_prepareResults_not_found(self):
|
||||
raw = {"ip": "192.190.1.1", "noise": "false", "riot": "false", "message": "IP not observed scanning the internet or contained in RIOT data set."}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "no_results")
|
||||
self.assertEqual(results["status"], "ok")
|
||||
|
||||
def test_prepareResults_benign(self):
|
||||
raw = {"ip": "8.8.8.8", "noise": "false", "riot": "true", "classification": "benign", "name": "Google Public DNS", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "harmless")
|
||||
self.assertEqual(results["status"], "ok")
|
||||
|
||||
def test_prepareResults_malicious(self):
|
||||
raw = {"ip": "121.142.87.218", "noise": "true", "riot": "false", "classification": "malicious", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "malicious")
|
||||
self.assertEqual(results["status"], "threat")
|
||||
|
||||
def test_prepareResults_unknown(self):
|
||||
raw = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "suspicious")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_prepareResults_unknown_message(self):
|
||||
raw = {"message": "unknown"}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "unknown")
|
||||
self.assertEqual(results["status"], "info")
|
||||
|
||||
def test_prepareResults_error(self):
|
||||
raw = {}
|
||||
results = greynoise.prepareResults(raw)
|
||||
self.assertEqual(results["response"], raw)
|
||||
self.assertEqual(results["summary"], "internal_failure")
|
||||
self.assertEqual(results["status"], "caution")
|
||||
|
||||
def test_analyze(self):
|
||||
output = {"ip": "221.4.62.149", "noise": "true", "riot": "false", "classification": "unknown", "name": "unknown", "link": "https://viz.gn.io", "last_seen": "2022-04-26", "message": "Success"}
|
||||
artifactInput = '{"value":"221.4.62.149","artifactType":"ip"}'
|
||||
conf = {"base_url": "myurl/", "api_key": "abcd1234", "api_version": "community"}
|
||||
with patch('greynoise.greynoise.sendReq', new=MagicMock(return_value=output)) as mock:
|
||||
results = greynoise.analyze(conf, artifactInput)
|
||||
self.assertEqual(results["summary"], "suspicious")
|
||||
mock.assert_called_once()
|
||||
@@ -0,0 +1,2 @@
|
||||
requests>=2.27.1
|
||||
pyyaml>=6.0
|
||||
Some files were not shown because too many files have changed in this diff.