Compare commits
11 Commits
kilo
...
feature/us
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
62c702e269 | ||
|
|
f10290246f | ||
|
|
c919f6bca0 | ||
|
|
51b421a165 | ||
|
|
86ff54e844 | ||
|
|
b8cb3f5815 | ||
|
|
381a51271f | ||
|
|
10500178d5 | ||
|
|
e81e66f40d | ||
|
|
f6bd74aadf | ||
|
|
322c2804fc |
545
.github/.gitleaks.toml
vendored
@@ -1,545 +0,0 @@
|
|||||||
title = "gitleaks config"
|
|
||||||
|
|
||||||
# Gitleaks rules are defined by regular expressions and entropy ranges.
|
|
||||||
# Some secrets have unique signatures which make detecting those secrets easy.
|
|
||||||
# Examples of those secrets would be GitLab Personal Access Tokens, AWS keys, and GitHub Access Tokens.
|
|
||||||
# All these examples have defined prefixes like `glpat`, `AKIA`, `ghp_`, etc.
|
|
||||||
#
|
|
||||||
# Other secrets might just be a hash which means we need to write more complex rules to verify
|
|
||||||
# that what we are matching is a secret.
|
|
||||||
#
|
|
||||||
# Here is an example of a semi-generic secret
|
|
||||||
#
|
|
||||||
# discord_client_secret = "8dyfuiRyq=vVc3RRr_edRk-fK__JItpZ"
|
|
||||||
#
|
|
||||||
# We can write a regular expression to capture the variable name (identifier),
|
|
||||||
# the assignment symbol (like '=' or ':='), and finally the actual secret.
|
|
||||||
# The structure of a rule to match this example secret is below:
|
|
||||||
#
|
|
||||||
# Beginning string
|
|
||||||
# quotation
|
|
||||||
# │ End string quotation
|
|
||||||
# │ │
|
|
||||||
# ▼ ▼
|
|
||||||
# (?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]
|
|
||||||
#
|
|
||||||
# ▲ ▲ ▲
|
|
||||||
# │ │ │
|
|
||||||
# │ │ │
|
|
||||||
# identifier assignment symbol
|
|
||||||
# Secret
|
|
||||||
#
|
|
||||||
[[rules]]
|
|
||||||
id = "gitlab-pat"
|
|
||||||
description = "GitLab Personal Access Token"
|
|
||||||
regex = '''glpat-[0-9a-zA-Z\-\_]{20}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "aws-access-token"
|
|
||||||
description = "AWS"
|
|
||||||
regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}'''
|
|
||||||
|
|
||||||
# Cryptographic keys
|
|
||||||
[[rules]]
|
|
||||||
id = "PKCS8-PK"
|
|
||||||
description = "PKCS8 private key"
|
|
||||||
regex = '''-----BEGIN PRIVATE KEY-----'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "RSA-PK"
|
|
||||||
description = "RSA private key"
|
|
||||||
regex = '''-----BEGIN RSA PRIVATE KEY-----'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "OPENSSH-PK"
|
|
||||||
description = "SSH private key"
|
|
||||||
regex = '''-----BEGIN OPENSSH PRIVATE KEY-----'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "PGP-PK"
|
|
||||||
description = "PGP private key"
|
|
||||||
regex = '''-----BEGIN PGP PRIVATE KEY BLOCK-----'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "github-pat"
|
|
||||||
description = "GitHub Personal Access Token"
|
|
||||||
regex = '''ghp_[0-9a-zA-Z]{36}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "github-oauth"
|
|
||||||
description = "GitHub OAuth Access Token"
|
|
||||||
regex = '''gho_[0-9a-zA-Z]{36}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "SSH-DSA-PK"
|
|
||||||
description = "SSH (DSA) private key"
|
|
||||||
regex = '''-----BEGIN DSA PRIVATE KEY-----'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "SSH-EC-PK"
|
|
||||||
description = "SSH (EC) private key"
|
|
||||||
regex = '''-----BEGIN EC PRIVATE KEY-----'''
|
|
||||||
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "github-app-token"
|
|
||||||
description = "GitHub App Token"
|
|
||||||
regex = '''(ghu|ghs)_[0-9a-zA-Z]{36}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "github-refresh-token"
|
|
||||||
description = "GitHub Refresh Token"
|
|
||||||
regex = '''ghr_[0-9a-zA-Z]{76}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "shopify-shared-secret"
|
|
||||||
description = "Shopify shared secret"
|
|
||||||
regex = '''shpss_[a-fA-F0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "shopify-access-token"
|
|
||||||
description = "Shopify access token"
|
|
||||||
regex = '''shpat_[a-fA-F0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "shopify-custom-access-token"
|
|
||||||
description = "Shopify custom app access token"
|
|
||||||
regex = '''shpca_[a-fA-F0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "shopify-private-app-access-token"
|
|
||||||
description = "Shopify private app access token"
|
|
||||||
regex = '''shppa_[a-fA-F0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "slack-access-token"
|
|
||||||
description = "Slack token"
|
|
||||||
regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "stripe-access-token"
|
|
||||||
description = "Stripe"
|
|
||||||
regex = '''(?i)(sk|pk)_(test|live)_[0-9a-z]{10,32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "pypi-upload-token"
|
|
||||||
description = "PyPI upload token"
|
|
||||||
regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "gcp-service-account"
|
|
||||||
description = "Google (GCP) Service-account"
|
|
||||||
regex = '''\"type\": \"service_account\"'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "heroku-api-key"
|
|
||||||
description = "Heroku API Key"
|
|
||||||
regex = ''' (?i)(heroku[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9A-F]{8}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{4}-[0-9A-F]{12})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "slack-web-hook"
|
|
||||||
description = "Slack Webhook"
|
|
||||||
regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8,12}/[a-zA-Z0-9_]{24}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "twilio-api-key"
|
|
||||||
description = "Twilio API Key"
|
|
||||||
regex = '''SK[0-9a-fA-F]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "age-secret-key"
|
|
||||||
description = "Age secret key"
|
|
||||||
regex = '''AGE-SECRET-KEY-1[QPZRY9X8GF2TVDW0S3JN54KHCE6MUA7L]{58}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "facebook-token"
|
|
||||||
description = "Facebook token"
|
|
||||||
regex = '''(?i)(facebook[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "twitter-token"
|
|
||||||
description = "Twitter token"
|
|
||||||
regex = '''(?i)(twitter[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{35,44})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "adobe-client-id"
|
|
||||||
description = "Adobe Client ID (Oauth Web)"
|
|
||||||
regex = '''(?i)(adobe[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "adobe-client-secret"
|
|
||||||
description = "Adobe Client Secret"
|
|
||||||
regex = '''(p8e-)(?i)[a-z0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "alibaba-access-key-id"
|
|
||||||
description = "Alibaba AccessKey ID"
|
|
||||||
regex = '''(LTAI)(?i)[a-z0-9]{20}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "alibaba-secret-key"
|
|
||||||
description = "Alibaba Secret Key"
|
|
||||||
regex = '''(?i)(alibaba[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "asana-client-id"
|
|
||||||
description = "Asana Client ID"
|
|
||||||
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{16})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "asana-client-secret"
|
|
||||||
description = "Asana Client Secret"
|
|
||||||
regex = '''(?i)(asana[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "atlassian-api-token"
|
|
||||||
description = "Atlassian API token"
|
|
||||||
regex = '''(?i)(atlassian[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{24})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "bitbucket-client-id"
|
|
||||||
description = "Bitbucket client ID"
|
|
||||||
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "bitbucket-client-secret"
|
|
||||||
description = "Bitbucket client secret"
|
|
||||||
regex = '''(?i)(bitbucket[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9_\-]{64})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "beamer-api-token"
|
|
||||||
description = "Beamer API token"
|
|
||||||
regex = '''(?i)(beamer[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](b_[a-z0-9=_\-]{44})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "clojars-api-token"
|
|
||||||
description = "Clojars API token"
|
|
||||||
regex = '''(CLOJARS_)(?i)[a-z0-9]{60}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "contentful-delivery-api-token"
|
|
||||||
description = "Contentful delivery API token"
|
|
||||||
regex = '''(?i)(contentful[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{43})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "databricks-api-token"
|
|
||||||
description = "Databricks API token"
|
|
||||||
regex = '''dapi[a-h0-9]{32}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "discord-api-token"
|
|
||||||
description = "Discord API key"
|
|
||||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "discord-client-id"
|
|
||||||
description = "Discord client ID"
|
|
||||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9]{18})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "discord-client-secret"
|
|
||||||
description = "Discord client secret"
|
|
||||||
regex = '''(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_\-]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "doppler-api-token"
|
|
||||||
description = "Doppler API token"
|
|
||||||
regex = '''['\"](dp\.pt\.)(?i)[a-z0-9]{43}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "dropbox-api-secret"
|
|
||||||
description = "Dropbox API secret/key"
|
|
||||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "dropbox--api-key"
|
|
||||||
description = "Dropbox API secret/key"
|
|
||||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{15})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "dropbox-short-lived-api-token"
|
|
||||||
description = "Dropbox short lived API token"
|
|
||||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](sl\.[a-z0-9\-=_]{135})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "dropbox-long-lived-api-token"
|
|
||||||
description = "Dropbox long lived API token"
|
|
||||||
regex = '''(?i)(dropbox[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"][a-z0-9]{11}(AAAAAAAAAA)[a-z0-9\-_=]{43}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "duffel-api-token"
|
|
||||||
description = "Duffel API token"
|
|
||||||
regex = '''['\"]duffel_(test|live)_(?i)[a-z0-9_-]{43}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "dynatrace-api-token"
|
|
||||||
description = "Dynatrace API token"
|
|
||||||
regex = '''['\"]dt0c01\.(?i)[a-z0-9]{24}\.[a-z0-9]{64}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "easypost-api-token"
|
|
||||||
description = "EasyPost API token"
|
|
||||||
regex = '''['\"]EZAK(?i)[a-z0-9]{54}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "easypost-test-api-token"
|
|
||||||
description = "EasyPost test API token"
|
|
||||||
regex = '''['\"]EZTK(?i)[a-z0-9]{54}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "fastly-api-token"
|
|
||||||
description = "Fastly API token"
|
|
||||||
regex = '''(?i)(fastly[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9\-=_]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "finicity-client-secret"
|
|
||||||
description = "Finicity client secret"
|
|
||||||
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{20})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "finicity-api-token"
|
|
||||||
description = "Finicity API token"
|
|
||||||
regex = '''(?i)(finicity[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "flutterwave-public-key"
|
|
||||||
description = "Flutterwave public key"
|
|
||||||
regex = '''FLWPUBK_TEST-(?i)[a-h0-9]{32}-X'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "flutterwave-secret-key"
|
|
||||||
description = "Flutterwave secret key"
|
|
||||||
regex = '''FLWSECK_TEST-(?i)[a-h0-9]{32}-X'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "flutterwave-enc-key"
|
|
||||||
description = "Flutterwave encrypted key"
|
|
||||||
regex = '''FLWSECK_TEST[a-h0-9]{12}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "frameio-api-token"
|
|
||||||
description = "Frame.io API token"
|
|
||||||
regex = '''fio-u-(?i)[a-z0-9\-_=]{64}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "gocardless-api-token"
|
|
||||||
description = "GoCardless API token"
|
|
||||||
regex = '''['\"]live_(?i)[a-z0-9\-_=]{40}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "grafana-api-token"
|
|
||||||
description = "Grafana API token"
|
|
||||||
regex = '''['\"]eyJrIjoi(?i)[a-z0-9\-_=]{72,92}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "hashicorp-tf-api-token"
|
|
||||||
description = "HashiCorp Terraform user/org API token"
|
|
||||||
regex = '''['\"](?i)[a-z0-9]{14}\.atlasv1\.[a-z0-9\-_=]{60,70}['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "hubspot-api-token"
|
|
||||||
description = "HubSpot API token"
|
|
||||||
regex = '''(?i)(hubspot[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "intercom-api-token"
|
|
||||||
description = "Intercom API token"
|
|
||||||
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9=_]{60})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "intercom-client-secret"
|
|
||||||
description = "Intercom client secret/ID"
|
|
||||||
regex = '''(?i)(intercom[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "ionic-api-token"
|
|
||||||
description = "Ionic API token"
|
|
||||||
regex = '''(?i)(ionic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](ion_[a-z0-9]{42})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "linear-api-token"
|
|
||||||
description = "Linear API token"
|
|
||||||
regex = '''lin_api_(?i)[a-z0-9]{40}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "linear-client-secret"
|
|
||||||
description = "Linear client secret/ID"
|
|
||||||
regex = '''(?i)(linear[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "lob-api-key"
|
|
||||||
description = "Lob API Key"
|
|
||||||
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((live|test)_[a-f0-9]{35})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "lob-pub-api-key"
|
|
||||||
description = "Lob Publishable API Key"
|
|
||||||
regex = '''(?i)(lob[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]((test|live)_pub_[a-f0-9]{31})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "mailchimp-api-key"
|
|
||||||
description = "Mailchimp API key"
|
|
||||||
regex = '''(?i)(mailchimp[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-f0-9]{32}-us20)['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "mailgun-private-api-token"
|
|
||||||
description = "Mailgun private API token"
|
|
||||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](key-[a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "mailgun-pub-key"
|
|
||||||
description = "Mailgun public validation key"
|
|
||||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"](pubkey-[a-f0-9]{32})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "mailgun-signing-key"
|
|
||||||
description = "Mailgun webhook signing key"
|
|
||||||
regex = '''(?i)(mailgun[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{32}-[a-h0-9]{8}-[a-h0-9]{8})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "mapbox-api-token"
|
|
||||||
description = "Mapbox API token"
|
|
||||||
regex = '''(?i)(pk\.[a-z0-9]{60}\.[a-z0-9]{22})'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "messagebird-api-token"
|
|
||||||
description = "MessageBird API token"
|
|
||||||
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{25})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "messagebird-client-id"
|
|
||||||
description = "MessageBird API client ID"
|
|
||||||
regex = '''(?i)(messagebird[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{8}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{4}-[a-h0-9]{12})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "new-relic-user-api-key"
|
|
||||||
description = "New Relic user API Key"
|
|
||||||
regex = '''['\"](NRAK-[A-Z0-9]{27})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "new-relic-user-api-id"
|
|
||||||
description = "New Relic user API ID"
|
|
||||||
regex = '''(?i)(newrelic[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([A-Z0-9]{64})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "new-relic-browser-api-token"
|
|
||||||
description = "New Relic ingest browser API token"
|
|
||||||
regex = '''['\"](NRJS-[a-f0-9]{19})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "npm-access-token"
|
|
||||||
description = "npm access token"
|
|
||||||
regex = '''['\"](npm_(?i)[a-z0-9]{36})['\"]'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "planetscale-password"
|
|
||||||
description = "PlanetScale password"
|
|
||||||
regex = '''pscale_pw_(?i)[a-z0-9\-_\.]{43}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "planetscale-api-token"
|
|
||||||
description = "PlanetScale API token"
|
|
||||||
regex = '''pscale_tkn_(?i)[a-z0-9\-_\.]{43}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "postman-api-token"
|
|
||||||
description = "Postman API token"
|
|
||||||
regex = '''PMAK-(?i)[a-f0-9]{24}\-[a-f0-9]{34}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "pulumi-api-token"
|
|
||||||
description = "Pulumi API token"
|
|
||||||
regex = '''pul-[a-f0-9]{40}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "rubygems-api-token"
|
|
||||||
description = "Rubygem API token"
|
|
||||||
regex = '''rubygems_[a-f0-9]{48}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "sendgrid-api-token"
|
|
||||||
description = "SendGrid API token"
|
|
||||||
regex = '''SG\.(?i)[a-z0-9_\-\.]{66}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "sendinblue-api-token"
|
|
||||||
description = "Sendinblue API token"
|
|
||||||
regex = '''xkeysib-[a-f0-9]{64}\-(?i)[a-z0-9]{16}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "shippo-api-token"
|
|
||||||
description = "Shippo API token"
|
|
||||||
regex = '''shippo_(live|test)_[a-f0-9]{40}'''
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "linkedin-client-secret"
|
|
||||||
description = "LinkedIn Client secret"
|
|
||||||
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z]{16})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "linkedin-client-id"
|
|
||||||
description = "LinkedIn Client ID"
|
|
||||||
regex = '''(?i)(linkedin[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{14})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "twitch-api-token"
|
|
||||||
description = "Twitch API token"
|
|
||||||
regex = '''(?i)(twitch[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-z0-9]{30})['\"]'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "typeform-api-token"
|
|
||||||
description = "Typeform API token"
|
|
||||||
regex = '''(?i)(typeform[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}(tfp_[a-z0-9\-_\.=]{59})'''
|
|
||||||
secretGroup = 3
|
|
||||||
|
|
||||||
[[rules]]
|
|
||||||
id = "generic-api-key"
|
|
||||||
description = "Generic API Key"
|
|
||||||
regex = '''(?i)((key|api[^Version]|token|secret|password)[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9a-zA-Z\-_=]{8,64})['\"]'''
|
|
||||||
entropy = 3.7
|
|
||||||
secretGroup = 4
|
|
||||||
|
|
||||||
|
|
||||||
[allowlist]
|
|
||||||
description = "global allow lists"
|
|
||||||
regexes = ['''219-09-9999''', '''078-05-1120''', '''(9[0-9]{2}|666)-\d{2}-\d{4}''', '''RPM-GPG-KEY.*''', '''.*:.*StrelkaHexDump.*''', '''.*:.*PLACEHOLDER.*''']
|
|
||||||
paths = [
|
|
||||||
'''gitleaks.toml''',
|
|
||||||
'''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
|
|
||||||
'''(go.mod|go.sum)$''',
|
|
||||||
'''salt/nginx/files/enterprise-attack.json'''
|
|
||||||
]
|
|
||||||
190
.github/DISCUSSION_TEMPLATE/2-4.yml
vendored
@@ -1,190 +0,0 @@
|
|||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
⚠️ This category is solely for conversations related to Security Onion 2.4 ⚠️
|
|
||||||
|
|
||||||
If your organization needs more immediate, enterprise grade professional support, with one-on-one virtual meetings and screensharing, contact us via our website: https://securityonion.com/support
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Version
|
|
||||||
description: Which version of Security Onion 2.4.x are you asking about?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- 2.4 Pre-release (Beta, Release Candidate)
|
|
||||||
- 2.4.10
|
|
||||||
- 2.4.20
|
|
||||||
- 2.4.30
|
|
||||||
- 2.4.40
|
|
||||||
- 2.4.50
|
|
||||||
- 2.4.60
|
|
||||||
- 2.4.70
|
|
||||||
- 2.4.80
|
|
||||||
- 2.4.90
|
|
||||||
- 2.4.100
|
|
||||||
- Other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Installation Method
|
|
||||||
description: How did you install Security Onion?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Security Onion ISO image
|
|
||||||
- Network installation on Red Hat derivative like Oracle, Rocky, Alma, etc.
|
|
||||||
- Network installation on Ubuntu
|
|
||||||
- Network installation on Debian
|
|
||||||
- Other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Description
|
|
||||||
description: >
|
|
||||||
Is this discussion about installation, configuration, upgrading, or other?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- installation
|
|
||||||
- configuration
|
|
||||||
- upgrading
|
|
||||||
- other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Installation Type
|
|
||||||
description: >
|
|
||||||
When you installed, did you choose Import, Eval, Standalone, Distributed, or something else?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Import
|
|
||||||
- Eval
|
|
||||||
- Standalone
|
|
||||||
- Distributed
|
|
||||||
- other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Location
|
|
||||||
description: >
|
|
||||||
Is this deployment in the cloud, on-prem with Internet access, or airgap?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- cloud
|
|
||||||
- on-prem with Internet access
|
|
||||||
- airgap
|
|
||||||
- other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Hardware Specs
|
|
||||||
description: >
|
|
||||||
Does your hardware meet or exceed the minimum requirements for your installation type as shown at https://docs.securityonion.net/en/2.4/hardware.html?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Meets minimum requirements
|
|
||||||
- Exceeds minimum requirements
|
|
||||||
- Does not meet minimum requirements
|
|
||||||
- other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: CPU
|
|
||||||
description: How many CPU cores do you have?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: RAM
|
|
||||||
description: How much RAM do you have?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Storage for /
|
|
||||||
description: How much storage do you have for the / partition?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: input
|
|
||||||
attributes:
|
|
||||||
label: Storage for /nsm
|
|
||||||
description: How much storage do you have for the /nsm partition?
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Network Traffic Collection
|
|
||||||
description: >
|
|
||||||
Are you collecting network traffic from a tap or span port?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- tap
|
|
||||||
- span port
|
|
||||||
- other (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Network Traffic Speeds
|
|
||||||
description: >
|
|
||||||
How much network traffic are you monitoring?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Less than 1Gbps
|
|
||||||
- 1Gbps to 10Gbps
|
|
||||||
- more than 10Gbps
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Status
|
|
||||||
description: >
|
|
||||||
Does SOC Grid show all services on all nodes as running OK?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Yes, all services on all nodes are running OK
|
|
||||||
- No, one or more services are failed (please provide detail below)
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Salt Status
|
|
||||||
description: >
|
|
||||||
Do you get any failures when you run "sudo salt-call state.highstate"?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Yes, there are salt failures (please provide detail below)
|
|
||||||
- No, there are no failures
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: dropdown
|
|
||||||
attributes:
|
|
||||||
label: Logs
|
|
||||||
description: >
|
|
||||||
Are there any additional clues in /opt/so/log/?
|
|
||||||
options:
|
|
||||||
-
|
|
||||||
- Yes, there are additional clues in /opt/so/log/ (please provide detail below)
|
|
||||||
- No, there are no additional clues
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: Detail
|
|
||||||
description: Please read our discussion guidelines at https://github.com/Security-Onion-Solutions/securityonion/discussions/1720 and then provide detailed information to help us help you.
|
|
||||||
placeholder: |-
|
|
||||||
STOP! Before typing, please read our discussion guidelines at https://github.com/Security-Onion-Solutions/securityonion/discussions/1720 in their entirety!
|
|
||||||
|
|
||||||
If your organization needs more immediate, enterprise grade professional support, with one-on-one virtual meetings and screensharing, contact us via our website: https://securityonion.com/support
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: Guidelines
|
|
||||||
options:
|
|
||||||
- label: I have read the discussion guidelines at https://github.com/Security-Onion-Solutions/securityonion/discussions/1720 and assert that I have followed the guidelines.
|
|
||||||
required: true
|
|
||||||
32
.github/workflows/close-threads.yml
vendored
@@ -1,32 +0,0 @@
|
|||||||
name: 'Close Threads'
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '50 1 * * *'
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
discussions: write
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: lock-threads
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
close-threads:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
steps:
|
|
||||||
- uses: actions/stale@v5
|
|
||||||
with:
|
|
||||||
days-before-issue-stale: -1
|
|
||||||
days-before-issue-close: 60
|
|
||||||
stale-issue-message: "This issue is stale because it has been inactive for an extended period. Stale issues convey that the issue, while important to someone, is not critical enough for the author, or other community members to work on, sponsor, or otherwise shepherd the issue through to a resolution."
|
|
||||||
close-issue-message: "This issue was closed because it has been stale for an extended period. It will be automatically locked in 30 days, after which no further commenting will be available."
|
|
||||||
days-before-pr-stale: 45
|
|
||||||
days-before-pr-close: 60
|
|
||||||
stale-pr-message: "This PR is stale because it has been inactive for an extended period. The longer a PR remains stale the more out of date with the main branch it becomes."
|
|
||||||
close-pr-message: "This PR was closed because it has been stale for an extended period. It will be automatically locked in 30 days. If there is still a commitment to finishing this PR re-open it before it is locked."
|
|
||||||
24
.github/workflows/contrib.yml
vendored
@@ -1,24 +0,0 @@
|
|||||||
name: contrib
|
|
||||||
on:
|
|
||||||
issue_comment:
|
|
||||||
types: [created]
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened,closed,synchronize]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
CLAssistant:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: "Contributor Check"
|
|
||||||
if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target'
|
|
||||||
uses: cla-assistant/github-action@v2.3.1
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
|
|
||||||
with:
|
|
||||||
path-to-signatures: 'signatures_v1.json'
|
|
||||||
path-to-document: 'https://securityonionsolutions.com/cla'
|
|
||||||
allowlist: dependabot[bot],jertel,dougburks,TOoSmOotH,weslambert,defensivedepth,m0duspwnens
|
|
||||||
remote-organization-name: Security-Onion-Solutions
|
|
||||||
remote-repository-name: licensing
|
|
||||||
|
|
||||||
4
.github/workflows/leaktest.yml
vendored
@@ -12,6 +12,4 @@ jobs:
|
|||||||
fetch-depth: '0'
|
fetch-depth: '0'
|
||||||
|
|
||||||
- name: Gitleaks
|
- name: Gitleaks
|
||||||
uses: gitleaks/gitleaks-action@v1.6.0
|
uses: zricethezav/gitleaks-action@master
|
||||||
with:
|
|
||||||
config-path: .github/.gitleaks.toml
|
|
||||||
|
|||||||
25
.github/workflows/lock-threads.yml
vendored
@@ -1,25 +0,0 @@
|
|||||||
name: 'Lock Threads'
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '50 2 * * *'
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
permissions:
|
|
||||||
issues: write
|
|
||||||
pull-requests: write
|
|
||||||
discussions: write
|
|
||||||
|
|
||||||
concurrency:
|
|
||||||
group: lock-threads
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lock-threads:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: jertel/lock-threads@main
|
|
||||||
with:
|
|
||||||
include-discussion-currently-open: true
|
|
||||||
discussion-inactive-days: 90
|
|
||||||
issue-inactive-days: 30
|
|
||||||
pr-inactive-days: 30
|
|
||||||
39
.github/workflows/pythontest.yml
vendored
@@ -1,39 +0,0 @@
|
|||||||
name: python-test
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
paths:
|
|
||||||
- "salt/sensoroni/files/analyzers/**"
|
|
||||||
- "salt/manager/tools/sbin"
|
|
||||||
pull_request:
|
|
||||||
paths:
|
|
||||||
- "salt/sensoroni/files/analyzers/**"
|
|
||||||
- "salt/manager/tools/sbin"
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
python-version: ["3.10"]
|
|
||||||
python-code-path: ["salt/sensoroni/files/analyzers", "salt/manager/tools/sbin"]
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- uses: actions/checkout@v3
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
|
||||||
uses: actions/setup-python@v3
|
|
||||||
with:
|
|
||||||
python-version: ${{ matrix.python-version }}
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
python -m pip install --upgrade pip
|
|
||||||
python -m pip install flake8 pytest pytest-cov
|
|
||||||
find . -name requirements.txt -exec pip install -r {} \;
|
|
||||||
- name: Lint with flake8
|
|
||||||
run: |
|
|
||||||
flake8 ${{ matrix.python-code-path }} --show-source --max-complexity=12 --doctests --max-line-length=200 --statistics
|
|
||||||
- name: Test with pytest
|
|
||||||
run: |
|
|
||||||
pytest ${{ matrix.python-code-path }} --cov=${{ matrix.python-code-path }} --doctest-modules --cov-report=term --cov-fail-under=100 --cov-config=pytest.ini
|
|
||||||
13
.gitignore
vendored
@@ -56,15 +56,4 @@ $RECYCLE.BIN/
|
|||||||
# Windows shortcuts
|
# Windows shortcuts
|
||||||
*.lnk
|
*.lnk
|
||||||
|
|
||||||
# End of https://www.gitignore.io/api/macos,windows
|
# End of https://www.gitignore.io/api/macos,windows
|
||||||
|
|
||||||
# Pytest output
|
|
||||||
__pycache__
|
|
||||||
.pytest_cache
|
|
||||||
.coverage
|
|
||||||
*.pyc
|
|
||||||
.venv
|
|
||||||
|
|
||||||
# Analyzer dev/test config files
|
|
||||||
*_dev.yaml
|
|
||||||
site-packages
|
|
||||||
@@ -29,11 +29,6 @@
|
|||||||
|
|
||||||
* See this document's [code styling and conventions section](#code-style-and-conventions) below to be sure your PR fits our code requirements prior to submitting.
|
* See this document's [code styling and conventions section](#code-style-and-conventions) below to be sure your PR fits our code requirements prior to submitting.
|
||||||
|
|
||||||
* Change behavior (fix a bug, add a new feature) separately from refactoring code. Refactor pull requests are welcome, but ensure your new code behaves exactly the same as the old.
|
|
||||||
|
|
||||||
* **Do not refactor code for non-functional reasons**. If you are submitting a pull request that refactors code, ensure the refactor is improving the functionality of the code you're refactoring (e.g. decreasing complexity, removing reliance on 3rd party tools, improving performance).
|
|
||||||
|
|
||||||
* Before submitting a PR with significant changes to the project, [start a discussion](https://github.com/Security-Onion-Solutions/securityonion/discussions/new) explaining what you hope to acheive. The project maintainers will provide feedback and determine whether your goal aligns with the project.
|
|
||||||
|
|
||||||
|
|
||||||
### Code style and conventions
|
### Code style and conventions
|
||||||
@@ -42,5 +37,3 @@
|
|||||||
* All new Bash code should pass [ShellCheck](https://www.shellcheck.net/) analysis. Where errors can be *safely* [ignored](https://github.com/koalaman/shellcheck/wiki/Ignore), the relevant disable directive should be accompanied by a brief explanation as to why the error is being ignored.
|
* All new Bash code should pass [ShellCheck](https://www.shellcheck.net/) analysis. Where errors can be *safely* [ignored](https://github.com/koalaman/shellcheck/wiki/Ignore), the relevant disable directive should be accompanied by a brief explanation as to why the error is being ignored.
|
||||||
|
|
||||||
* **Ensure all YAML (this includes Salt states and pillars) is properly formatted**. The spec for YAML v1.2 can be found [here](https://yaml.org/spec/1.2/spec.html), however there are numerous online resources with simpler descriptions of its formatting rules.
|
* **Ensure all YAML (this includes Salt states and pillars) is properly formatted**. The spec for YAML v1.2 can be found [here](https://yaml.org/spec/1.2/spec.html), however there are numerous online resources with simpler descriptions of its formatting rules.
|
||||||
|
|
||||||
* **All code of any language should match the style of other code of that same language within the project.** Be sure that any changes you make do not break from the pre-existing style of Security Onion code.
|
|
||||||
|
|||||||
32
README.md
@@ -1,47 +1,35 @@
|
|||||||
## Security Onion 2.4
|
## Security Onion 2.3.80
|
||||||
|
|
||||||
Security Onion 2.4 is here!
|
Security Onion 2.3.80 is here!
|
||||||
|
|
||||||
## Screenshots
|
## Screenshots
|
||||||
|
|
||||||
Alerts
|
Alerts
|
||||||

|

|
||||||
|
|
||||||
Dashboards
|
|
||||||

|
|
||||||
|
|
||||||
Hunt
|
Hunt
|
||||||

|

|
||||||
|
|
||||||
PCAP
|
|
||||||

|
|
||||||
|
|
||||||
Grid
|
|
||||||

|
|
||||||
|
|
||||||
Config
|
|
||||||

|
|
||||||
|
|
||||||
### Release Notes
|
### Release Notes
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/release-notes.html
|
https://docs.securityonion.net/en/2.3/release-notes.html
|
||||||
|
|
||||||
### Requirements
|
### Requirements
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/hardware.html
|
https://docs.securityonion.net/en/2.3/hardware.html
|
||||||
|
|
||||||
### Download
|
### Download
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/download.html
|
https://docs.securityonion.net/en/2.3/download.html
|
||||||
|
|
||||||
### Installation
|
### Installation
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/installation.html
|
https://docs.securityonion.net/en/2.3/installation.html
|
||||||
|
|
||||||
### FAQ
|
### FAQ
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/faq.html
|
https://docs.securityonion.net/en/2.3/faq.html
|
||||||
|
|
||||||
### Feedback
|
### Feedback
|
||||||
|
|
||||||
https://docs.securityonion.net/en/2.4/community-support.html
|
https://docs.securityonion.net/en/2.3/community-support.html
|
||||||
|
|||||||
@@ -4,8 +4,7 @@
|
|||||||
|
|
||||||
| Version | Supported |
|
| Version | Supported |
|
||||||
| ------- | ------------------ |
|
| ------- | ------------------ |
|
||||||
| 2.4.x | :white_check_mark: |
|
| 2.x.x | :white_check_mark: |
|
||||||
| 2.3.x | :white_check_mark: |
|
|
||||||
| 16.04.x | :x: |
|
| 16.04.x | :x: |
|
||||||
|
|
||||||
Security Onion 16.04 has reached End Of Life and is no longer supported.
|
Security Onion 16.04 has reached End Of Life and is no longer supported.
|
||||||
|
|||||||
@@ -1,46 +1,47 @@
|
|||||||
### 2.4.60-20240320 ISO image released on 2024/03/20
|
### 2.3.80 ISO image built on 2021/09/27
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
### Download and Verify
|
### Download and Verify
|
||||||
|
|
||||||
2.4.60-20240320 ISO image:
|
2.3.80 ISO image:
|
||||||
https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso
|
https://download.securityonion.net/file/securityonion/securityonion-2.3.80.iso
|
||||||
|
|
||||||
MD5: 178DD42D06B2F32F3870E0C27219821E
|
MD5: 24F38563860416F4A8ABE18746913E14
|
||||||
SHA1: 73EDCD50817A7F6003FE405CF1808A30D034F89D
|
SHA1: F923C005F54EA2A17AB225ADA0DA46042707AAD9
|
||||||
SHA256: DD334B8D7088A7B78160C253B680D645E25984BA5CCAB5CC5C327CA72137FC06
|
SHA256: 8E95D10AF664D9A406C168EC421D943CB23F0D0C1813C6C2DBA9B4E131984018
|
||||||
|
|
||||||
Signature for ISO image:
|
Signature for ISO image:
|
||||||
https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig
|
https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.80.iso.sig
|
||||||
|
|
||||||
Signing key:
|
Signing key:
|
||||||
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS
|
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS
|
||||||
|
|
||||||
For example, here are the steps you can use on most Linux distributions to download and verify our Security Onion ISO image.
|
For example, here are the steps you can use on most Linux distributions to download and verify our Security Onion ISO image.
|
||||||
|
|
||||||
Download and import the signing key:
|
Download and import the signing key:
|
||||||
```
|
```
|
||||||
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/2.4/main/KEYS -O - | gpg --import -
|
wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS -O - | gpg --import -
|
||||||
```
|
```
|
||||||
|
|
||||||
Download the signature file for the ISO:
|
Download the signature file for the ISO:
|
||||||
```
|
```
|
||||||
wget https://github.com/Security-Onion-Solutions/securityonion/raw/2.4/main/sigs/securityonion-2.4.60-20240320.iso.sig
|
wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.80.iso.sig
|
||||||
```
|
```
|
||||||
|
|
||||||
Download the ISO image:
|
Download the ISO image:
|
||||||
```
|
```
|
||||||
wget https://download.securityonion.net/file/securityonion/securityonion-2.4.60-20240320.iso
|
wget https://download.securityonion.net/file/securityonion/securityonion-2.3.80.iso
|
||||||
```
|
```
|
||||||
|
|
||||||
Verify the downloaded ISO image using the signature file:
|
Verify the downloaded ISO image using the signature file:
|
||||||
```
|
```
|
||||||
gpg --verify securityonion-2.4.60-20240320.iso.sig securityonion-2.4.60-20240320.iso
|
gpg --verify securityonion-2.3.80.iso.sig securityonion-2.3.80.iso
|
||||||
```
|
```
|
||||||
|
|
||||||
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
|
The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
|
||||||
```
|
```
|
||||||
gpg: Signature made Tue 19 Mar 2024 03:17:58 PM EDT using RSA key ID FE507013
|
gpg: Signature made Mon 27 Sep 2021 08:55:01 AM EDT using RSA key ID FE507013
|
||||||
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
|
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
|
||||||
gpg: WARNING: This key is not certified with a trusted signature!
|
gpg: WARNING: This key is not certified with a trusted signature!
|
||||||
gpg: There is no indication that the signature belongs to the owner.
|
gpg: There is no indication that the signature belongs to the owner.
|
||||||
@@ -48,4 +49,4 @@ Primary key fingerprint: C804 A93D 36BE 0C73 3EA1 9644 7C10 60B7 FE50 7013
|
|||||||
```
|
```
|
||||||
|
|
||||||
Once you've verified the ISO image, you're ready to proceed to our Installation guide:
|
Once you've verified the ISO image, you're ready to proceed to our Installation guide:
|
||||||
https://docs.securityonion.net/en/2.4/installation.html
|
https://docs.securityonion.net/en/2.3/installation.html
|
||||||
BIN
assets/images/screenshots/alerts-1.png
Normal file
|
After Width: | Height: | Size: 245 KiB |
|
Before Width: | Height: | Size: 186 KiB |
|
Before Width: | Height: | Size: 21 KiB |
|
Before Width: | Height: | Size: 22 KiB |
|
Before Width: | Height: | Size: 12 KiB |
|
Before Width: | Height: | Size: 201 KiB |
|
Before Width: | Height: | Size: 386 KiB |
BIN
assets/images/screenshots/hunt-1.png
Normal file
|
After Width: | Height: | Size: 168 KiB |
|
Before Width: | Height: | Size: 191 KiB |
@@ -1,8 +1,8 @@
|
|||||||
{% import_yaml 'firewall/ports/ports.yaml' as default_portgroups %}
|
{% import_yaml 'firewall/portgroups.yaml' as default_portgroups %}
|
||||||
{% set default_portgroups = default_portgroups.firewall.ports %}
|
{% set default_portgroups = default_portgroups.firewall.aliases.ports %}
|
||||||
{% import_yaml 'firewall/ports/ports.local.yaml' as local_portgroups %}
|
{% import_yaml 'firewall/portgroups.local.yaml' as local_portgroups %}
|
||||||
{% if local_portgroups.firewall.ports %}
|
{% if local_portgroups.firewall.aliases.ports %}
|
||||||
{% set local_portgroups = local_portgroups.firewall.ports %}
|
{% set local_portgroups = local_portgroups.firewall.aliases.ports %}
|
||||||
{% else %}
|
{% else %}
|
||||||
{% set local_portgroups = {} %}
|
{% set local_portgroups = {} %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
@@ -12,11 +12,10 @@ role:
|
|||||||
eval:
|
eval:
|
||||||
fleet:
|
fleet:
|
||||||
heavynode:
|
heavynode:
|
||||||
idh:
|
helixsensor:
|
||||||
import:
|
import:
|
||||||
manager:
|
manager:
|
||||||
managersearch:
|
managersearch:
|
||||||
receiver:
|
|
||||||
standalone:
|
standalone:
|
||||||
searchnode:
|
searchnode:
|
||||||
sensor:
|
sensor:
|
||||||
74
files/firewall/hostgroups.local.yaml
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
firewall:
|
||||||
|
hostgroups:
|
||||||
|
analyst:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
beats_endpoint:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
beats_endpoint_ssl:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
elasticsearch_rest:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
endgame:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
fleet:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
heavy_node:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
manager:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
minion:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
node:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
osquery_endpoint:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
search_node:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
sensor:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
strelka_frontend:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
syslog:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
wazuh_agent:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
wazuh_api:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
|
wazuh_authd:
|
||||||
|
ips:
|
||||||
|
delete:
|
||||||
|
insert:
|
||||||
3
files/firewall/portgroups.local.yaml
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
firewall:
|
||||||
|
aliases:
|
||||||
|
ports:
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
firewall:
|
|
||||||
ports:
|
|
||||||
@@ -41,8 +41,7 @@ file_roots:
|
|||||||
base:
|
base:
|
||||||
- /opt/so/saltstack/local/salt
|
- /opt/so/saltstack/local/salt
|
||||||
- /opt/so/saltstack/default/salt
|
- /opt/so/saltstack/default/salt
|
||||||
- /nsm/elastic-fleet/artifacts
|
|
||||||
- /opt/so/rules/nids
|
|
||||||
|
|
||||||
# The master_roots setting configures a master-only copy of the file_roots dictionary,
|
# The master_roots setting configures a master-only copy of the file_roots dictionary,
|
||||||
# used by the state compiler.
|
# used by the state compiler.
|
||||||
@@ -65,4 +64,10 @@ peer:
|
|||||||
.*:
|
.*:
|
||||||
- x509.sign_remote_certificate
|
- x509.sign_remote_certificate
|
||||||
|
|
||||||
|
reactor:
|
||||||
|
- 'so/fleet':
|
||||||
|
- salt://reactor/fleet.sls
|
||||||
|
- 'salt/beacon/*/watch_sqlite_db//opt/so/conf/kratos/db/sqlite.db':
|
||||||
|
- salt://reactor/kratos.sls
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -45,10 +45,12 @@ echo " rootfs: $ROOTFS" >> $local_salt_dir/pillar/data/$TYPE.sls
|
|||||||
echo " nsmfs: $NSM" >> $local_salt_dir/pillar/data/$TYPE.sls
|
echo " nsmfs: $NSM" >> $local_salt_dir/pillar/data/$TYPE.sls
|
||||||
if [ $TYPE == 'sensorstab' ]; then
|
if [ $TYPE == 'sensorstab' ]; then
|
||||||
echo " monint: bond0" >> $local_salt_dir/pillar/data/$TYPE.sls
|
echo " monint: bond0" >> $local_salt_dir/pillar/data/$TYPE.sls
|
||||||
|
salt-call state.apply grafana queue=True
|
||||||
fi
|
fi
|
||||||
if [ $TYPE == 'evaltab' ] || [ $TYPE == 'standalonetab' ]; then
|
if [ $TYPE == 'evaltab' ] || [ $TYPE == 'standalonetab' ]; then
|
||||||
echo " monint: bond0" >> $local_salt_dir/pillar/data/$TYPE.sls
|
echo " monint: bond0" >> $local_salt_dir/pillar/data/$TYPE.sls
|
||||||
if [ ! $10 ]; then
|
if [ ! $10 ]; then
|
||||||
|
salt-call state.apply grafana queue=True
|
||||||
salt-call state.apply utility queue=True
|
salt-call state.apply utility queue=True
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|||||||
@@ -1,2 +1,13 @@
|
|||||||
elasticsearch:
|
elasticsearch:
|
||||||
templates:
|
templates:
|
||||||
|
- so/so-beats-template.json.jinja
|
||||||
|
- so/so-common-template.json.jinja
|
||||||
|
- so/so-firewall-template.json.jinja
|
||||||
|
- so/so-flow-template.json.jinja
|
||||||
|
- so/so-ids-template.json.jinja
|
||||||
|
- so/so-import-template.json.jinja
|
||||||
|
- so/so-osquery-template.json.jinja
|
||||||
|
- so/so-ossec-template.json.jinja
|
||||||
|
- so/so-strelka-template.json.jinja
|
||||||
|
- so/so-syslog-template.json.jinja
|
||||||
|
- so/so-zeek-template.json.jinja
|
||||||
|
|||||||
@@ -1,2 +0,0 @@
|
|||||||
elasticsearch:
|
|
||||||
index_settings:
|
|
||||||
@@ -1,2 +1,14 @@
|
|||||||
elasticsearch:
|
elasticsearch:
|
||||||
templates:
|
templates:
|
||||||
|
- so/so-beats-template.json.jinja
|
||||||
|
- so/so-common-template.json.jinja
|
||||||
|
- so/so-endgame-template.json.jinja
|
||||||
|
- so/so-firewall-template.json.jinja
|
||||||
|
- so/so-flow-template.json.jinja
|
||||||
|
- so/so-ids-template.json.jinja
|
||||||
|
- so/so-import-template.json.jinja
|
||||||
|
- so/so-osquery-template.json.jinja
|
||||||
|
- so/so-ossec-template.json.jinja
|
||||||
|
- so/so-strelka-template.json.jinja
|
||||||
|
- so/so-syslog-template.json.jinja
|
||||||
|
- so/so-zeek-template.json.jinja
|
||||||
|
|||||||
@@ -1,2 +1,14 @@
|
|||||||
elasticsearch:
|
elasticsearch:
|
||||||
templates:
|
templates:
|
||||||
|
- so/so-beats-template.json.jinja
|
||||||
|
- so/so-common-template.json.jinja
|
||||||
|
- so/so-endgame-template.json.jinja
|
||||||
|
- so/so-firewall-template.json.jinja
|
||||||
|
- so/so-flow-template.json.jinja
|
||||||
|
- so/so-ids-template.json.jinja
|
||||||
|
- so/so-import-template.json.jinja
|
||||||
|
- so/so-osquery-template.json.jinja
|
||||||
|
- so/so-ossec-template.json.jinja
|
||||||
|
- so/so-strelka-template.json.jinja
|
||||||
|
- so/so-syslog-template.json.jinja
|
||||||
|
- so/so-zeek-template.json.jinja
|
||||||
|
|||||||
13
pillar/logrotate/init.sls
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
logrotate:
|
||||||
|
conf: |
|
||||||
|
daily
|
||||||
|
rotate 14
|
||||||
|
missingok
|
||||||
|
copytruncate
|
||||||
|
compress
|
||||||
|
create
|
||||||
|
extension .log
|
||||||
|
dateext
|
||||||
|
dateyesterday
|
||||||
|
group_conf: |
|
||||||
|
su root socore
|
||||||
42
pillar/logstash/helix.sls
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
logstash:
|
||||||
|
pipelines:
|
||||||
|
helix:
|
||||||
|
config:
|
||||||
|
- so/0010_input_hhbeats.conf
|
||||||
|
- so/1033_preprocess_snort.conf
|
||||||
|
- so/1100_preprocess_bro_conn.conf
|
||||||
|
- so/1101_preprocess_bro_dhcp.conf
|
||||||
|
- so/1102_preprocess_bro_dns.conf
|
||||||
|
- so/1103_preprocess_bro_dpd.conf
|
||||||
|
- so/1104_preprocess_bro_files.conf
|
||||||
|
- so/1105_preprocess_bro_ftp.conf
|
||||||
|
- so/1106_preprocess_bro_http.conf
|
||||||
|
- so/1107_preprocess_bro_irc.conf
|
||||||
|
- so/1108_preprocess_bro_kerberos.conf
|
||||||
|
- so/1109_preprocess_bro_notice.conf
|
||||||
|
- so/1110_preprocess_bro_rdp.conf
|
||||||
|
- so/1111_preprocess_bro_signatures.conf
|
||||||
|
- so/1112_preprocess_bro_smtp.conf
|
||||||
|
- so/1113_preprocess_bro_snmp.conf
|
||||||
|
- so/1114_preprocess_bro_software.conf
|
||||||
|
- so/1115_preprocess_bro_ssh.conf
|
||||||
|
- so/1116_preprocess_bro_ssl.conf
|
||||||
|
- so/1117_preprocess_bro_syslog.conf
|
||||||
|
- so/1118_preprocess_bro_tunnel.conf
|
||||||
|
- so/1119_preprocess_bro_weird.conf
|
||||||
|
- so/1121_preprocess_bro_mysql.conf
|
||||||
|
- so/1122_preprocess_bro_socks.conf
|
||||||
|
- so/1123_preprocess_bro_x509.conf
|
||||||
|
- so/1124_preprocess_bro_intel.conf
|
||||||
|
- so/1125_preprocess_bro_modbus.conf
|
||||||
|
- so/1126_preprocess_bro_sip.conf
|
||||||
|
- so/1127_preprocess_bro_radius.conf
|
||||||
|
- so/1128_preprocess_bro_pe.conf
|
||||||
|
- so/1129_preprocess_bro_rfb.conf
|
||||||
|
- so/1130_preprocess_bro_dnp3.conf
|
||||||
|
- so/1131_preprocess_bro_smb_files.conf
|
||||||
|
- so/1132_preprocess_bro_smb_mapping.conf
|
||||||
|
- so/1133_preprocess_bro_ntlm.conf
|
||||||
|
- so/1134_preprocess_bro_dce_rpc.conf
|
||||||
|
- so/8001_postprocess_common_ip_augmentation.conf
|
||||||
|
- so/9997_output_helix.conf.jinja
|
||||||
@@ -3,8 +3,6 @@ logstash:
|
|||||||
port_bindings:
|
port_bindings:
|
||||||
- 0.0.0.0:3765:3765
|
- 0.0.0.0:3765:3765
|
||||||
- 0.0.0.0:5044:5044
|
- 0.0.0.0:5044:5044
|
||||||
- 0.0.0.0:5055:5055
|
|
||||||
- 0.0.0.0:5056:5056
|
|
||||||
- 0.0.0.0:5644:5644
|
- 0.0.0.0:5644:5644
|
||||||
- 0.0.0.0:6050:6050
|
- 0.0.0.0:6050:6050
|
||||||
- 0.0.0.0:6051:6051
|
- 0.0.0.0:6051:6051
|
||||||
|
|||||||
10
pillar/logstash/manager.sls
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{%- set PIPELINE = salt['pillar.get']('global:pipeline', 'redis') %}
|
||||||
|
logstash:
|
||||||
|
pipelines:
|
||||||
|
manager:
|
||||||
|
config:
|
||||||
|
- so/0009_input_beats.conf
|
||||||
|
- so/0010_input_hhbeats.conf
|
||||||
|
- so/0011_input_endgame.conf
|
||||||
|
- so/9999_output_redis.conf.jinja
|
||||||
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
{% set node_types = {} %}
|
|
||||||
{% set cached_grains = salt.saltutil.runner('cache.grains', tgt='*') %}
|
|
||||||
{% for minionid, ip in salt.saltutil.runner(
|
|
||||||
'mine.get',
|
|
||||||
tgt='G@role:so-manager or G@role:so-managersearch or G@role:so-standalone or G@role:so-searchnode or G@role:so-heavynode or G@role:so-receiver or G@role:so-fleet ',
|
|
||||||
fun='network.ip_addrs',
|
|
||||||
tgt_type='compound') | dictsort()
|
|
||||||
%}
|
|
||||||
|
|
||||||
# only add a node to the pillar if it returned an ip from the mine
|
|
||||||
{% if ip | length > 0%}
|
|
||||||
{% set hostname = cached_grains[minionid]['host'] %}
|
|
||||||
{% set node_type = minionid.split('_')[1] %}
|
|
||||||
{% if node_type not in node_types.keys() %}
|
|
||||||
{% do node_types.update({node_type: {hostname: ip[0]}}) %}
|
|
||||||
{% else %}
|
|
||||||
{% if hostname not in node_types[node_type] %}
|
|
||||||
{% do node_types[node_type].update({hostname: ip[0]}) %}
|
|
||||||
{% else %}
|
|
||||||
{% do node_types[node_type][hostname].update(ip[0]) %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
|
|
||||||
logstash:
|
|
||||||
nodes:
|
|
||||||
{% for node_type, values in node_types.items() %}
|
|
||||||
{{node_type}}:
|
|
||||||
{% for hostname, ip in values.items() %}
|
|
||||||
{{hostname}}:
|
|
||||||
ip: {{ip}}
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
17
pillar/logstash/search.sls
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
{%- set PIPELINE = salt['pillar.get']('global:pipeline', 'minio') %}
|
||||||
|
logstash:
|
||||||
|
pipelines:
|
||||||
|
search:
|
||||||
|
config:
|
||||||
|
- so/0900_input_redis.conf.jinja
|
||||||
|
- so/9000_output_zeek.conf.jinja
|
||||||
|
- so/9002_output_import.conf.jinja
|
||||||
|
- so/9034_output_syslog.conf.jinja
|
||||||
|
- so/9050_output_filebeatmodules.conf.jinja
|
||||||
|
- so/9100_output_osquery.conf.jinja
|
||||||
|
- so/9400_output_suricata.conf.jinja
|
||||||
|
- so/9500_output_beats.conf.jinja
|
||||||
|
- so/9600_output_ossec.conf.jinja
|
||||||
|
- so/9700_output_strelka.conf.jinja
|
||||||
|
- so/9800_output_logscan.conf.jinja
|
||||||
|
- so/9900_output_endgame.conf.jinja
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
{% set node_types = {} %}
|
|
||||||
{% set manage_alived = salt.saltutil.runner('manage.alived', show_ip=True) %}
|
|
||||||
{% for minionid, ip in salt.saltutil.runner('mine.get', tgt='*', fun='network.ip_addrs', tgt_type='glob') | dictsort() %}
|
|
||||||
{% set hostname = minionid.split('_')[0] %}
|
|
||||||
{% set node_type = minionid.split('_')[1] %}
|
|
||||||
{% set is_alive = False %}
|
|
||||||
|
|
||||||
# only add a node to the pillar if it returned an ip from the mine
|
|
||||||
{% if ip | length > 0%}
|
|
||||||
{% if minionid in manage_alived.keys() %}
|
|
||||||
{% if ip[0] == manage_alived[minionid] %}
|
|
||||||
{% set is_alive = True %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% if node_type not in node_types.keys() %}
|
|
||||||
{% do node_types.update({node_type: {hostname: {'ip':ip[0], 'alive':is_alive }}}) %}
|
|
||||||
{% else %}
|
|
||||||
{% if hostname not in node_types[node_type] %}
|
|
||||||
{% do node_types[node_type].update({hostname: {'ip':ip[0], 'alive':is_alive}}) %}
|
|
||||||
{% else %}
|
|
||||||
{% do node_types[node_type][hostname].update({'ip':ip[0], 'alive':is_alive}) %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
node_data:
|
|
||||||
{% for node_type, host_values in node_types.items() %}
|
|
||||||
{% for hostname, details in host_values.items() %}
|
|
||||||
{{hostname}}:
|
|
||||||
ip: {{details.ip}}
|
|
||||||
alive: {{ details.alive }}
|
|
||||||
role: {{node_type}}
|
|
||||||
{% endfor %}
|
|
||||||
{% endfor %}
|
|
||||||
@@ -1,14 +0,0 @@
|
|||||||
# Copyright Jason Ertel (github.com/jertel).
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with
|
|
||||||
# the Elastic License 2.0.
|
|
||||||
|
|
||||||
# Note: Per the Elastic License 2.0, the second limitation states:
|
|
||||||
#
|
|
||||||
# "You may not move, change, disable, or circumvent the license key functionality
|
|
||||||
# in the software, and you may not remove or obscure any functionality in the
|
|
||||||
# software that is protected by the license key."
|
|
||||||
|
|
||||||
# This file is generated by Security Onion and contains a list of license-enabled features.
|
|
||||||
features: []
|
|
||||||
44
pillar/thresholding/pillar.example
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
thresholding:
|
||||||
|
sids:
|
||||||
|
8675309:
|
||||||
|
- threshold:
|
||||||
|
gen_id: 1
|
||||||
|
type: threshold
|
||||||
|
track: by_src
|
||||||
|
count: 10
|
||||||
|
seconds: 10
|
||||||
|
- threshold:
|
||||||
|
gen_id: 1
|
||||||
|
type: limit
|
||||||
|
track: by_dst
|
||||||
|
count: 100
|
||||||
|
seconds: 30
|
||||||
|
- rate_filter:
|
||||||
|
gen_id: 1
|
||||||
|
track: by_rule
|
||||||
|
count: 50
|
||||||
|
seconds: 30
|
||||||
|
new_action: alert
|
||||||
|
timeout: 30
|
||||||
|
- suppress:
|
||||||
|
gen_id: 1
|
||||||
|
track: by_either
|
||||||
|
ip: 10.10.3.7
|
||||||
|
11223344:
|
||||||
|
- threshold:
|
||||||
|
gen_id: 1
|
||||||
|
type: limit
|
||||||
|
track: by_dst
|
||||||
|
count: 10
|
||||||
|
seconds: 10
|
||||||
|
- rate_filter:
|
||||||
|
gen_id: 1
|
||||||
|
track: by_src
|
||||||
|
count: 50
|
||||||
|
seconds: 20
|
||||||
|
new_action: pass
|
||||||
|
timeout: 60
|
||||||
|
- suppress:
|
||||||
|
gen_id: 1
|
||||||
|
track: by_src
|
||||||
|
ip: 10.10.3.0/24
|
||||||
20
pillar/thresholding/pillar.usage
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
thresholding:
|
||||||
|
sids:
|
||||||
|
<signature id>:
|
||||||
|
- threshold:
|
||||||
|
gen_id: <generator id>
|
||||||
|
type: <threshold | limit | both>
|
||||||
|
track: <by_src | by_dst>
|
||||||
|
count: <count>
|
||||||
|
seconds: <seconds>
|
||||||
|
- rate_filter:
|
||||||
|
gen_id: <generator id>
|
||||||
|
track: <by_src | by_dst | by_rule | by_both>
|
||||||
|
count: <count>
|
||||||
|
seconds: <seconds>
|
||||||
|
new_action: <alert | pass>
|
||||||
|
timeout: <seconds>
|
||||||
|
- suppress:
|
||||||
|
gen_id: <generator id>
|
||||||
|
track: <by_src | by_dst | by_either>
|
||||||
|
ip: <ip | subnet>
|
||||||
332
pillar/top.sls
@@ -1,293 +1,119 @@
|
|||||||
base:
|
base:
|
||||||
'*':
|
'*':
|
||||||
- global.soc_global
|
|
||||||
- global.adv_global
|
|
||||||
- docker.soc_docker
|
|
||||||
- docker.adv_docker
|
|
||||||
- influxdb.token
|
|
||||||
- logrotate.soc_logrotate
|
|
||||||
- logrotate.adv_logrotate
|
|
||||||
- ntp.soc_ntp
|
|
||||||
- ntp.adv_ntp
|
|
||||||
- patch.needs_restarting
|
- patch.needs_restarting
|
||||||
- patch.soc_patch
|
- logrotate
|
||||||
- patch.adv_patch
|
- users
|
||||||
- sensoroni.soc_sensoroni
|
|
||||||
- sensoroni.adv_sensoroni
|
|
||||||
- telegraf.soc_telegraf
|
|
||||||
- telegraf.adv_telegraf
|
|
||||||
|
|
||||||
'* and not *_desktop':
|
'*_eval or *_helixsensor or *_heavynode or *_sensor or *_standalone or *_import':
|
||||||
- firewall.soc_firewall
|
- match: compound
|
||||||
- firewall.adv_firewall
|
- zeek
|
||||||
- nginx.soc_nginx
|
|
||||||
- nginx.adv_nginx
|
'*_managersearch or *_heavynode':
|
||||||
- node_data.ips
|
- match: compound
|
||||||
|
- logstash
|
||||||
|
- logstash.manager
|
||||||
|
- logstash.search
|
||||||
|
- elasticsearch.search
|
||||||
|
|
||||||
|
'*_manager':
|
||||||
|
- logstash
|
||||||
|
- logstash.manager
|
||||||
|
- elasticsearch.manager
|
||||||
|
|
||||||
'*_manager or *_managersearch':
|
'*_manager or *_managersearch':
|
||||||
- match: compound
|
- match: compound
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
- data.*
|
||||||
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||||
- elasticsearch.auth
|
- elasticsearch.auth
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
||||||
- kibana.secrets
|
- kibana.secrets
|
||||||
{% endif %}
|
{% endif %}
|
||||||
- secrets
|
- secrets
|
||||||
- manager.soc_manager
|
- global
|
||||||
- manager.adv_manager
|
|
||||||
- idstools.soc_idstools
|
|
||||||
- idstools.adv_idstools
|
|
||||||
- logstash.nodes
|
|
||||||
- logstash.soc_logstash
|
|
||||||
- logstash.adv_logstash
|
|
||||||
- soc.soc_soc
|
|
||||||
- soc.adv_soc
|
|
||||||
- soc.license
|
|
||||||
- kibana.soc_kibana
|
|
||||||
- kibana.adv_kibana
|
|
||||||
- kratos.soc_kratos
|
|
||||||
- kratos.adv_kratos
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- influxdb.soc_influxdb
|
|
||||||
- influxdb.adv_influxdb
|
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
- elasticfleet.soc_elasticfleet
|
|
||||||
- elasticfleet.adv_elasticfleet
|
|
||||||
- elastalert.soc_elastalert
|
|
||||||
- elastalert.adv_elastalert
|
|
||||||
- backup.soc_backup
|
|
||||||
- backup.adv_backup
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
- stig.soc_stig
|
|
||||||
|
|
||||||
'*_sensor':
|
'*_sensor':
|
||||||
|
- zeeklogs
|
||||||
- healthcheck.sensor
|
- healthcheck.sensor
|
||||||
- strelka.soc_strelka
|
- global
|
||||||
- strelka.adv_strelka
|
|
||||||
- zeek.soc_zeek
|
|
||||||
- zeek.adv_zeek
|
|
||||||
- bpf.soc_bpf
|
|
||||||
- bpf.adv_bpf
|
|
||||||
- pcap.soc_pcap
|
|
||||||
- pcap.adv_pcap
|
|
||||||
- suricata.soc_suricata
|
|
||||||
- suricata.adv_suricata
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
- stig.soc_stig
|
|
||||||
- soc.license
|
|
||||||
|
|
||||||
'*_eval':
|
'*_eval':
|
||||||
|
- data.*
|
||||||
|
- zeeklogs
|
||||||
- secrets
|
- secrets
|
||||||
- healthcheck.eval
|
- healthcheck.eval
|
||||||
- elasticsearch.index_templates
|
- elasticsearch.eval
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||||
- elasticsearch.auth
|
- elasticsearch.auth
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
||||||
- kibana.secrets
|
- kibana.secrets
|
||||||
{% endif %}
|
{% endif %}
|
||||||
- kratos.soc_kratos
|
- global
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
- elasticfleet.soc_elasticfleet
|
|
||||||
- elasticfleet.adv_elasticfleet
|
|
||||||
- elastalert.soc_elastalert
|
|
||||||
- elastalert.adv_elastalert
|
|
||||||
- manager.soc_manager
|
|
||||||
- manager.adv_manager
|
|
||||||
- idstools.soc_idstools
|
|
||||||
- idstools.adv_idstools
|
|
||||||
- soc.soc_soc
|
|
||||||
- soc.adv_soc
|
|
||||||
- soc.license
|
|
||||||
- kibana.soc_kibana
|
|
||||||
- kibana.adv_kibana
|
|
||||||
- strelka.soc_strelka
|
|
||||||
- strelka.adv_strelka
|
|
||||||
- kratos.soc_kratos
|
|
||||||
- kratos.adv_kratos
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- influxdb.soc_influxdb
|
|
||||||
- influxdb.adv_influxdb
|
|
||||||
- backup.soc_backup
|
|
||||||
- backup.adv_backup
|
|
||||||
- zeek.soc_zeek
|
|
||||||
- zeek.adv_zeek
|
|
||||||
- bpf.soc_bpf
|
|
||||||
- bpf.adv_bpf
|
|
||||||
- pcap.soc_pcap
|
|
||||||
- pcap.adv_pcap
|
|
||||||
- suricata.soc_suricata
|
|
||||||
- suricata.adv_suricata
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_standalone':
|
'*_standalone':
|
||||||
- logstash.nodes
|
- logstash
|
||||||
- logstash.soc_logstash
|
- logstash.manager
|
||||||
- logstash.adv_logstash
|
- logstash.search
|
||||||
- elasticsearch.index_templates
|
- elasticsearch.search
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||||
- elasticsearch.auth
|
- elasticsearch.auth
|
||||||
{% endif %}
|
{% endif %}
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
||||||
- kibana.secrets
|
- kibana.secrets
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
- data.*
|
||||||
|
- zeeklogs
|
||||||
- secrets
|
- secrets
|
||||||
- healthcheck.standalone
|
- healthcheck.standalone
|
||||||
- idstools.soc_idstools
|
- global
|
||||||
- idstools.adv_idstools
|
- minions.{{ grains.id }}
|
||||||
- kratos.soc_kratos
|
|
||||||
- kratos.adv_kratos
|
'*_node':
|
||||||
- redis.soc_redis
|
- global
|
||||||
- redis.adv_redis
|
|
||||||
- influxdb.soc_influxdb
|
|
||||||
- influxdb.adv_influxdb
|
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
- elasticfleet.soc_elasticfleet
|
|
||||||
- elasticfleet.adv_elasticfleet
|
|
||||||
- elastalert.soc_elastalert
|
|
||||||
- elastalert.adv_elastalert
|
|
||||||
- manager.soc_manager
|
|
||||||
- manager.adv_manager
|
|
||||||
- soc.soc_soc
|
|
||||||
- soc.adv_soc
|
|
||||||
- soc.license
|
|
||||||
- kibana.soc_kibana
|
|
||||||
- kibana.adv_kibana
|
|
||||||
- strelka.soc_strelka
|
|
||||||
- strelka.adv_strelka
|
|
||||||
- backup.soc_backup
|
|
||||||
- backup.adv_backup
|
|
||||||
- zeek.soc_zeek
|
|
||||||
- zeek.adv_zeek
|
|
||||||
- bpf.soc_bpf
|
|
||||||
- bpf.adv_bpf
|
|
||||||
- pcap.soc_pcap
|
|
||||||
- pcap.adv_pcap
|
|
||||||
- suricata.soc_suricata
|
|
||||||
- suricata.adv_suricata
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
- stig.soc_stig
|
|
||||||
|
|
||||||
'*_heavynode':
|
'*_heavynode':
|
||||||
|
- zeeklogs
|
||||||
- elasticsearch.auth
|
- elasticsearch.auth
|
||||||
- logstash.nodes
|
- global
|
||||||
- logstash.soc_logstash
|
|
||||||
- logstash.adv_logstash
|
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- zeek.soc_zeek
|
|
||||||
- zeek.adv_zeek
|
|
||||||
- bpf.soc_bpf
|
|
||||||
- bpf.adv_bpf
|
|
||||||
- pcap.soc_pcap
|
|
||||||
- pcap.adv_pcap
|
|
||||||
- suricata.soc_suricata
|
|
||||||
- suricata.adv_suricata
|
|
||||||
- strelka.soc_strelka
|
|
||||||
- strelka.adv_strelka
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_idh':
|
'*_helixsensor':
|
||||||
- idh.soc_idh
|
- fireeye
|
||||||
- idh.adv_idh
|
- zeeklogs
|
||||||
|
- logstash
|
||||||
|
- logstash.helix
|
||||||
|
- global
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_searchnode':
|
|
||||||
- logstash.nodes
|
|
||||||
- logstash.soc_logstash
|
|
||||||
- logstash.adv_logstash
|
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
|
||||||
- elasticsearch.auth
|
|
||||||
{% endif %}
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- minions.{{ grains.id }}
|
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
- stig.soc_stig
|
|
||||||
- soc.license
|
|
||||||
|
|
||||||
'*_receiver':
|
|
||||||
- logstash.nodes
|
|
||||||
- logstash.soc_logstash
|
|
||||||
- logstash.adv_logstash
|
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
|
||||||
- elasticsearch.auth
|
|
||||||
{% endif %}
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- minions.{{ grains.id }}
|
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_import':
|
|
||||||
- secrets
|
|
||||||
- elasticsearch.index_templates
|
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
|
||||||
- elasticsearch.auth
|
|
||||||
{% endif %}
|
|
||||||
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
|
||||||
- kibana.secrets
|
|
||||||
{% endif %}
|
|
||||||
- kratos.soc_kratos
|
|
||||||
- elasticsearch.soc_elasticsearch
|
|
||||||
- elasticsearch.adv_elasticsearch
|
|
||||||
- elasticfleet.soc_elasticfleet
|
|
||||||
- elasticfleet.adv_elasticfleet
|
|
||||||
- elastalert.soc_elastalert
|
|
||||||
- elastalert.adv_elastalert
|
|
||||||
- manager.soc_manager
|
|
||||||
- manager.adv_manager
|
|
||||||
- soc.soc_soc
|
|
||||||
- soc.adv_soc
|
|
||||||
- soc.license
|
|
||||||
- kibana.soc_kibana
|
|
||||||
- kibana.adv_kibana
|
|
||||||
- backup.soc_backup
|
|
||||||
- backup.adv_backup
|
|
||||||
- kratos.soc_kratos
|
|
||||||
- kratos.adv_kratos
|
|
||||||
- redis.soc_redis
|
|
||||||
- redis.adv_redis
|
|
||||||
- influxdb.soc_influxdb
|
|
||||||
- influxdb.adv_influxdb
|
|
||||||
- zeek.soc_zeek
|
|
||||||
- zeek.adv_zeek
|
|
||||||
- bpf.soc_bpf
|
|
||||||
- bpf.adv_bpf
|
|
||||||
- pcap.soc_pcap
|
|
||||||
- pcap.adv_pcap
|
|
||||||
- suricata.soc_suricata
|
|
||||||
- suricata.adv_suricata
|
|
||||||
- strelka.soc_strelka
|
|
||||||
- strelka.adv_strelka
|
|
||||||
- minions.{{ grains.id }}
|
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_fleet':
|
'*_fleet':
|
||||||
- backup.soc_backup
|
- data.*
|
||||||
- backup.adv_backup
|
- secrets
|
||||||
- logstash.nodes
|
- global
|
||||||
- logstash.soc_logstash
|
|
||||||
- logstash.adv_logstash
|
|
||||||
- elasticfleet.soc_elasticfleet
|
|
||||||
- elasticfleet.adv_elasticfleet
|
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|
||||||
'*_desktop':
|
'*_searchnode':
|
||||||
|
- logstash
|
||||||
|
- logstash.search
|
||||||
|
- elasticsearch.search
|
||||||
|
- elasticsearch.auth
|
||||||
|
- global
|
||||||
|
- minions.{{ grains.id }}
|
||||||
|
- data.nodestab
|
||||||
|
|
||||||
|
'*_import':
|
||||||
|
- zeeklogs
|
||||||
|
- secrets
|
||||||
|
- elasticsearch.eval
|
||||||
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
|
||||||
|
- elasticsearch.auth
|
||||||
|
{% endif %}
|
||||||
|
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/kibana/secrets.sls') %}
|
||||||
|
- kibana.secrets
|
||||||
|
{% endif %}
|
||||||
|
- global
|
||||||
- minions.{{ grains.id }}
|
- minions.{{ grains.id }}
|
||||||
- minions.adv_{{ grains.id }}
|
|
||||||
|
|||||||
2
pillar/users/init.sls
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
# users pillar goes in /opt/so/saltstack/local/pillar/users/init.sls
|
||||||
|
# the users directory may need to be created under /opt/so/saltstack/local/pillar
|
||||||
19
pillar/users/pillar.example
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
users:
|
||||||
|
sclapton:
|
||||||
|
# required fields
|
||||||
|
status: present
|
||||||
|
# node_access determines which node types the user can access.
|
||||||
|
# this can either be by grains.role or by final part of the minion id after the _
|
||||||
|
node_access:
|
||||||
|
- standalone
|
||||||
|
- searchnode
|
||||||
|
# optional fields
|
||||||
|
fullname: Stevie Claptoon
|
||||||
|
uid: 1001
|
||||||
|
gid: 1001
|
||||||
|
homephone: does not have a phone
|
||||||
|
groups:
|
||||||
|
- mygroup1
|
||||||
|
- mygroup2
|
||||||
|
- wheel # give sudo access
|
||||||
|
|
||||||
20
pillar/users/pillar.usage
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
users:
|
||||||
|
sclapton:
|
||||||
|
# required fields
|
||||||
|
status: <present | absent>
|
||||||
|
# node_access determines which node types the user can access.
|
||||||
|
# this can either be by grains.role or by final part of the minion id after the _
|
||||||
|
node_access:
|
||||||
|
- standalone
|
||||||
|
- searchnode
|
||||||
|
# optional fields
|
||||||
|
fullname: <string>
|
||||||
|
uid: <integer>
|
||||||
|
gid: <integer>
|
||||||
|
roomnumber: <string>
|
||||||
|
workphone: <string>
|
||||||
|
homephone: <string>
|
||||||
|
groups:
|
||||||
|
- <string>
|
||||||
|
- <string>
|
||||||
|
- wheel # give sudo access
|
||||||
@@ -1 +1,55 @@
|
|||||||
zeek:
|
zeek:
|
||||||
|
zeekctl:
|
||||||
|
MailTo: root@localhost
|
||||||
|
MailConnectionSummary: 1
|
||||||
|
MinDiskSpace: 5
|
||||||
|
MailHostUpDown: 1
|
||||||
|
LogRotationInterval: 3600
|
||||||
|
LogExpireInterval: 0
|
||||||
|
StatsLogEnable: 1
|
||||||
|
StatsLogExpireInterval: 0
|
||||||
|
StatusCmdShowAll: 0
|
||||||
|
CrashExpireInterval: 0
|
||||||
|
SitePolicyScripts: local.zeek
|
||||||
|
LogDir: /nsm/zeek/logs
|
||||||
|
SpoolDir: /nsm/zeek/spool
|
||||||
|
CfgDir: /opt/zeek/etc
|
||||||
|
CompressLogs: 1
|
||||||
|
local:
|
||||||
|
'@load':
|
||||||
|
- misc/loaded-scripts
|
||||||
|
- tuning/defaults
|
||||||
|
- misc/capture-loss
|
||||||
|
- misc/stats
|
||||||
|
- frameworks/software/vulnerable
|
||||||
|
- frameworks/software/version-changes
|
||||||
|
- protocols/ftp/software
|
||||||
|
- protocols/smtp/software
|
||||||
|
- protocols/ssh/software
|
||||||
|
- protocols/http/software
|
||||||
|
- protocols/dns/detect-external-names
|
||||||
|
- protocols/ftp/detect
|
||||||
|
- protocols/conn/known-hosts
|
||||||
|
- protocols/conn/known-services
|
||||||
|
- protocols/ssl/known-certs
|
||||||
|
- protocols/ssl/validate-certs
|
||||||
|
- protocols/ssl/log-hostcerts-only
|
||||||
|
- protocols/ssh/geo-data
|
||||||
|
- protocols/ssh/detect-bruteforcing
|
||||||
|
- protocols/ssh/interesting-hostnames
|
||||||
|
- protocols/http/detect-sqli
|
||||||
|
- frameworks/files/hash-all-files
|
||||||
|
- frameworks/files/detect-MHR
|
||||||
|
- policy/frameworks/notice/extend-email/hostnames
|
||||||
|
- ja3
|
||||||
|
- hassh
|
||||||
|
- intel
|
||||||
|
- cve-2020-0601
|
||||||
|
- securityonion/bpfconf
|
||||||
|
- securityonion/communityid
|
||||||
|
- securityonion/file-extraction
|
||||||
|
'@load-sigs':
|
||||||
|
- frameworks/signatures/detect-windows-shells
|
||||||
|
redef:
|
||||||
|
- LogAscii::use_json = T;
|
||||||
|
- CaptureLoss::watch_interval = 5 mins;
|
||||||
|
|||||||
26
pyci.sh
@@ -1,26 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
|
||||||
# Elastic License 2.0.
|
|
||||||
|
|
||||||
if [[ $# -ne 1 ]]; then
|
|
||||||
echo "Usage: $0 <python_script_dir>"
|
|
||||||
echo "Runs tests on all *_test.py files in the given directory."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
HOME_DIR=$(dirname "$0")
|
|
||||||
TARGET_DIR=${1:-.}
|
|
||||||
|
|
||||||
PATH=$PATH:/usr/local/bin
|
|
||||||
|
|
||||||
if ! which pytest &> /dev/null || ! which flake8 &> /dev/null ; then
|
|
||||||
echo "Missing dependencies. Consider running the following command:"
|
|
||||||
echo " python -m pip install flake8 pytest pytest-cov"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
pip install pytest pytest-cov
|
|
||||||
flake8 "$TARGET_DIR" "--config=${HOME_DIR}/pytest.ini"
|
|
||||||
python3 -m pytest "--cov-config=${HOME_DIR}/pytest.ini" "--cov=$TARGET_DIR" --doctest-modules --cov-report=term --cov-fail-under=100 "$TARGET_DIR"
|
|
||||||
27
pytest.ini
@@ -1,27 +0,0 @@
|
|||||||
[flake8]
|
|
||||||
exclude =
|
|
||||||
.venv
|
|
||||||
*/site-packages/,
|
|
||||||
*/source-packages/,
|
|
||||||
*/__pycache__
|
|
||||||
show_source = true
|
|
||||||
max_complexity = 12
|
|
||||||
max_line_length = 200
|
|
||||||
statistics = true
|
|
||||||
doctests = true
|
|
||||||
|
|
||||||
[pytest]
|
|
||||||
python_files = *_test.py
|
|
||||||
python_classes = Test
|
|
||||||
python_functions = test_*
|
|
||||||
norecursedirs = site-packages
|
|
||||||
|
|
||||||
[report]
|
|
||||||
exclude_lines =
|
|
||||||
if __name__ == .__main__.:
|
|
||||||
|
|
||||||
show_missing = True
|
|
||||||
omit =
|
|
||||||
*_test.py,
|
|
||||||
*/site-packages/*,
|
|
||||||
*/source-packages/*
|
|
||||||
@@ -3,14 +3,14 @@ import subprocess
|
|||||||
|
|
||||||
def check():
|
def check():
|
||||||
|
|
||||||
osfam = __grains__['os_family']
|
os = __grains__['os']
|
||||||
retval = 'False'
|
retval = 'False'
|
||||||
|
|
||||||
if osfam == 'Debian':
|
if os == 'Ubuntu':
|
||||||
if path.exists('/var/run/reboot-required'):
|
if path.exists('/var/run/reboot-required'):
|
||||||
retval = 'True'
|
retval = 'True'
|
||||||
|
|
||||||
elif osfam == 'RedHat':
|
elif os == 'CentOS':
|
||||||
cmd = 'needs-restarting -r > /dev/null 2>&1'
|
cmd = 'needs-restarting -r > /dev/null 2>&1'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|||||||
@@ -5,8 +5,6 @@ import logging
|
|||||||
def status():
|
def status():
|
||||||
return __salt__['cmd.run']('/usr/sbin/so-status')
|
return __salt__['cmd.run']('/usr/sbin/so-status')
|
||||||
|
|
||||||
def version():
|
|
||||||
return __salt__['cp.get_file_str']('/etc/soversion')
|
|
||||||
|
|
||||||
def mysql_conn(retry):
|
def mysql_conn(retry):
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
@@ -63,4 +61,4 @@ def mysql_conn(retry):
|
|||||||
for addr in ip_arr:
|
for addr in ip_arr:
|
||||||
log.debug(f' - {addr}')
|
log.debug(f' - {addr}')
|
||||||
|
|
||||||
return mysql_up
|
return mysql_up
|
||||||
@@ -1,8 +1,19 @@
|
|||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
{% set WAZUH = salt['pillar.get']('global:wazuh', '0') %}
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
{% set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
|
||||||
# Elastic License 2.0.
|
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||||
|
{% set FREQSERVER = salt['pillar.get']('manager:freq', '0') %}
|
||||||
|
{% set DOMAINSTATS = salt['pillar.get']('manager:domainstats', '0') %}
|
||||||
|
{% set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) %}
|
||||||
|
{% set FLEETNODE = salt['pillar.get']('global:fleet_node', False) %}
|
||||||
|
{% set ELASTALERT = salt['pillar.get']('elastalert:enabled', True) %}
|
||||||
|
{% set ELASTICSEARCH = salt['pillar.get']('elasticsearch:enabled', True) %}
|
||||||
|
{% set FILEBEAT = salt['pillar.get']('filebeat:enabled', True) %}
|
||||||
|
{% set KIBANA = salt['pillar.get']('kibana:enabled', True) %}
|
||||||
|
{% set LOGSTASH = salt['pillar.get']('logstash:enabled', True) %}
|
||||||
|
{% set CURATOR = salt['pillar.get']('curator:enabled', True) %}
|
||||||
|
{% set REDIS = salt['pillar.get']('redis:enabled', True) %}
|
||||||
|
{% set STRELKA = salt['pillar.get']('strelka:enabled', '0') %}
|
||||||
{% set ISAIRGAP = salt['pillar.get']('global:airgap', False) %}
|
{% set ISAIRGAP = salt['pillar.get']('global:airgap', False) %}
|
||||||
{% import_yaml 'salt/minion.defaults.yaml' as saltversion %}
|
{% import_yaml 'salt/minion.defaults.yaml' as saltversion %}
|
||||||
{% set saltversion = saltversion.salt.minion.version %}
|
{% set saltversion = saltversion.salt.minion.version %}
|
||||||
@@ -22,10 +33,9 @@
|
|||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'influxdb',
|
'influxdb',
|
||||||
|
'grafana',
|
||||||
'soc',
|
'soc',
|
||||||
'kratos',
|
'kratos',
|
||||||
'elasticfleet',
|
|
||||||
'elastic-fleet-package-registry',
|
|
||||||
'firewall',
|
'firewall',
|
||||||
'idstools',
|
'idstools',
|
||||||
'suricata.manager',
|
'suricata.manager',
|
||||||
@@ -34,10 +44,13 @@
|
|||||||
'suricata',
|
'suricata',
|
||||||
'utility',
|
'utility',
|
||||||
'schedule',
|
'schedule',
|
||||||
|
'soctopus',
|
||||||
'tcpreplay',
|
'tcpreplay',
|
||||||
'docker_clean'
|
'docker_clean',
|
||||||
|
'learn'
|
||||||
],
|
],
|
||||||
'so-heavynode': [
|
'so-heavynode': [
|
||||||
|
'ca',
|
||||||
'ssl',
|
'ssl',
|
||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
@@ -45,16 +58,38 @@
|
|||||||
'pcap',
|
'pcap',
|
||||||
'suricata',
|
'suricata',
|
||||||
'healthcheck',
|
'healthcheck',
|
||||||
'elasticagent',
|
|
||||||
'schedule',
|
'schedule',
|
||||||
'tcpreplay',
|
'tcpreplay',
|
||||||
'docker_clean'
|
'docker_clean'
|
||||||
],
|
],
|
||||||
'so-idh': [
|
'so-helixsensor': [
|
||||||
|
'salt.master',
|
||||||
|
'ca',
|
||||||
'ssl',
|
'ssl',
|
||||||
|
'registry',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'firewall',
|
'firewall',
|
||||||
'idh',
|
'idstools',
|
||||||
|
'suricata.manager',
|
||||||
|
'zeek',
|
||||||
|
'redis',
|
||||||
|
'elasticsearch',
|
||||||
|
'logstash',
|
||||||
|
'schedule',
|
||||||
|
'tcpreplay',
|
||||||
|
'docker_clean'
|
||||||
|
],
|
||||||
|
'so-fleet': [
|
||||||
|
'ca',
|
||||||
|
'ssl',
|
||||||
|
'nginx',
|
||||||
|
'telegraf',
|
||||||
|
'firewall',
|
||||||
|
'mysql',
|
||||||
|
'redis',
|
||||||
|
'fleet',
|
||||||
|
'fleet.install_package',
|
||||||
|
'filebeat',
|
||||||
'schedule',
|
'schedule',
|
||||||
'docker_clean'
|
'docker_clean'
|
||||||
],
|
],
|
||||||
@@ -67,8 +102,6 @@
|
|||||||
'nginx',
|
'nginx',
|
||||||
'soc',
|
'soc',
|
||||||
'kratos',
|
'kratos',
|
||||||
'influxdb',
|
|
||||||
'telegraf',
|
|
||||||
'firewall',
|
'firewall',
|
||||||
'idstools',
|
'idstools',
|
||||||
'suricata.manager',
|
'suricata.manager',
|
||||||
@@ -79,8 +112,7 @@
|
|||||||
'schedule',
|
'schedule',
|
||||||
'tcpreplay',
|
'tcpreplay',
|
||||||
'docker_clean',
|
'docker_clean',
|
||||||
'elasticfleet',
|
'learn'
|
||||||
'elastic-fleet-package-registry'
|
|
||||||
],
|
],
|
||||||
'so-manager': [
|
'so-manager': [
|
||||||
'salt.master',
|
'salt.master',
|
||||||
@@ -91,17 +123,17 @@
|
|||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'influxdb',
|
'influxdb',
|
||||||
|
'grafana',
|
||||||
'soc',
|
'soc',
|
||||||
'kratos',
|
'kratos',
|
||||||
'elasticfleet',
|
|
||||||
'elastic-fleet-package-registry',
|
|
||||||
'firewall',
|
'firewall',
|
||||||
'idstools',
|
'idstools',
|
||||||
'suricata.manager',
|
'suricata.manager',
|
||||||
'utility',
|
'utility',
|
||||||
'schedule',
|
'schedule',
|
||||||
|
'soctopus',
|
||||||
'docker_clean',
|
'docker_clean',
|
||||||
'stig'
|
'learn'
|
||||||
],
|
],
|
||||||
'so-managersearch': [
|
'so-managersearch': [
|
||||||
'salt.master',
|
'salt.master',
|
||||||
@@ -111,27 +143,27 @@
|
|||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'influxdb',
|
'influxdb',
|
||||||
|
'grafana',
|
||||||
'soc',
|
'soc',
|
||||||
'kratos',
|
'kratos',
|
||||||
'elastic-fleet-package-registry',
|
|
||||||
'elasticfleet',
|
|
||||||
'firewall',
|
'firewall',
|
||||||
'manager',
|
'manager',
|
||||||
'idstools',
|
'idstools',
|
||||||
'suricata.manager',
|
'suricata.manager',
|
||||||
'utility',
|
'utility',
|
||||||
'schedule',
|
'schedule',
|
||||||
|
'soctopus',
|
||||||
'docker_clean',
|
'docker_clean',
|
||||||
'stig'
|
'learn'
|
||||||
],
|
],
|
||||||
'so-searchnode': [
|
'so-node': [
|
||||||
|
'ca',
|
||||||
'ssl',
|
'ssl',
|
||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'firewall',
|
'firewall',
|
||||||
'schedule',
|
'schedule',
|
||||||
'docker_clean',
|
'docker_clean'
|
||||||
'stig'
|
|
||||||
],
|
],
|
||||||
'so-standalone': [
|
'so-standalone': [
|
||||||
'salt.master',
|
'salt.master',
|
||||||
@@ -142,10 +174,9 @@
|
|||||||
'nginx',
|
'nginx',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'influxdb',
|
'influxdb',
|
||||||
|
'grafana',
|
||||||
'soc',
|
'soc',
|
||||||
'kratos',
|
'kratos',
|
||||||
'elastic-fleet-package-registry',
|
|
||||||
'elasticfleet',
|
|
||||||
'firewall',
|
'firewall',
|
||||||
'idstools',
|
'idstools',
|
||||||
'suricata.manager',
|
'suricata.manager',
|
||||||
@@ -154,11 +185,13 @@
|
|||||||
'healthcheck',
|
'healthcheck',
|
||||||
'utility',
|
'utility',
|
||||||
'schedule',
|
'schedule',
|
||||||
|
'soctopus',
|
||||||
'tcpreplay',
|
'tcpreplay',
|
||||||
'docker_clean',
|
'docker_clean',
|
||||||
'stig'
|
'learn'
|
||||||
],
|
],
|
||||||
'so-sensor': [
|
'so-sensor': [
|
||||||
|
'ca',
|
||||||
'ssl',
|
'ssl',
|
||||||
'telegraf',
|
'telegraf',
|
||||||
'firewall',
|
'firewall',
|
||||||
@@ -166,69 +199,102 @@
|
|||||||
'pcap',
|
'pcap',
|
||||||
'suricata',
|
'suricata',
|
||||||
'healthcheck',
|
'healthcheck',
|
||||||
|
'wazuh',
|
||||||
|
'filebeat',
|
||||||
'schedule',
|
'schedule',
|
||||||
'tcpreplay',
|
'tcpreplay',
|
||||||
'docker_clean',
|
|
||||||
'stig'
|
|
||||||
],
|
|
||||||
'so-fleet': [
|
|
||||||
'ssl',
|
|
||||||
'telegraf',
|
|
||||||
'firewall',
|
|
||||||
'logstash',
|
|
||||||
'nginx',
|
|
||||||
'healthcheck',
|
|
||||||
'schedule',
|
|
||||||
'elasticfleet',
|
|
||||||
'docker_clean'
|
'docker_clean'
|
||||||
],
|
],
|
||||||
'so-receiver': [
|
|
||||||
'ssl',
|
|
||||||
'telegraf',
|
|
||||||
'firewall',
|
|
||||||
'schedule',
|
|
||||||
'docker_clean'
|
|
||||||
],
|
|
||||||
'so-desktop': [
|
|
||||||
'ssl',
|
|
||||||
'docker_clean',
|
|
||||||
'telegraf'
|
|
||||||
],
|
|
||||||
}, grain='role') %}
|
}, grain='role') %}
|
||||||
|
|
||||||
{%- if grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
{% if FILEBEAT and grains.role in ['so-helixsensor', 'so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-import'] %}
|
||||||
|
{% do allowed_states.append('filebeat') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if ((FLEETMANAGER or FLEETNODE) or PLAYBOOK != 0) and grains.role in ['so-eval', 'so-manager', 'so-managersearch', 'so-standalone'] %}
|
||||||
|
{% do allowed_states.append('mysql') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (FLEETMANAGER or FLEETNODE) and grains.role in ['so-sensor', 'so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode'] %}
|
||||||
|
{% do allowed_states.append('fleet.install_package') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (FLEETMANAGER or FLEETNODE) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-heavynode'] %}
|
||||||
|
{% do allowed_states.append('fleet') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (FLEETMANAGER or FLEETNODE) and grains.role in ['so-eval'] %}
|
||||||
|
{% do allowed_states.append('redis') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{%- if ZEEKVER != 'SURICATA' and grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||||
{% do allowed_states.append('zeek') %}
|
{% do allowed_states.append('zeek') %}
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
{% if STRELKA and grains.role in ['so-sensor', 'so-eval', 'so-standalone', 'so-heavynode'] %}
|
||||||
{% do allowed_states.append('strelka') %}
|
{% do allowed_states.append('strelka') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-import'] %}
|
{% if WAZUH and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode']%}
|
||||||
|
{% do allowed_states.append('wazuh') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if ELASTICSEARCH and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-import'] %}
|
||||||
{% do allowed_states.append('elasticsearch') %}
|
{% do allowed_states.append('elasticsearch') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
{% if ELASTICSEARCH and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||||
{% do allowed_states.append('elasticsearch.auth') %}
|
{% do allowed_states.append('elasticsearch.auth') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
{% if KIBANA and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch', 'so-import'] %}
|
||||||
{% do allowed_states.append('kibana') %}
|
{% do allowed_states.append('kibana') %}
|
||||||
{% do allowed_states.append('kibana.secrets') %}
|
{% do allowed_states.append('kibana.secrets') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
{% if grains.role in ['so-eval', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode', 'so-manager'] %}
|
||||||
|
{% do allowed_states.append('curator') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if ELASTALERT and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||||
{% do allowed_states.append('elastalert') %}
|
{% do allowed_states.append('elastalert') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-manager', 'so-standalone', 'so-searchnode', 'so-managersearch', 'so-heavynode', 'so-receiver'] %}
|
{% if (THEHIVE != 0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||||
|
{% do allowed_states.append('thehive') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (PLAYBOOK !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||||
|
{% do allowed_states.append('playbook') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (PLAYBOOK !=0) and grains.role in ['so-eval'] %}
|
||||||
|
{% do allowed_states.append('redis') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (FREQSERVER !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||||
|
{% do allowed_states.append('freqserver') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if (DOMAINSTATS !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||||
|
{% do allowed_states.append('domainstats') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if LOGSTASH and grains.role in ['so-helixsensor', 'so-manager', 'so-standalone', 'so-node', 'so-managersearch', 'so-heavynode'] %}
|
||||||
{% do allowed_states.append('logstash') %}
|
{% do allowed_states.append('logstash') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if grains.role in ['so-manager', 'so-standalone', 'so-managersearch', 'so-heavynode', 'so-receiver', 'so-eval'] %}
|
{% if REDIS and grains.role in ['so-manager', 'so-standalone', 'so-managersearch', 'so-heavynode'] %}
|
||||||
{% do allowed_states.append('redis') %}
|
{% do allowed_states.append('redis') %}
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
{% if grains.os == 'CentOS' %}
|
||||||
|
{% if not ISAIRGAP %}
|
||||||
|
{% do allowed_states.append('yum') %}
|
||||||
|
{% endif %}
|
||||||
|
{% do allowed_states.append('yum.packages') %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
{# all nodes on the right salt version can run the following states #}
|
{# all nodes on the right salt version can run the following states #}
|
||||||
{% do allowed_states.append('common') %}
|
{% do allowed_states.append('common') %}
|
||||||
{% do allowed_states.append('patch.os.schedule') %}
|
{% do allowed_states.append('patch.os.schedule') %}
|
||||||
|
|||||||
@@ -1,34 +0,0 @@
|
|||||||
{% from 'backup/map.jinja' import BACKUP_MERGED %}
|
|
||||||
|
|
||||||
# Lock permissions on the backup directory
|
|
||||||
backupdir:
|
|
||||||
file.directory:
|
|
||||||
- name: /nsm/backup
|
|
||||||
- user: 0
|
|
||||||
- group: 0
|
|
||||||
- makedirs: True
|
|
||||||
- mode: 700
|
|
||||||
|
|
||||||
config_backup_script:
|
|
||||||
file.managed:
|
|
||||||
- name: /usr/sbin/so-config-backup
|
|
||||||
- user: root
|
|
||||||
- group: root
|
|
||||||
- mode: 755
|
|
||||||
- template: jinja
|
|
||||||
- source: salt://backup/tools/sbin/so-config-backup.jinja
|
|
||||||
- defaults:
|
|
||||||
BACKUPLOCATIONS: {{ BACKUP_MERGED.locations }}
|
|
||||||
DESTINATION: {{ BACKUP_MERGED.destination }}
|
|
||||||
|
|
||||||
# Add config backup
|
|
||||||
so_config_backup:
|
|
||||||
cron.present:
|
|
||||||
- name: /usr/sbin/so-config-backup > /dev/null 2>&1
|
|
||||||
- identifier: so_config_backup
|
|
||||||
- user: root
|
|
||||||
- minute: '1'
|
|
||||||
- hour: '0'
|
|
||||||
- daymonth: '*'
|
|
||||||
- month: '*'
|
|
||||||
- dayweek: '*'
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
backup:
|
|
||||||
locations:
|
|
||||||
- /opt/so/saltstack/local
|
|
||||||
- /etc/pki
|
|
||||||
- /etc/salt
|
|
||||||
- /nsm/kratos
|
|
||||||
destination: "/nsm/backup"
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
{% import_yaml 'backup/defaults.yaml' as BACKUP_DEFAULTS %}
|
|
||||||
{% set BACKUP_MERGED = salt['pillar.get']('backup', BACKUP_DEFAULTS.backup, merge=true, merge_nested_lists=true) %}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
backup:
|
|
||||||
locations:
|
|
||||||
description: List of locations to back up to the destination.
|
|
||||||
helpLink: backup.html
|
|
||||||
global: True
|
|
||||||
destination:
|
|
||||||
description: Directory to store the configuration backups in.
|
|
||||||
helpLink: backup.html
|
|
||||||
global: True
|
|
||||||
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
#
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
|
||||||
# Elastic License 2.0.
|
|
||||||
|
|
||||||
. /usr/sbin/so-common
|
|
||||||
|
|
||||||
TODAY=$(date '+%Y_%m_%d')
|
|
||||||
BACKUPDIR={{ DESTINATION }}
|
|
||||||
BACKUPFILE="$BACKUPDIR/so-config-backup-$TODAY.tar"
|
|
||||||
MAXBACKUPS=7
|
|
||||||
|
|
||||||
# Create backup dir if it does not exist
|
|
||||||
mkdir -p /nsm/backup
|
|
||||||
|
|
||||||
# If we haven't already written a backup file for today, let's do so
|
|
||||||
if [ ! -f $BACKUPFILE ]; then
|
|
||||||
|
|
||||||
# Create empty backup file
|
|
||||||
tar -cf $BACKUPFILE -T /dev/null
|
|
||||||
|
|
||||||
# Loop through all paths defined in global.sls, and append them to backup file
|
|
||||||
{%- for LOCATION in BACKUPLOCATIONS %}
|
|
||||||
tar -rf $BACKUPFILE {{ LOCATION }}
|
|
||||||
{%- endfor %}
|
|
||||||
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Find oldest backup files and remove them
|
|
||||||
NUMBACKUPS=$(find /nsm/backup/ -type f -name "so-config-backup*" | wc -l)
|
|
||||||
while [ "$NUMBACKUPS" -gt "$MAXBACKUPS" ]; do
|
|
||||||
OLDESTBACKUP=$(find /nsm/backup/ -type f -name "so-config-backup*" -type f -printf '%T+ %p\n' | sort | head -n 1 | awk -F" " '{print $2}')
|
|
||||||
rm -f $OLDESTBACKUP
|
|
||||||
NUMBACKUPS=$(find /nsm/backup/ -type f -name "so-config-backup*" | wc -l)
|
|
||||||
done
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
bpf:
|
|
||||||
pcap: []
|
|
||||||
suricata: []
|
|
||||||
zeek: []
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
{% macro remove_comments(bpfmerged, app) %}
|
|
||||||
|
|
||||||
{# remove comments from the bpf #}
|
|
||||||
{% for bpf in bpfmerged[app] %}
|
|
||||||
{% if bpf.strip().startswith('#') %}
|
|
||||||
{% do bpfmerged[app].pop(loop.index0) %}
|
|
||||||
{% endif %}
|
|
||||||
{% endfor %}
|
|
||||||
|
|
||||||
{% endmacro %}
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
|
||||||
{% if GLOBALS.pcap_engine == "TRANSITION" %}
|
|
||||||
{% set PCAPBPF = ["ip and host 255.255.255.1 and port 1"] %}
|
|
||||||
{% else %}
|
|
||||||
{% import_yaml 'bpf/defaults.yaml' as BPFDEFAULTS %}
|
|
||||||
{% set BPFMERGED = salt['pillar.get']('bpf', BPFDEFAULTS.bpf, merge=True) %}
|
|
||||||
{% import 'bpf/macros.jinja' as MACROS %}
|
|
||||||
{{ MACROS.remove_comments(BPFMERGED, 'pcap') }}
|
|
||||||
{% set PCAPBPF = BPFMERGED.pcap %}
|
|
||||||
{% endif %}
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
bpf:
|
|
||||||
pcap:
|
|
||||||
description: List of BPF filters to apply to Stenographer.
|
|
||||||
multiline: True
|
|
||||||
forcedType: "[]string"
|
|
||||||
helpLink: bpf.html
|
|
||||||
suricata:
|
|
||||||
description: List of BPF filters to apply to Suricata.
|
|
||||||
multiline: True
|
|
||||||
forcedType: "[]string"
|
|
||||||
helpLink: bpf.html
|
|
||||||
zeek:
|
|
||||||
description: List of BPF filters to apply to Zeek.
|
|
||||||
multiline: True
|
|
||||||
forcedType: "[]string"
|
|
||||||
helpLink: bpf.html
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
{% import_yaml 'bpf/defaults.yaml' as BPFDEFAULTS %}
|
|
||||||
{% set BPFMERGED = salt['pillar.get']('bpf', BPFDEFAULTS.bpf, merge=True) %}
|
|
||||||
{% import 'bpf/macros.jinja' as MACROS %}
|
|
||||||
|
|
||||||
{{ MACROS.remove_comments(BPFMERGED, 'suricata') }}
|
|
||||||
|
|
||||||
{% set SURICATABPF = BPFMERGED.suricata %}
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
{% import_yaml 'bpf/defaults.yaml' as BPFDEFAULTS %}
|
|
||||||
{% set BPFMERGED = salt['pillar.get']('bpf', BPFDEFAULTS.bpf, merge=True) %}
|
|
||||||
{% import 'bpf/macros.jinja' as MACROS %}
|
|
||||||
|
|
||||||
{{ MACROS.remove_comments(BPFMERGED, 'zeek') }}
|
|
||||||
|
|
||||||
{% set ZEEKBPF = BPFMERGED.zeek %}
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
pki_issued_certs:
|
|
||||||
file.directory:
|
|
||||||
- name: /etc/pki/issued_certs
|
|
||||||
- makedirs: True
|
|
||||||
@@ -1,6 +1,3 @@
|
|||||||
mine_functions:
|
|
||||||
x509.get_pem_entries: [/etc/pki/ca.crt]
|
|
||||||
|
|
||||||
x509_signing_policies:
|
x509_signing_policies:
|
||||||
filebeat:
|
filebeat:
|
||||||
- minions: '*'
|
- minions: '*'
|
||||||
@@ -37,7 +34,7 @@ x509_signing_policies:
|
|||||||
- ST: Utah
|
- ST: Utah
|
||||||
- L: Salt Lake City
|
- L: Salt Lake City
|
||||||
- basicConstraints: "critical CA:false"
|
- basicConstraints: "critical CA:false"
|
||||||
- keyUsage: "critical keyEncipherment digitalSignature"
|
- keyUsage: "critical keyEncipherment"
|
||||||
- subjectKeyIdentifier: hash
|
- subjectKeyIdentifier: hash
|
||||||
- authorityKeyIdentifier: keyid,issuer:always
|
- authorityKeyIdentifier: keyid,issuer:always
|
||||||
- extendedKeyUsage: serverAuth
|
- extendedKeyUsage: serverAuth
|
||||||
@@ -57,7 +54,7 @@ x509_signing_policies:
|
|||||||
- extendedKeyUsage: serverAuth
|
- extendedKeyUsage: serverAuth
|
||||||
- days_valid: 820
|
- days_valid: 820
|
||||||
- copypath: /etc/pki/issued_certs/
|
- copypath: /etc/pki/issued_certs/
|
||||||
elasticfleet:
|
fleet:
|
||||||
- minions: '*'
|
- minions: '*'
|
||||||
- signing_private_key: /etc/pki/ca.key
|
- signing_private_key: /etc/pki/ca.key
|
||||||
- signing_cert: /etc/pki/ca.crt
|
- signing_cert: /etc/pki/ca.crt
|
||||||
@@ -65,8 +62,9 @@ x509_signing_policies:
|
|||||||
- ST: Utah
|
- ST: Utah
|
||||||
- L: Salt Lake City
|
- L: Salt Lake City
|
||||||
- basicConstraints: "critical CA:false"
|
- basicConstraints: "critical CA:false"
|
||||||
- keyUsage: "digitalSignature, nonRepudiation"
|
- keyUsage: "critical keyEncipherment"
|
||||||
- subjectKeyIdentifier: hash
|
- subjectKeyIdentifier: hash
|
||||||
- authorityKeyIdentifier: keyid,issuer:always
|
- authorityKeyIdentifier: keyid,issuer:always
|
||||||
|
- extendedKeyUsage: serverAuth
|
||||||
- days_valid: 820
|
- days_valid: 820
|
||||||
- copypath: /etc/pki/issued_certs/
|
- copypath: /etc/pki/issued_certs/
|
||||||
|
|||||||
@@ -1,25 +1,23 @@
|
|||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
|
||||||
# Elastic License 2.0.
|
|
||||||
|
|
||||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||||
{% if sls in allowed_states %}
|
{% if sls in allowed_states %}
|
||||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
|
||||||
|
|
||||||
|
|
||||||
include:
|
|
||||||
- ca.dirs
|
|
||||||
|
|
||||||
|
{% set manager = salt['grains.get']('master') %}
|
||||||
/etc/salt/minion.d/signing_policies.conf:
|
/etc/salt/minion.d/signing_policies.conf:
|
||||||
file.managed:
|
file.managed:
|
||||||
- source: salt://ca/files/signing_policies.conf
|
- source: salt://ca/files/signing_policies.conf
|
||||||
|
|
||||||
|
/etc/pki:
|
||||||
|
file.directory: []
|
||||||
|
|
||||||
|
/etc/pki/issued_certs:
|
||||||
|
file.directory: []
|
||||||
|
|
||||||
pki_private_key:
|
pki_private_key:
|
||||||
x509.private_key_managed:
|
x509.private_key_managed:
|
||||||
- name: /etc/pki/ca.key
|
- name: /etc/pki/ca.key
|
||||||
- keysize: 4096
|
- bits: 4096
|
||||||
- passphrase:
|
- passphrase:
|
||||||
|
- cipher: aes_256_cbc
|
||||||
- backup: True
|
- backup: True
|
||||||
{% if salt['file.file_exists']('/etc/pki/ca.key') -%}
|
{% if salt['file.file_exists']('/etc/pki/ca.key') -%}
|
||||||
- prereq:
|
- prereq:
|
||||||
@@ -30,7 +28,7 @@ pki_public_ca_crt:
|
|||||||
x509.certificate_managed:
|
x509.certificate_managed:
|
||||||
- name: /etc/pki/ca.crt
|
- name: /etc/pki/ca.crt
|
||||||
- signing_private_key: /etc/pki/ca.key
|
- signing_private_key: /etc/pki/ca.key
|
||||||
- CN: {{ GLOBALS.manager }}
|
- CN: {{ manager }}
|
||||||
- C: US
|
- C: US
|
||||||
- ST: Utah
|
- ST: Utah
|
||||||
- L: Salt Lake City
|
- L: Salt Lake City
|
||||||
@@ -38,23 +36,23 @@ pki_public_ca_crt:
|
|||||||
- keyUsage: "critical cRLSign, keyCertSign"
|
- keyUsage: "critical cRLSign, keyCertSign"
|
||||||
- extendedkeyUsage: "serverAuth, clientAuth"
|
- extendedkeyUsage: "serverAuth, clientAuth"
|
||||||
- subjectKeyIdentifier: hash
|
- subjectKeyIdentifier: hash
|
||||||
- authorityKeyIdentifier: keyid:always, issuer
|
- authorityKeyIdentifier: keyid,issuer:always
|
||||||
- days_valid: 3650
|
- days_valid: 3650
|
||||||
- days_remaining: 0
|
- days_remaining: 0
|
||||||
- backup: True
|
- backup: True
|
||||||
- replace: False
|
- replace: False
|
||||||
- require:
|
- require:
|
||||||
- sls: ca.dirs
|
- file: /etc/pki
|
||||||
- timeout: 30
|
- timeout: 30
|
||||||
- retry:
|
- retry:
|
||||||
attempts: 5
|
attempts: 5
|
||||||
interval: 30
|
interval: 30
|
||||||
|
|
||||||
mine_update_ca_crt:
|
x509_pem_entries:
|
||||||
module.run:
|
module.run:
|
||||||
- mine.update: []
|
- mine.send:
|
||||||
- onchanges:
|
- name: x509.get_pem_entries
|
||||||
- x509: pki_public_ca_crt
|
- glob_path: /etc/pki/ca.crt
|
||||||
|
|
||||||
cakeyperms:
|
cakeyperms:
|
||||||
file.managed:
|
file.managed:
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
pki_private_key:
|
|
||||||
file.absent:
|
|
||||||
- name: /etc/pki/ca.key
|
|
||||||
|
|
||||||
pki_public_ca_crt:
|
|
||||||
file.absent:
|
|
||||||
- name: /etc/pki/ca.crt
|
|
||||||
2
salt/common/cron/common-rotate
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
/usr/sbin/logrotate -f /opt/so/conf/log-rotate.conf > /dev/null 2>&1
|
||||||
2
salt/common/cron/sensor-rotate
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
/usr/sbin/logrotate -f /opt/so/conf/sensor-rotate.conf > /dev/null 2>&1
|
||||||
79
salt/common/files/analyst/README
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
The following GUI tools are available on the analyst workstation:
|
||||||
|
|
||||||
|
chromium
|
||||||
|
url: https://www.chromium.org/Home
|
||||||
|
To run chromium, click Applications > Internet > Chromium Web Browser
|
||||||
|
|
||||||
|
Wireshark
|
||||||
|
url: https://www.wireshark.org/
|
||||||
|
To run Wireshark, click Applications > Internet > Wireshark Network Analyzer
|
||||||
|
|
||||||
|
NetworkMiner
|
||||||
|
url: https://www.netresec.com
|
||||||
|
To run NetworkMiner, click Applications > Internet > NetworkMiner
|
||||||
|
|
||||||
|
The following CLI tools are available on the analyst workstation:
|
||||||
|
|
||||||
|
bit-twist
|
||||||
|
url: http://bittwist.sourceforge.net
|
||||||
|
To run bit-twist, open a terminal and type: bittwist -h
|
||||||
|
|
||||||
|
chaosreader
|
||||||
|
url: http://chaosreader.sourceforge.net
|
||||||
|
To run chaosreader, open a terminal and type: chaosreader -h
|
||||||
|
|
||||||
|
dnsiff
|
||||||
|
url: https://www.monkey.org/~dugsong/dsniff/
|
||||||
|
To run dsniff, open a terminal and type: dsniff -h
|
||||||
|
|
||||||
|
foremost
|
||||||
|
url: http://foremost.sourceforge.net
|
||||||
|
To run foremost, open a terminal and type: foremost -h
|
||||||
|
|
||||||
|
hping3
|
||||||
|
url: http://www.hping.org/hping3.html
|
||||||
|
To run hping3, open a terminal and type: hping3 -h
|
||||||
|
|
||||||
|
netsed
|
||||||
|
url: http://silicone.homelinux.org/projects/netsed/
|
||||||
|
To run netsed, open a terminal and type: netsed -h
|
||||||
|
|
||||||
|
ngrep
|
||||||
|
url: https://github.com/jpr5/ngrep
|
||||||
|
To run ngrep, open a terminal and type: ngrep -h
|
||||||
|
|
||||||
|
scapy
|
||||||
|
url: http://www.secdev.org/projects/scapy/
|
||||||
|
To run scapy, open a terminal and type: scapy
|
||||||
|
|
||||||
|
ssldump
|
||||||
|
url: http://www.rtfm.com/ssldump/
|
||||||
|
To run ssldump, open a terminal and type: ssldump -h
|
||||||
|
|
||||||
|
sslsplit
|
||||||
|
url: https://github.com/droe/sslsplit
|
||||||
|
To run sslsplit, open a terminal and type: sslsplit -h
|
||||||
|
|
||||||
|
tcpdump
|
||||||
|
url: http://www.tcpdump.org
|
||||||
|
To run tcpdump, open a terminal and type: tcpdump -h
|
||||||
|
|
||||||
|
tcpflow
|
||||||
|
url: https://github.com/simsong/tcpflow
|
||||||
|
To run tcpflow, open a terminal and type: tcpflow -h
|
||||||
|
|
||||||
|
tcpstat
|
||||||
|
url: https://frenchfries.net/paul/tcpstat/
|
||||||
|
To run tcpstat, open a terminal and type: tcpstat -h
|
||||||
|
|
||||||
|
tcptrace
|
||||||
|
url: http://www.tcptrace.org
|
||||||
|
To run tcptrace, open a terminal and type: tcptrace -h
|
||||||
|
|
||||||
|
tcpxtract
|
||||||
|
url: http://tcpxtract.sourceforge.net/
|
||||||
|
To run tcpxtract, open a terminal and type: tcpxtract -h
|
||||||
|
|
||||||
|
whois
|
||||||
|
url: http://www.linux.it/~md/software/
|
||||||
|
To run whois, open a terminal and type: whois -h
|
||||||
|
Before Width: | Height: | Size: 269 KiB After Width: | Height: | Size: 269 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 1.7 KiB After Width: | Height: | Size: 1.7 KiB |
|
Before Width: | Height: | Size: 319 KiB After Width: | Height: | Size: 319 KiB |
@@ -1,12 +1,12 @@
|
|||||||
|
{%- set DOCKERRANGE = salt['pillar.get']('docker:range', '172.17.0.0/24') %}
|
||||||
|
{%- set DOCKERBIND = salt['pillar.get']('docker:bip', '172.17.0.1/24') %}
|
||||||
{
|
{
|
||||||
"registry-mirrors": [
|
"registry-mirrors": [ "https://:5000" ],
|
||||||
"https://:5000"
|
"bip": "{{ DOCKERBIND }}",
|
||||||
],
|
"default-address-pools": [
|
||||||
"bip": "172.17.0.1/24",
|
{
|
||||||
"default-address-pools": [
|
"base" : "{{ DOCKERRANGE }}",
|
||||||
{
|
"size" : 24
|
||||||
"base": "172.17.0.0/24",
|
}
|
||||||
"size": 24
|
]
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
|||||||
36
salt/common/files/log-rotate.conf
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
{%- set logrotate_conf = salt['pillar.get']('logrotate:conf') %}
|
||||||
|
{%- set group_conf = salt['pillar.get']('logrotate:group_conf') %}
|
||||||
|
|
||||||
|
|
||||||
|
/opt/so/log/aptcacher-ng/*.log
|
||||||
|
/opt/so/log/idstools/*.log
|
||||||
|
/opt/so/log/nginx/*.log
|
||||||
|
/opt/so/log/soc/*.log
|
||||||
|
/opt/so/log/kratos/*.log
|
||||||
|
/opt/so/log/kibana/*.log
|
||||||
|
/opt/so/log/influxdb/*.log
|
||||||
|
/opt/so/log/elastalert/*.log
|
||||||
|
/opt/so/log/soctopus/*.log
|
||||||
|
/opt/so/log/curator/*.log
|
||||||
|
/opt/so/log/fleet/*.log
|
||||||
|
/opt/so/log/suricata/*.log
|
||||||
|
/opt/so/log/mysql/*.log
|
||||||
|
/opt/so/log/telegraf/*.log
|
||||||
|
/opt/so/log/redis/*.log
|
||||||
|
/opt/so/log/sensoroni/*.log
|
||||||
|
/opt/so/log/stenographer/*.log
|
||||||
|
/opt/so/log/salt/so-salt-minion-check
|
||||||
|
/opt/so/log/salt/minion
|
||||||
|
/opt/so/log/salt/master
|
||||||
|
/opt/so/log/logscan/*.log
|
||||||
|
{
|
||||||
|
{{ logrotate_conf | indent(width=4) }}
|
||||||
|
}
|
||||||
|
|
||||||
|
# Playbook's log directory needs additional configuration
|
||||||
|
# because Playbook requires a more permissive directory
|
||||||
|
/opt/so/log/playbook/*.log
|
||||||
|
{
|
||||||
|
{{ logrotate_conf | indent(width=4) }}
|
||||||
|
{{ group_conf | indent(width=4) }}
|
||||||
|
}
|
||||||
22
salt/common/files/sensor-rotate.conf
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
/opt/so/log/sensor_clean.log
|
||||||
|
{
|
||||||
|
daily
|
||||||
|
rotate 2
|
||||||
|
missingok
|
||||||
|
nocompress
|
||||||
|
create
|
||||||
|
sharedscripts
|
||||||
|
}
|
||||||
|
|
||||||
|
/nsm/strelka/log/strelka.log
|
||||||
|
{
|
||||||
|
daily
|
||||||
|
rotate 14
|
||||||
|
missingok
|
||||||
|
copytruncate
|
||||||
|
compress
|
||||||
|
create
|
||||||
|
extension .log
|
||||||
|
dateext
|
||||||
|
dateyesterday
|
||||||
|
}
|
||||||
@@ -3,3 +3,4 @@ filetype plugin indent on
|
|||||||
|
|
||||||
" Sets .sls files to use YAML syntax highlighting
|
" Sets .sls files to use YAML syntax highlighting
|
||||||
autocmd BufNewFile,BufRead *.sls set syntax=yaml
|
autocmd BufNewFile,BufRead *.sls set syntax=yaml
|
||||||
|
set number
|
||||||
@@ -1,24 +1,19 @@
|
|||||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||||
{% if sls in allowed_states %}
|
{% if sls in allowed_states %}
|
||||||
|
|
||||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
{% set role = grains.id.split('_') | last %}
|
||||||
|
{% from 'elasticsearch/auth.map.jinja' import ELASTICAUTH with context %}
|
||||||
include:
|
|
||||||
- common.packages
|
|
||||||
{% if GLOBALS.role in GLOBALS.manager_roles %}
|
|
||||||
- manager.elasticsearch # needed for elastic_curl_config state
|
|
||||||
- manager.kibana
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
net.core.wmem_default:
|
|
||||||
sysctl.present:
|
|
||||||
- value: 26214400
|
|
||||||
|
|
||||||
# Remove variables.txt from /tmp - This is temp
|
# Remove variables.txt from /tmp - This is temp
|
||||||
rmvariablesfile:
|
rmvariablesfile:
|
||||||
file.absent:
|
file.absent:
|
||||||
- name: /tmp/variables.txt
|
- name: /tmp/variables.txt
|
||||||
|
|
||||||
|
dockergroup:
|
||||||
|
group.present:
|
||||||
|
- name: docker
|
||||||
|
- gid: 920
|
||||||
|
|
||||||
# Add socore Group
|
# Add socore Group
|
||||||
socoregroup:
|
socoregroup:
|
||||||
group.present:
|
group.present:
|
||||||
@@ -37,15 +32,15 @@ socore:
|
|||||||
soconfperms:
|
soconfperms:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so/conf
|
- name: /opt/so/conf
|
||||||
- user: 939
|
- uid: 939
|
||||||
- group: 939
|
- gid: 939
|
||||||
- dir_mode: 770
|
- dir_mode: 770
|
||||||
|
|
||||||
sostatusconf:
|
sostatusconf:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so/conf/so-status
|
- name: /opt/so/conf/so-status
|
||||||
- user: 939
|
- uid: 939
|
||||||
- group: 939
|
- gid: 939
|
||||||
- dir_mode: 770
|
- dir_mode: 770
|
||||||
|
|
||||||
so-status.conf:
|
so-status.conf:
|
||||||
@@ -53,12 +48,13 @@ so-status.conf:
|
|||||||
- name: /opt/so/conf/so-status/so-status.conf
|
- name: /opt/so/conf/so-status/so-status.conf
|
||||||
- unless: ls /opt/so/conf/so-status/so-status.conf
|
- unless: ls /opt/so/conf/so-status/so-status.conf
|
||||||
|
|
||||||
socore_opso_perms:
|
sosaltstackperms:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so
|
- name: /opt/so/saltstack
|
||||||
- user: 939
|
- uid: 939
|
||||||
- group: 939
|
- gid: 939
|
||||||
|
- dir_mode: 770
|
||||||
|
|
||||||
so_log_perms:
|
so_log_perms:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so/log
|
- name: /opt/so/log
|
||||||
@@ -86,6 +82,92 @@ vimconfig:
|
|||||||
- source: salt://common/files/vimrc
|
- source: salt://common/files/vimrc
|
||||||
- replace: False
|
- replace: False
|
||||||
|
|
||||||
|
# Install common packages
|
||||||
|
{% if grains['os'] != 'CentOS' %}
|
||||||
|
commonpkgs:
|
||||||
|
pkg.installed:
|
||||||
|
- skip_suggestions: True
|
||||||
|
- pkgs:
|
||||||
|
- apache2-utils
|
||||||
|
- wget
|
||||||
|
- ntpdate
|
||||||
|
- jq
|
||||||
|
- python3-docker
|
||||||
|
- curl
|
||||||
|
- ca-certificates
|
||||||
|
- software-properties-common
|
||||||
|
- apt-transport-https
|
||||||
|
- openssl
|
||||||
|
- netcat
|
||||||
|
- python3-mysqldb
|
||||||
|
- sqlite3
|
||||||
|
- libssl-dev
|
||||||
|
- python3-dateutil
|
||||||
|
- python3-m2crypto
|
||||||
|
- python3-mysqldb
|
||||||
|
- python3-packaging
|
||||||
|
- python3-lxml
|
||||||
|
- git
|
||||||
|
- vim
|
||||||
|
|
||||||
|
heldpackages:
|
||||||
|
pkg.installed:
|
||||||
|
- pkgs:
|
||||||
|
{% if grains['oscodename'] == 'bionic' %}
|
||||||
|
- containerd.io: 1.4.4-1
|
||||||
|
- docker-ce: 5:20.10.5~3-0~ubuntu-bionic
|
||||||
|
- docker-ce-cli: 5:20.10.5~3-0~ubuntu-bionic
|
||||||
|
- docker-ce-rootless-extras: 5:20.10.5~3-0~ubuntu-bionic
|
||||||
|
{% elif grains['oscodename'] == 'focal' %}
|
||||||
|
- containerd.io: 1.4.9-1
|
||||||
|
- docker-ce: 5:20.10.8~3-0~ubuntu-focal
|
||||||
|
- docker-ce-cli: 5:20.10.5~3-0~ubuntu-focal
|
||||||
|
- docker-ce-rootless-extras: 5:20.10.5~3-0~ubuntu-focal
|
||||||
|
{% endif %}
|
||||||
|
- hold: True
|
||||||
|
- update_holds: True
|
||||||
|
|
||||||
|
{% else %}
|
||||||
|
commonpkgs:
|
||||||
|
pkg.installed:
|
||||||
|
- skip_suggestions: True
|
||||||
|
- pkgs:
|
||||||
|
- wget
|
||||||
|
- ntpdate
|
||||||
|
- bind-utils
|
||||||
|
- jq
|
||||||
|
- tcpdump
|
||||||
|
- httpd-tools
|
||||||
|
- net-tools
|
||||||
|
- curl
|
||||||
|
- sqlite
|
||||||
|
- mariadb-devel
|
||||||
|
- nmap-ncat
|
||||||
|
- python3
|
||||||
|
- python36-docker
|
||||||
|
- python36-dateutil
|
||||||
|
- python36-m2crypto
|
||||||
|
- python36-mysql
|
||||||
|
- python36-packaging
|
||||||
|
- python36-lxml
|
||||||
|
- yum-utils
|
||||||
|
- device-mapper-persistent-data
|
||||||
|
- lvm2
|
||||||
|
- openssl
|
||||||
|
- git
|
||||||
|
- vim-enhanced
|
||||||
|
|
||||||
|
heldpackages:
|
||||||
|
pkg.installed:
|
||||||
|
- pkgs:
|
||||||
|
- containerd.io: 1.4.4-3.1.el7
|
||||||
|
- docker-ce: 3:20.10.5-3.el7
|
||||||
|
- docker-ce-cli: 1:20.10.5-3.el7
|
||||||
|
- docker-ce-rootless-extras: 20.10.5-3.el7
|
||||||
|
- hold: True
|
||||||
|
- update_holds: True
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
# Always keep these packages up to date
|
# Always keep these packages up to date
|
||||||
|
|
||||||
alwaysupdated:
|
alwaysupdated:
|
||||||
@@ -100,8 +182,6 @@ alwaysupdated:
|
|||||||
Etc/UTC:
|
Etc/UTC:
|
||||||
timezone.system
|
timezone.system
|
||||||
|
|
||||||
# Sync curl configuration for Elasticsearch authentication
|
|
||||||
{% if GLOBALS.role in ['so-eval', 'so-heavynode', 'so-import', 'so-manager', 'so-managersearch', 'so-searchnode', 'so-standalone'] %}
|
|
||||||
elastic_curl_config:
|
elastic_curl_config:
|
||||||
file.managed:
|
file.managed:
|
||||||
- name: /opt/so/conf/elasticsearch/curl.config
|
- name: /opt/so/conf/elasticsearch/curl.config
|
||||||
@@ -109,62 +189,77 @@ elastic_curl_config:
|
|||||||
- mode: 600
|
- mode: 600
|
||||||
- show_changes: False
|
- show_changes: False
|
||||||
- makedirs: True
|
- makedirs: True
|
||||||
{% if GLOBALS.role in GLOBALS.manager_roles %}
|
|
||||||
- require:
|
|
||||||
- file: elastic_curl_config_distributed
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
|
# Sync some Utilities
|
||||||
common_sbin:
|
utilsyncscripts:
|
||||||
file.recurse:
|
file.recurse:
|
||||||
- name: /usr/sbin
|
- name: /usr/sbin
|
||||||
- source: salt://common/tools/sbin
|
- user: root
|
||||||
- user: 939
|
- group: root
|
||||||
- group: 939
|
|
||||||
- file_mode: 755
|
|
||||||
|
|
||||||
common_sbin_jinja:
|
|
||||||
file.recurse:
|
|
||||||
- name: /usr/sbin
|
|
||||||
- source: salt://common/tools/sbin_jinja
|
|
||||||
- user: 939
|
|
||||||
- group: 939
|
|
||||||
- file_mode: 755
|
- file_mode: 755
|
||||||
- template: jinja
|
- template: jinja
|
||||||
|
- source: salt://common/tools/sbin
|
||||||
|
- defaults:
|
||||||
|
ELASTICCURL: 'curl'
|
||||||
|
- context:
|
||||||
|
ELASTICCURL: {{ ELASTICAUTH.elasticcurl }}
|
||||||
|
|
||||||
{% if not GLOBALS.is_manager%}
|
{% if role in ['eval', 'standalone', 'sensor', 'heavynode'] %}
|
||||||
# prior to 2.4.50 these scripts were in common/tools/sbin on the manager because of soup and distributed to non managers
|
|
||||||
# these two states remove the scripts from non manager nodes
|
|
||||||
remove_soup:
|
|
||||||
file.absent:
|
|
||||||
- name: /usr/sbin/soup
|
|
||||||
|
|
||||||
remove_so-firewall:
|
|
||||||
file.absent:
|
|
||||||
- name: /usr/sbin/so-firewall
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
so-status_script:
|
|
||||||
file.managed:
|
|
||||||
- name: /usr/sbin/so-status
|
|
||||||
- source: salt://common/tools/sbin/so-status
|
|
||||||
- mode: 755
|
|
||||||
|
|
||||||
{% if GLOBALS.role in GLOBALS.sensor_roles %}
|
|
||||||
# Add sensor cleanup
|
# Add sensor cleanup
|
||||||
so-sensor-clean:
|
/usr/sbin/so-sensor-clean:
|
||||||
cron.present:
|
cron.present:
|
||||||
- name: /usr/sbin/so-sensor-clean
|
|
||||||
- identifier: so-sensor-clean
|
|
||||||
- user: root
|
- user: root
|
||||||
- minute: '*'
|
- minute: '*'
|
||||||
- hour: '*'
|
- hour: '*'
|
||||||
- daymonth: '*'
|
- daymonth: '*'
|
||||||
- month: '*'
|
- month: '*'
|
||||||
- dayweek: '*'
|
- dayweek: '*'
|
||||||
|
|
||||||
|
sensorrotatescript:
|
||||||
|
file.managed:
|
||||||
|
- name: /usr/local/bin/sensor-rotate
|
||||||
|
- source: salt://common/cron/sensor-rotate
|
||||||
|
- mode: 755
|
||||||
|
|
||||||
|
sensorrotateconf:
|
||||||
|
file.managed:
|
||||||
|
- name: /opt/so/conf/sensor-rotate.conf
|
||||||
|
- source: salt://common/files/sensor-rotate.conf
|
||||||
|
- mode: 644
|
||||||
|
|
||||||
|
/usr/local/bin/sensor-rotate:
|
||||||
|
cron.present:
|
||||||
|
- user: root
|
||||||
|
- minute: '1'
|
||||||
|
- hour: '0'
|
||||||
|
- daymonth: '*'
|
||||||
|
- month: '*'
|
||||||
|
- dayweek: '*'
|
||||||
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
|
commonlogrotatescript:
|
||||||
|
file.managed:
|
||||||
|
- name: /usr/local/bin/common-rotate
|
||||||
|
- source: salt://common/cron/common-rotate
|
||||||
|
- mode: 755
|
||||||
|
|
||||||
|
commonlogrotateconf:
|
||||||
|
file.managed:
|
||||||
|
- name: /opt/so/conf/log-rotate.conf
|
||||||
|
- source: salt://common/files/log-rotate.conf
|
||||||
|
- template: jinja
|
||||||
|
- mode: 644
|
||||||
|
|
||||||
|
/usr/local/bin/common-rotate:
|
||||||
|
cron.present:
|
||||||
|
- user: root
|
||||||
|
- minute: '1'
|
||||||
|
- hour: '0'
|
||||||
|
- daymonth: '*'
|
||||||
|
- month: '*'
|
||||||
|
- dayweek: '*'
|
||||||
|
|
||||||
# Create the status directory
|
# Create the status directory
|
||||||
sostatusdir:
|
sostatusdir:
|
||||||
file.directory:
|
file.directory:
|
||||||
@@ -177,12 +272,10 @@ sostatus_log:
|
|||||||
file.managed:
|
file.managed:
|
||||||
- name: /opt/so/log/sostatus/status.log
|
- name: /opt/so/log/sostatus/status.log
|
||||||
- mode: 644
|
- mode: 644
|
||||||
|
|
||||||
# Install sostatus check cron. This is used to populate Grid.
|
# Install sostatus check cron
|
||||||
so-status_check_cron:
|
'/usr/sbin/so-status -q; echo $? > /opt/so/log/sostatus/status.log 2>&1':
|
||||||
cron.present:
|
cron.present:
|
||||||
- name: '/usr/sbin/so-status -j > /opt/so/log/sostatus/status.log 2>&1'
|
|
||||||
- identifier: so-status_check_cron
|
|
||||||
- user: root
|
- user: root
|
||||||
- minute: '*/1'
|
- minute: '*/1'
|
||||||
- hour: '*'
|
- hour: '*'
|
||||||
@@ -190,21 +283,27 @@ so-status_check_cron:
|
|||||||
- month: '*'
|
- month: '*'
|
||||||
- dayweek: '*'
|
- dayweek: '*'
|
||||||
|
|
||||||
# This cronjob/script runs a check if the node needs restarted, but should be used for future status checks as well
|
|
||||||
common_status_check_cron:
|
{% if role in ['eval', 'manager', 'managersearch', 'standalone'] %}
|
||||||
|
# Lock permissions on the backup directory
|
||||||
|
backupdir:
|
||||||
|
file.directory:
|
||||||
|
- name: /nsm/backup
|
||||||
|
- user: 0
|
||||||
|
- group: 0
|
||||||
|
- makedirs: True
|
||||||
|
- mode: 700
|
||||||
|
|
||||||
|
# Add config backup
|
||||||
|
/usr/sbin/so-config-backup > /dev/null 2>&1:
|
||||||
cron.present:
|
cron.present:
|
||||||
- name: '/usr/sbin/so-common-status-check > /dev/null 2>&1'
|
|
||||||
- identifier: common_status_check
|
|
||||||
- user: root
|
- user: root
|
||||||
- minute: '*/10'
|
- minute: '1'
|
||||||
|
- hour: '0'
|
||||||
remove_post_setup_cron:
|
- daymonth: '*'
|
||||||
cron.absent:
|
- month: '*'
|
||||||
- name: 'PATH=$PATH:/usr/sbin salt-call state.highstate'
|
- dayweek: '*'
|
||||||
- identifier: post_setup_cron
|
{% else %}
|
||||||
|
|
||||||
{% if GLOBALS.role not in ['eval', 'manager', 'managersearch', 'standalone'] %}
|
|
||||||
|
|
||||||
soversionfile:
|
soversionfile:
|
||||||
file.managed:
|
file.managed:
|
||||||
- name: /etc/soversion
|
- name: /etc/soversion
|
||||||
@@ -214,8 +313,34 @@ soversionfile:
|
|||||||
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% if GLOBALS.so_model and GLOBALS.so_model not in ['SO2AMI01', 'SO2AZI01', 'SO2GCI01'] %}
|
# Manager daemon.json
|
||||||
{% if GLOBALS.os == 'OEL' %}
|
docker_daemon:
|
||||||
|
file.managed:
|
||||||
|
- source: salt://common/files/daemon.json
|
||||||
|
- name: /etc/docker/daemon.json
|
||||||
|
- template: jinja
|
||||||
|
|
||||||
|
# Make sure Docker is always running
|
||||||
|
docker:
|
||||||
|
service.running:
|
||||||
|
- enable: True
|
||||||
|
- watch:
|
||||||
|
- file: docker_daemon
|
||||||
|
|
||||||
|
# Reserve OS ports for Docker proxy in case boot settings are not already applied/present
|
||||||
|
# 55000 = Wazuh, 57314 = Strelka, 47760-47860 = Zeek
|
||||||
|
dockerapplyports:
|
||||||
|
cmd.run:
|
||||||
|
- name: if [ ! -s /etc/sysctl.d/99-reserved-ports.conf ]; then sysctl -w net.ipv4.ip_local_reserved_ports="55000,57314,47760-47860"; fi
|
||||||
|
|
||||||
|
# Reserve OS ports for Docker proxy
|
||||||
|
dockerreserveports:
|
||||||
|
file.managed:
|
||||||
|
- source: salt://common/files/99-reserved-ports.conf
|
||||||
|
- name: /etc/sysctl.d/99-reserved-ports.conf
|
||||||
|
|
||||||
|
{% if salt['grains.get']('sosmodel', '') %}
|
||||||
|
{% if grains['os'] == 'CentOS' %}
|
||||||
# Install Raid tools
|
# Install Raid tools
|
||||||
raidpkgs:
|
raidpkgs:
|
||||||
pkg.installed:
|
pkg.installed:
|
||||||
@@ -226,10 +351,8 @@ raidpkgs:
|
|||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
# Install raid check cron
|
# Install raid check cron
|
||||||
so-raid-status:
|
/usr/sbin/so-raid-status > /dev/null 2>&1:
|
||||||
cron.present:
|
cron.present:
|
||||||
- name: '/usr/sbin/so-raid-status > /dev/null 2>&1'
|
|
||||||
- identifier: so-raid-status
|
|
||||||
- user: root
|
- user: root
|
||||||
- minute: '*/15'
|
- minute: '*/15'
|
||||||
- hour: '*'
|
- hour: '*'
|
||||||
@@ -237,7 +360,8 @@ so-raid-status:
|
|||||||
- month: '*'
|
- month: '*'
|
||||||
- dayweek: '*'
|
- dayweek: '*'
|
||||||
|
|
||||||
{% endif %}
|
{% endif %}
|
||||||
|
|
||||||
{% else %}
|
{% else %}
|
||||||
|
|
||||||
{{sls}}_state_not_allowed:
|
{{sls}}_state_not_allowed:
|
||||||
|
|||||||
@@ -1,86 +0,0 @@
|
|||||||
{% from 'vars/globals.map.jinja' import GLOBALS %}
|
|
||||||
|
|
||||||
{% if GLOBALS.os_family == 'Debian' %}
|
|
||||||
commonpkgs:
|
|
||||||
pkg.installed:
|
|
||||||
- skip_suggestions: True
|
|
||||||
- pkgs:
|
|
||||||
- apache2-utils
|
|
||||||
- wget
|
|
||||||
- ntpdate
|
|
||||||
- jq
|
|
||||||
- curl
|
|
||||||
- ca-certificates
|
|
||||||
- software-properties-common
|
|
||||||
- apt-transport-https
|
|
||||||
- openssl
|
|
||||||
- netcat-openbsd
|
|
||||||
- sqlite3
|
|
||||||
- libssl-dev
|
|
||||||
- procps
|
|
||||||
- python3-dateutil
|
|
||||||
- python3-docker
|
|
||||||
- python3-packaging
|
|
||||||
- python3-lxml
|
|
||||||
- git
|
|
||||||
- rsync
|
|
||||||
- vim
|
|
||||||
- tar
|
|
||||||
- unzip
|
|
||||||
{% if grains.oscodename != 'focal' %}
|
|
||||||
- python3-rich
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if grains.oscodename == 'focal' %}
|
|
||||||
# since Ubuntu requires and internet connection we can use pip to install modules
|
|
||||||
python3-pip:
|
|
||||||
pkg.installed
|
|
||||||
|
|
||||||
python-rich:
|
|
||||||
pip.installed:
|
|
||||||
- name: rich
|
|
||||||
- target: /usr/local/lib/python3.8/dist-packages/
|
|
||||||
- require:
|
|
||||||
- pkg: python3-pip
|
|
||||||
{% endif %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if GLOBALS.os_family == 'RedHat' %}
|
|
||||||
|
|
||||||
remove_mariadb:
|
|
||||||
pkg.removed:
|
|
||||||
- name: mariadb-devel
|
|
||||||
|
|
||||||
commonpkgs:
|
|
||||||
pkg.installed:
|
|
||||||
- skip_suggestions: True
|
|
||||||
- pkgs:
|
|
||||||
- python3-dnf-plugin-versionlock
|
|
||||||
- curl
|
|
||||||
- device-mapper-persistent-data
|
|
||||||
- fuse
|
|
||||||
- fuse-libs
|
|
||||||
- fuse-overlayfs
|
|
||||||
- fuse-common
|
|
||||||
- fuse3
|
|
||||||
- fuse3-libs
|
|
||||||
- git
|
|
||||||
- httpd-tools
|
|
||||||
- jq
|
|
||||||
- lvm2
|
|
||||||
- net-tools
|
|
||||||
- nmap-ncat
|
|
||||||
- procps-ng
|
|
||||||
- python3-docker
|
|
||||||
- python3-m2crypto
|
|
||||||
- python3-packaging
|
|
||||||
- python3-pyyaml
|
|
||||||
- python3-rich
|
|
||||||
- rsync
|
|
||||||
- sqlite
|
|
||||||
- tcpdump
|
|
||||||
- unzip
|
|
||||||
- wget
|
|
||||||
- yum-utils
|
|
||||||
|
|
||||||
{% endif %}
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
{% import_yaml '/opt/so/saltstack/local/pillar/global/soc_global.sls' as SOC_GLOBAL %}
|
|
||||||
{% if SOC_GLOBAL.global.airgap %}
|
|
||||||
{% set UPDATE_DIR='/tmp/soagupdate/SecurityOnion' %}
|
|
||||||
{% else %}
|
|
||||||
{% set UPDATE_DIR='/tmp/sogh/securityonion' %}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
remove_common_soup:
|
|
||||||
file.absent:
|
|
||||||
- name: /opt/so/saltstack/default/salt/common/tools/sbin/soup
|
|
||||||
|
|
||||||
remove_common_so-firewall:
|
|
||||||
file.absent:
|
|
||||||
- name: /opt/so/saltstack/default/salt/common/tools/sbin/so-firewall
|
|
||||||
|
|
||||||
copy_so-common_common_tools_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /opt/so/saltstack/default/salt/common/tools/sbin/so-common
|
|
||||||
- source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-common
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_so-image-common_common_tools_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /opt/so/saltstack/default/salt/common/tools/sbin/so-image-common
|
|
||||||
- source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-image-common
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_soup_manager_tools_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /opt/so/saltstack/default/salt/manager/tools/sbin/soup
|
|
||||||
- source: {{UPDATE_DIR}}/salt/manager/tools/sbin/soup
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_so-firewall_manager_tools_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /opt/so/saltstack/default/salt/manager/tools/sbin/so-firewall
|
|
||||||
- source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-firewall
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_so-common_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /usr/sbin/so-common
|
|
||||||
- source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-common
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_so-image-common_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /usr/sbin/so-image-common
|
|
||||||
- source: {{UPDATE_DIR}}/salt/common/tools/sbin/so-image-common
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_soup_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /usr/sbin/soup
|
|
||||||
- source: {{UPDATE_DIR}}/salt/manager/tools/sbin/soup
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
|
|
||||||
copy_so-firewall_sbin:
|
|
||||||
file.copy:
|
|
||||||
- name: /usr/sbin/so-firewall
|
|
||||||
- source: {{UPDATE_DIR}}/salt/manager/tools/sbin/so-firewall
|
|
||||||
- force: True
|
|
||||||
- preserve: True
|
|
||||||
213
salt/common/tools/sbin/so-allow
Executable file
@@ -0,0 +1,213 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
import ipaddress
|
||||||
|
import textwrap
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
import re
|
||||||
|
from lxml import etree as ET
|
||||||
|
from xml.dom import minidom
|
||||||
|
from datetime import datetime as dt
|
||||||
|
from datetime import timezone as tz
|
||||||
|
|
||||||
|
|
||||||
|
LOCAL_SALT_DIR='/opt/so/saltstack/local'
|
||||||
|
WAZUH_CONF='/nsm/wazuh/etc/ossec.conf'
|
||||||
|
VALID_ROLES = {
|
||||||
|
'a': { 'role': 'analyst','desc': 'Analyst - 80/tcp, 443/tcp' },
|
||||||
|
'b': { 'role': 'beats_endpoint', 'desc': 'Logstash Beat - 5044/tcp' },
|
||||||
|
'e': { 'role': 'elasticsearch_rest', 'desc': 'Elasticsearch REST API - 9200/tcp' },
|
||||||
|
'f': { 'role': 'strelka_frontend', 'desc': 'Strelka frontend - 57314/tcp' },
|
||||||
|
'o': { 'role': 'osquery_endpoint', 'desc': 'Osquery endpoint - 8090/tcp' },
|
||||||
|
's': { 'role': 'syslog', 'desc': 'Syslog device - 514/tcp/udp' },
|
||||||
|
'w': { 'role': 'wazuh_agent', 'desc': 'Wazuh agent - 1514/tcp/udp' },
|
||||||
|
'p': { 'role': 'wazuh_api', 'desc': 'Wazuh API - 55000/tcp' },
|
||||||
|
'r': { 'role': 'wazuh_authd', 'desc': 'Wazuh registration service - 1515/tcp' }
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def validate_ip_cidr(ip_cidr: str) -> bool:
    """Return True if *ip_cidr* parses as a single IP address or as a
    CIDR network (strict: host bits must not be set), else False."""
    for parse in (ipaddress.ip_address, ipaddress.ip_network):
        try:
            parse(ip_cidr)
        except ValueError:
            continue
        return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def role_prompt() -> str:
    """Interactively ask the user to pick a firewall role.

    Prints the VALID_ROLES menu, reads one selection from stdin and
    returns the corresponding role name.  Exits the program with status 1
    on an unrecognized selection.
    """
    print()
    print('Choose the role for the IP or Range you would like to allow')
    print()
    for key, entry in VALID_ROLES.items():
        print(f'[{key}] - {entry["desc"]}')
    print()
    choice = input('Please enter your selection: ')
    if choice not in VALID_ROLES:
        print(f'Invalid role \'{choice}\', please try again.', file=sys.stderr)
        sys.exit(1)
    return VALID_ROLES[choice]['role']
|
||||||
|
|
||||||
|
|
||||||
|
def ip_prompt() -> str:
    """Read an IP address or CIDR block from stdin and return it.

    Exits the program with status 1 when the input does not parse as a
    single address or a network.
    """
    entered = input('Enter a single ip address or range to allow (ex: 10.10.10.10 or 10.10.0.0/16): ')
    if not validate_ip_cidr(entered):
        print(f'Invalid IP address or CIDR block \'{entered}\', please try again.', file=sys.stderr)
        sys.exit(1)
    return entered
|
||||||
|
|
||||||
|
|
||||||
|
def wazuh_enabled() -> bool:
    """Return True if any pillar file under LOCAL_SALT_DIR enables Wazuh
    (contains the literal setting 'wazuh: 1'), else False.

    BUG FIX: the original opened the bare names returned by os.listdir(),
    which are relative to the current working directory, so the reads
    failed (FileNotFoundError / IsADirectoryError) instead of inspecting
    the pillar files.  Paths are now joined with the pillar directory and
    non-files are skipped.
    """
    pillar_dir = f'{LOCAL_SALT_DIR}/pillar'
    for name in os.listdir(pillar_dir):
        path = os.path.join(pillar_dir, name)
        if not os.path.isfile(path):
            continue
        with open(path, 'r') as pillar:
            if 'wazuh: 1' in pillar.read():
                return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def root_to_str(root: ET.ElementTree) -> str:
    """Serialize the lxml tree *root* and return it pretty-printed.

    The XML is first flattened onto one line (newlines removed, then any
    run of spaces following a '>' stripped) so that minidom can re-indent
    it with one consistent style regardless of the input file's layout.
    """
    xml_str = ET.tostring(root, encoding='unicode', method='xml').replace('\n', '')
    # Strip whitespace that immediately follows a closing '>'.
    xml_str = re.sub(r'(?:(?<=>) *)', '', xml_str)
    # NOTE(review): the next two substitutions appear intended to normalize
    # spacing around XML comment markers; as written the second is a no-op.
    # Confirm against the original intent before relying on them.
    xml_str = re.sub(r' -', '', xml_str)
    xml_str = re.sub(r' -->', ' -->', xml_str)
    dom = minidom.parseString(xml_str)
    return dom.toprettyxml(indent=" ")
|
||||||
|
|
||||||
|
|
||||||
|
def add_wl(ip):
    """Append a Wazuh white_list entry for *ip* to WAZUH_CONF.

    Inserts a timestamped comment naming this tool, followed by a new
    <global><white_list>ip</white_list></global> element, then rewrites
    the whole file pretty-printed via root_to_str().
    """
    # remove_blank_text lets lxml drop formatting-only whitespace so the
    # tree can be cleanly re-indented on serialization.
    parser = ET.XMLParser(remove_blank_text=True)
    with open(WAZUH_CONF, 'rb') as wazuh_conf:
        tree = ET.parse(wazuh_conf, parser)
    root = tree.getroot()

    # Comment records who added the entry and when (UTC).
    source_comment = ET.Comment(f'Address {ip} added by /usr/sbin/so-allow on {dt.utcnow().replace(tzinfo=tz.utc).strftime("%a %b %e %H:%M:%S %Z %Y")}')
    new_global = ET.Element("global")
    new_wl = ET.SubElement(new_global, 'white_list')
    new_wl.text = ip

    root.append(source_comment)
    root.append(new_global)

    # Rewrite the config in place with normalized indentation.
    with open(WAZUH_CONF, 'w') as add_out:
        add_out.write(root_to_str(root))
|
||||||
|
|
||||||
|
|
||||||
|
def apply(role: str, ip: str) -> int:
    """Allow *ip* for *role*: add the host to the firewall role, apply the
    salt firewall state and, when Wazuh is enabled and the role is
    'analyst', whitelist the address in the Wazuh config and restart the
    OSSEC server.

    Returns 0 on success, otherwise the exit code of the failing step.

    BUG FIXES vs. the original:
    - `if wazuh_enabled and role=='analyst'` tested the function object
      (always truthy) instead of calling wazuh_enabled().
    - The function implicitly returned None on full success, which made
      the caller's `return_code > 0` comparison raise TypeError; it now
      always returns an int.
    - Message typos: 'Commmand' -> 'Command', 'for {ip} from' -> 'to'.
    """
    firewall_cmd = ['so-firewall', 'includehost', role, ip]
    salt_cmd = ['salt-call', 'state.apply', '-l', 'quiet', 'firewall', 'queue=True']
    restart_wazuh_cmd = ['so-wazuh-restart']

    print(f'Adding {ip} to the {role} role. This can take a few seconds...')
    cmd = subprocess.run(firewall_cmd)
    if cmd.returncode != 0:
        return cmd.returncode

    # Push the updated firewall role out via salt.
    cmd = subprocess.run(salt_cmd, stdout=subprocess.DEVNULL)
    if cmd.returncode != 0:
        print(f'Command \'{" ".join(salt_cmd)}\' failed.', file=sys.stderr)
        return cmd.returncode

    if wazuh_enabled() and role == 'analyst':
        try:
            add_wl(ip)
            print(f'Added whitelist entry for {ip} to {WAZUH_CONF}', file=sys.stderr)
        except Exception as e:
            print(f'Failed to add whitelist entry for {ip} to {WAZUH_CONF}', file=sys.stderr)
            print(e)
            return 1
        # The whitelist only takes effect after an OSSEC restart.
        print('Restarting OSSEC Server...')
        cmd = subprocess.run(restart_wazuh_cmd)
        if cmd.returncode != 0:
            print('Failed to restart OSSEC server.')
            return cmd.returncode

    return 0
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: parse arguments (or prompt interactively) and apply
    the requested firewall role(s) to the given IP/CIDR.

    With no role flags the tool runs interactively.  With role flags, the
    target address comes from -i or, failing that, the IP environment
    variable.  Exits 0 on success, non-zero on any failure.

    BUG FIXES vs. the original:
    - The -i help text said "disallow" for a tool that allows access.
    - `e.errno` on a generic Exception raised AttributeError (and could be
      None, masking a failure as exit status 0); now falls back to 1.
    - Role flags are generated from VALID_ROLES so flags, prompts and help
      text cannot drift apart.
    """
    if os.geteuid() != 0:
        print('You must run this script as root', file=sys.stderr)
        sys.exit(1)

    main_parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog=textwrap.dedent('''\
            additional information:
            To use this script in interactive mode call it with no arguments
            '''
        ))

    # One flag per VALID_ROLES entry; repeated flags accumulate into args.roles.
    group = main_parser.add_argument_group(title='roles')
    for flag, entry in VALID_ROLES.items():
        group.add_argument(f'-{flag}', dest='roles', action='append_const',
                           const=entry['role'], help=entry['desc'])

    ip_g = main_parser.add_argument_group(title='allow')
    ip_g.add_argument('-i', help="IP or CIDR block to allow connections from, requires at least one role argument", metavar='', dest='ip')

    args = main_parser.parse_args(sys.argv[1:])

    if args.roles is None:
        # Interactive mode: prompt for a single role and address.
        role = role_prompt()
        ip = ip_prompt()
        try:
            return_code = apply(role, ip)
        except Exception as e:
            print(f'Unexpected exception occurred: {e}', file=sys.stderr)
            return_code = getattr(e, 'errno', None) or 1
        sys.exit(return_code)
    elif args.ip is None:
        # Roles given but no -i: fall back to the IP environment variable.
        if os.environ.get('IP') is None:
            main_parser.print_help()
            sys.exit(1)
        else:
            args.ip = os.environ['IP']

    if validate_ip_cidr(args.ip):
        try:
            for role in args.roles:
                return_code = apply(role, args.ip)
                if return_code > 0:
                    break
        except Exception as e:
            print(f'Unexpected exception occurred: {e}', file=sys.stderr)
            return_code = getattr(e, 'errno', None) or 1
    else:
        print(f'Invalid IP address or CIDR block \'{args.ip}\', please try again.', file=sys.stderr)
        return_code = 1

    sys.exit(return_code)
|
||||||
|
|
||||||
|
|
||||||
|
# Script entry point: run main() and turn a user interrupt (Ctrl-C)
# into a clean non-zero exit instead of a traceback.
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        sys.exit(1)
|
||||||
|
|
||||||
23
salt/common/tools/sbin/so-allow-view
Executable file
@@ -0,0 +1,23 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
# Load shared helpers/environment used by Security Onion admin scripts.
. /usr/sbin/so-common

echo ""
echo "Hosts/Networks that have access to login to the Security Onion Console:"

# List the hosts/networks currently assigned to the 'analyst' firewall role.
so-firewall includedhosts analyst
|
||||||
309
salt/common/tools/sbin/so-analyst-install
Executable file
@@ -0,0 +1,309 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014-2020 Security Onion Solutions, LLC
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
# Installation makes system-wide changes; require root up front.
if [ "$(id -u)" -ne 0 ]; then
  echo "This script must be run using sudo!"
  exit 1
fi
|
||||||
|
|
||||||
|
# All console output from here on is also appended to the install log.
INSTALL_LOG=/root/so-analyst-install.log
exec &> >(tee -a "$INSTALL_LOG")
|
||||||
|
|
||||||
|
# Append a timestamped line to $INSTALL_LOG.
#   $1 - message text
#   $2 - single-letter severity (defaults to "I")
log() {
  msg=$1
  level=${2:-I}
  # Timestamps are always UTC (TZ=GMT) in ISO-8601 form.
  now=$(TZ=GMT date +"%Y-%m-%dT%H:%M:%SZ")
  echo -e "$now | $level | $msg" >> "$INSTALL_LOG" 2>&1
}
|
||||||
|
|
||||||
|
# Log $1 at error ("E") severity.
error() {
  log "$1" "E"
}
|
||||||
|
|
||||||
|
# Log $1 at info ("I") severity.
info() {
  log "$1" "I"
}
|
||||||
|
|
||||||
|
# Write a visual section separator titled $1 to the install log.
title() {
  echo -e "\n-----------------------------\n $1\n-----------------------------\n" >> "$INSTALL_LOG" 2>&1
}
|
||||||
|
|
||||||
|
# Log a command line, then execute it with all output appended to the log.
#   $1 - the command to run (word-split by the shell; no quoting support)
logCmd() {
  cmd=$1
  info "Executing command: $cmd"
  $cmd >> "$INSTALL_LOG" 2>&1
}
|
||||||
|
|
||||||
|
# Record a snapshot of the host (load, kernel, memory, CPU, disk, network)
# into the install log for later troubleshooting.
analyze_system() {
  title "System Characteristics"
  local probe
  for probe in "uptime" "uname -a" "free -h" "lscpu" "df -h" "ip a"; do
    logCmd "$probe"
  done
}
|
||||||
|
|
||||||
|
# Snapshot the system into the install log before making changes.
analyze_system

# Only CentOS 7 is supported as an analyst node base OS.
OS=$(grep PRETTY_NAME /etc/os-release | grep 'CentOS Linux 7')
if [ $? -ne 0 ]; then
  echo "This is an unsupported OS. Please use CentOS 7 to install the analyst node."
  exit 1
fi

# Pre-answer the prompts on Security Onion automated/appliance builds.
# NOTE(review): $manufacturer and $family are not set anywhere in this
# script — presumably exported by a calling environment; confirm.
if [[ "$manufacturer" == "Security Onion Solutions" && "$family" == "Automated" ]]; then
  INSTALL=yes
  CURLCONTINUE=no
else
  INSTALL=''
  CURLCONTINUE=''
fi

# Require an explicit 'yes'/'no' answer before modifying the system.
FIRSTPASS=yes
while [[ $INSTALL != "yes" ]] && [[ $INSTALL != "no" ]]; do
  if [[ "$FIRSTPASS" == "yes" ]]; then
    clear
    echo "###########################################"
    echo "## ** W A R N I N G ** ##"
    echo "## _______________________________ ##"
    echo "## ##"
    echo "## Installing the Security Onion ##"
    echo "## analyst node on this device will ##"
    echo "## make permanent changes to ##"
    echo "## the system. ##"
    echo "## ##"
    echo "###########################################"
    echo "Do you wish to continue? (Type the entire word 'yes' to proceed or 'no' to exit)"
    FIRSTPASS=no
  else
    echo "Please type 'yes' to continue or 'no' to exit."
  fi
  read INSTALL
done

if [[ $INSTALL == "no" ]]; then
  echo "Exiting analyst node installation."
  exit 0
fi
|
||||||
|
|
||||||
|
# Verify internet reachability; packages are fetched from the internet, so
# warn (but allow an explicit override) when the check fails.
# FIX: corrected user-facing typo "acceess" -> "access".
echo "Testing for internet connection with curl https://securityonionsolutions.com/"
CANCURL=$(curl -sI https://securityonionsolutions.com/ | grep "200 OK")
if [ $? -ne 0 ]; then
  # Unreachable: require an explicit 'yes'/'no' decision to continue.
  FIRSTPASS=yes
  while [[ $CURLCONTINUE != "yes" ]] && [[ $CURLCONTINUE != "no" ]]; do
    if [[ "$FIRSTPASS" == "yes" ]]; then
      echo "We could not access https://securityonionsolutions.com/."
      echo "Since packages are downloaded from the internet, internet access is required."
      echo "If you would like to ignore this warning and continue anyway, please type 'yes'."
      echo "Otherwise, type 'no' to exit."
      FIRSTPASS=no
    else
      echo "Please type 'yes' to continue or 'no' to exit."
    fi
    read CURLCONTINUE
  done
  if [[ "$CURLCONTINUE" == "no" ]]; then
    echo "Exiting analyst node installation."
    exit 0
  fi
else
  echo "We were able to curl https://securityonionsolutions.com/."
  sleep 3
fi
|
||||||
|
|
||||||
|
# Install a GUI text editor
yum -y install gedit

# Install misc utils
yum -y install wget curl unzip epel-release yum-plugin-versionlock;

# Install xWindows and make the graphical target the default boot target.
yum -y groupinstall "X Window System";
yum -y install gnome-classic-session gnome-terminal nautilus-open-terminal control-center liberation-mono-fonts;
unlink /etc/systemd/system/default.target;
ln -sf /lib/systemd/system/graphical.target /etc/systemd/system/default.target;
yum -y install file-roller

# Install Mono - prereq for NetworkMiner (a .NET application)
yum -y install mono-core mono-basic mono-winforms expect

# Install NetworkMiner: download, unpack, flatten the versioned directory,
# and make the data directories writable for non-root users.
yum -y install libcanberra-gtk2;
wget https://www.netresec.com/?download=NetworkMiner -O /tmp/nm.zip;
mkdir -p /opt/networkminer/
unzip /tmp/nm.zip -d /opt/networkminer/;
rm /tmp/nm.zip;
mv /opt/networkminer/NetworkMiner_*/* /opt/networkminer/
chmod +x /opt/networkminer/NetworkMiner.exe;
chmod -R go+w /opt/networkminer/AssembledFiles/;
chmod -R go+w /opt/networkminer/Captures/;
# Create networkminer shim so it can be launched as a normal command.
cat << EOF >> /bin/networkminer
#!/bin/bash
/bin/mono /opt/networkminer/NetworkMiner.exe --noupdatecheck "\$@"
EOF
chmod +x /bin/networkminer
# Convert networkminer ico file to png format for the desktop entry.
yum -y install ImageMagick
convert /opt/networkminer/networkminericon.ico /opt/networkminer/networkminericon.png
# Create menu entry.
# NOTE(review): the Icon path references networkminericon-4.png, while the
# convert above nominally writes networkminericon.png (multi-frame .ico
# conversion emits -N suffixed files) — confirm the produced filename.
cat << EOF >> /usr/share/applications/networkminer.desktop
[Desktop Entry]
Name=NetworkMiner
Comment=NetworkMiner
Encoding=UTF-8
Exec=/bin/networkminer %f
Icon=/opt/networkminer/networkminericon-4.png
StartupNotify=true
Terminal=false
X-MultipleArgs=false
Type=Application
MimeType=application/x-pcap;
Categories=Network;
EOF
|
||||||
|
|
||||||
|
# Set default monospace font to Liberation (fontconfig override).
cat << EOF >> /etc/fonts/local.conf
<match target="pattern">
<test name="family" qual="any">
<string>monospace</string>
</test>
<edit binding="strong" mode="prepend" name="family">
<string>Liberation Mono</string>
</edit>
</match>
EOF

# Install Wireshark for Gnome
yum -y install wireshark-gnome;

# Install dsniff
yum -y install dsniff;

# Install hping3
yum -y install hping3;

# Install netsed
yum -y install netsed;

# Install ngrep
yum -y install ngrep;

# Install scapy
yum -y install python36-scapy;

# Install ssldump
yum -y install ssldump;

# Install tcpdump
yum -y install tcpdump;

# Install tcpflow
yum -y install tcpflow;

# Install tcpxtract
yum -y install tcpxtract;

# Install whois
yum -y install whois;

# Install foremost (from the CERT forensics repo; not in base/EPEL)
yum -y install https://forensics.cert.org/centos/cert/7/x86_64//foremost-1.5.7-13.1.el7.x86_64.rpm;

# Install chromium
yum -y install chromium;

# Install tcpstat (Security Onion packaged RPM)
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcpstat-1.5.0/securityonion-tcpstat-1.5.0.rpm;

# Install tcptrace (Security Onion packaged RPM)
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-tcptrace-6.6.7/securityonion-tcptrace-6.6.7.rpm;

# Install sslsplit (libevent is a runtime dependency)
yum -y install libevent;
yum -y install sslsplit;

# Install Bit-Twist (Security Onion packaged RPM)
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-bittwist-2.0.0/securityonion-bittwist-2.0.0.rpm;

# Install chaosreader (perl dependencies first, then the packaged RPM)
yum -y install perl-IO-Compress perl-Net-DNS;
yum -y install https://github.com/Security-Onion-Solutions/securityonion-docker-rpm/releases/download/securityonion-chaosreader-0.95.10/securityonion-chaosreader-0.95.10.rpm;
chmod +x /bin/chaosreader;
|
||||||
|
|
||||||
|
# Copy branding assets (README, wallpaper, lock screen, login logo).
# Prefer the relative path (running from a source checkout); otherwise fall
# back to the installed saltstack tree.
if [ -f ../../files/analyst/README ]; then
  cp ../../files/analyst/README /;
  cp ../../files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
  cp ../../files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
  cp ../../files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
else
  cp /opt/so/saltstack/default/salt/common/files/analyst/README /;
  cp /opt/so/saltstack/default/salt/common/files/analyst/so-wallpaper.jpg /usr/share/backgrounds/;
  cp /opt/so/saltstack/default/salt/common/files/analyst/so-lockscreen.jpg /usr/share/backgrounds/;
  cp /opt/so/saltstack/default/salt/common/files/analyst/so-login-logo-dark.svg /usr/share/pixmaps/;
fi

# Set background wallpaper (system-wide dconf default).
cat << EOF >> /etc/dconf/db/local.d/00-background
# Specify the dconf path
[org/gnome/desktop/background]

# Specify the path to the desktop background image file
picture-uri='file:///usr/share/backgrounds/so-wallpaper.jpg'
# Specify one of the rendering options for the background image:
# 'none', 'wallpaper', 'centered', 'scaled', 'stretched', 'zoom', 'spanned'
picture-options='zoom'
# Specify the left or top color when drawing gradients or the solid color
primary-color='000000'
# Specify the right or bottom color when drawing gradients
secondary-color='FFFFFF'
EOF

# Set lock screen: idle after 3 minutes, lock 2 minutes later.
cat << EOF >> /etc/dconf/db/local.d/00-screensaver
[org/gnome/desktop/session]
idle-delay=uint32 180

[org/gnome/desktop/screensaver]
lock-enabled=true
lock-delay=uint32 120
picture-options='zoom'
picture-uri='file:///usr/share/backgrounds/so-lockscreen.jpg'
EOF

# Lock the screensaver keys so per-user settings cannot override them.
cat << EOF >> /etc/dconf/db/local.d/locks/screensaver
/org/gnome/desktop/session/idle-delay
/org/gnome/desktop/screensaver/lock-enabled
/org/gnome/desktop/screensaver/lock-delay
EOF

# Do not show the user list at login screen; show the SO logo instead.
cat << EOF >> /etc/dconf/db/local.d/00-login-screen
[org/gnome/login-screen]
logo='/usr/share/pixmaps/so-login-logo-dark.svg'
disable-user-list=true
EOF

# Compile the dconf databases so the settings above take effect.
dconf update;

echo
echo "Analyst workstation has been installed!"
echo "Press ENTER to reboot or Ctrl-C to cancel."
read pause

reboot;
|
||||||
@@ -29,7 +29,7 @@ fi
|
|||||||
|
|
||||||
interface="$1"
|
interface="$1"
|
||||||
shift
|
shift
|
||||||
tcpdump -i $interface -ddd $@ | tail -n+2 |
|
sudo tcpdump -i $interface -ddd $@ | tail -n+2 |
|
||||||
while read line; do
|
while read line; do
|
||||||
cols=( $line )
|
cols=( $line )
|
||||||
printf "%04x%02x%02x%08x" ${cols[0]} ${cols[1]} ${cols[2]} ${cols[3]}
|
printf "%04x%02x%02x%08x" ${cols[0]} ${cols[1]} ${cols[2]} ${cols[3]}
|
||||||
|
|||||||
@@ -1,11 +1,19 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
#
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# Elastic License 2.0.
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
. /usr/sbin/so-common
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
|||||||
@@ -1,34 +1,26 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
#
|
#
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
#
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# Elastic License 2.0.
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
# Elastic agent is not managed by salt. Because of this we must store this base information in a
|
# (at your option) any later version.
|
||||||
# script that accompanies the soup system. Since so-common is one of those special soup files,
|
#
|
||||||
# and since this same logic is required during installation, it's included in this file.
|
# This program is distributed in the hope that it will be useful,
|
||||||
ELASTIC_AGENT_TARBALL_VERSION="8.10.4"
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
ELASTIC_AGENT_URL="https://repo.securityonion.net/file/so-repo/prod/2.4/elasticagent/elastic-agent_SO-$ELASTIC_AGENT_TARBALL_VERSION.tar.gz"
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
ELASTIC_AGENT_MD5_URL="https://repo.securityonion.net/file/so-repo/prod/2.4/elasticagent/elastic-agent_SO-$ELASTIC_AGENT_TARBALL_VERSION.md5"
|
# GNU General Public License for more details.
|
||||||
ELASTIC_AGENT_FILE="/nsm/elastic-fleet/artifacts/elastic-agent_SO-$ELASTIC_AGENT_TARBALL_VERSION.tar.gz"
|
#
|
||||||
ELASTIC_AGENT_MD5="/nsm/elastic-fleet/artifacts/elastic-agent_SO-$ELASTIC_AGENT_TARBALL_VERSION.md5"
|
# You should have received a copy of the GNU General Public License
|
||||||
ELASTIC_AGENT_EXPANSION_DIR=/nsm/elastic-fleet/artifacts/beats/elastic-agent
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
DEFAULT_SALT_DIR=/opt/so/saltstack/default
|
DEFAULT_SALT_DIR=/opt/so/saltstack/default
|
||||||
DOC_BASE_URL="https://docs.securityonion.net/en/2.4"
|
|
||||||
|
|
||||||
if [ -z $NOROOT ]; then
|
# Check for prerequisites
|
||||||
# Check for prerequisites
|
if [ "$(id -u)" -ne 0 ]; then
|
||||||
if [ "$(id -u)" -ne 0 ]; then
|
echo "This script must be run using sudo!"
|
||||||
echo "This script must be run using sudo!"
|
exit 1
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ensure /usr/sbin is in path
|
|
||||||
if ! echo "$PATH" | grep -q "/usr/sbin"; then
|
|
||||||
export PATH="$PATH:/usr/sbin"
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Define a banner to separate sections
|
# Define a banner to separate sections
|
||||||
@@ -64,37 +56,33 @@ add_interface_bond0() {
|
|||||||
ethtool -K "$BNIC" $i off &>/dev/null
|
ethtool -K "$BNIC" $i off &>/dev/null
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
# Check if the bond slave connection has already been created
|
||||||
|
nmcli -f name,uuid -p con | grep -q "bond0-slave-$BNIC"
|
||||||
|
local found_int=$?
|
||||||
|
|
||||||
if ! [[ $is_cloud ]]; then
|
if [[ $found_int != 0 ]]; then
|
||||||
# Check if the bond slave connection has already been created
|
# Create the slave interface and assign it to the bond
|
||||||
nmcli -f name,uuid -p con | grep -q "bond0-slave-$BNIC"
|
nmcli con add type ethernet ifname "$BNIC" con-name "bond0-slave-$BNIC" master bond0 -- \
|
||||||
local found_int=$?
|
ethernet.mtu "$MTU" \
|
||||||
|
connection.autoconnect "yes"
|
||||||
|
else
|
||||||
|
local int_uuid
|
||||||
|
int_uuid=$(nmcli -f name,uuid -p con | sed -n "s/bond0-slave-$BNIC //p" | tr -d ' ')
|
||||||
|
|
||||||
if [[ $found_int != 0 ]]; then
|
nmcli con mod "$int_uuid" \
|
||||||
# Create the slave interface and assign it to the bond
|
ethernet.mtu "$MTU" \
|
||||||
nmcli con add type ethernet ifname "$BNIC" con-name "bond0-slave-$BNIC" master bond0 -- \
|
connection.autoconnect "yes"
|
||||||
ethernet.mtu "$MTU" \
|
fi
|
||||||
connection.autoconnect "yes"
|
|
||||||
else
|
|
||||||
local int_uuid
|
|
||||||
int_uuid=$(nmcli -f name,uuid -p con | sed -n "s/bond0-slave-$BNIC //p" | tr -d ' ')
|
|
||||||
|
|
||||||
nmcli con mod "$int_uuid" \
|
|
||||||
ethernet.mtu "$MTU" \
|
|
||||||
connection.autoconnect "yes"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
ip link set dev "$BNIC" arp off multicast off allmulticast off promisc on
|
ip link set dev "$BNIC" arp off multicast off allmulticast off promisc on
|
||||||
|
|
||||||
if ! [[ $is_cloud ]]; then
|
# Bring the slave interface up
|
||||||
# Bring the slave interface up
|
if [[ $verbose == true ]]; then
|
||||||
if [[ $verbose == true ]]; then
|
nmcli con up "bond0-slave-$BNIC"
|
||||||
nmcli con up "bond0-slave-$BNIC"
|
else
|
||||||
else
|
nmcli con up "bond0-slave-$BNIC" &>/dev/null
|
||||||
nmcli con up "bond0-slave-$BNIC" &>/dev/null
|
|
||||||
fi
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ "$nic_error" != 0 ]; then
|
if [ "$nic_error" != 0 ]; then
|
||||||
return "$nic_error"
|
return "$nic_error"
|
||||||
fi
|
fi
|
||||||
@@ -132,48 +120,11 @@ check_elastic_license() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
check_salt_master_status() {
|
|
||||||
local count=0
|
|
||||||
local attempts="${1:- 10}"
|
|
||||||
current_time="$(date '+%b %d %H:%M:%S')"
|
|
||||||
echo "Checking if we can access the salt master and that it is ready at: ${current_time}"
|
|
||||||
while ! salt-call state.show_top -l error concurrent=true 1> /dev/null; do
|
|
||||||
current_time="$(date '+%b %d %H:%M:%S')"
|
|
||||||
echo "Can't access salt master or it is not ready at: ${current_time}"
|
|
||||||
((count+=1))
|
|
||||||
if [[ $count -eq $attempts ]]; then
|
|
||||||
# 10 attempts takes about 5.5 minutes
|
|
||||||
echo "Gave up trying to access salt-master"
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
current_time="$(date '+%b %d %H:%M:%S')"
|
|
||||||
echo "Successfully accessed and salt master ready at: ${current_time}"
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
# this is only intended to be used to check the status of the minion from a salt master
|
|
||||||
check_salt_minion_status() {
|
|
||||||
local minion="$1"
|
|
||||||
local timeout="${2:-5}"
|
|
||||||
local logfile="${3:-'/dev/stdout'}"
|
|
||||||
echo "Checking if the salt minion: $minion will respond to jobs" >> "$logfile" 2>&1
|
|
||||||
salt "$minion" test.ping -t $timeout > /dev/null 2>&1
|
|
||||||
local status=$?
|
|
||||||
if [ $status -gt 0 ]; then
|
|
||||||
echo " Minion did not respond" >> "$logfile" 2>&1
|
|
||||||
else
|
|
||||||
echo " Received job response from salt minion" >> "$logfile" 2>&1
|
|
||||||
fi
|
|
||||||
|
|
||||||
return $status
|
|
||||||
}
|
|
||||||
|
|
||||||
copy_new_files() {
|
copy_new_files() {
|
||||||
# Copy new files over to the salt dir
|
# Copy new files over to the salt dir
|
||||||
cd $UPDATE_DIR
|
cd $UPDATE_DIR
|
||||||
rsync -a salt $DEFAULT_SALT_DIR/ --delete
|
rsync -a salt $DEFAULT_SALT_DIR/
|
||||||
rsync -a pillar $DEFAULT_SALT_DIR/ --delete
|
rsync -a pillar $DEFAULT_SALT_DIR/
|
||||||
chown -R socore:socore $DEFAULT_SALT_DIR/
|
chown -R socore:socore $DEFAULT_SALT_DIR/
|
||||||
chmod 755 $DEFAULT_SALT_DIR/pillar/firewall/addfirewall.sh
|
chmod 755 $DEFAULT_SALT_DIR/pillar/firewall/addfirewall.sh
|
||||||
cd /tmp
|
cd /tmp
|
||||||
@@ -183,44 +134,19 @@ disable_fastestmirror() {
|
|||||||
sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
|
sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
|
||||||
}
|
}
|
||||||
|
|
||||||
download_and_verify() {
|
|
||||||
source_url=$1
|
|
||||||
source_md5_url=$2
|
|
||||||
dest_file=$3
|
|
||||||
md5_file=$4
|
|
||||||
expand_dir=$5
|
|
||||||
|
|
||||||
if [[ -n "$expand_dir" ]]; then
|
|
||||||
mkdir -p "$expand_dir"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! verify_md5_checksum "$dest_file" "$md5_file"; then
|
|
||||||
retry 15 10 "curl --fail --retry 5 --retry-delay 15 -L '$source_url' --output '$dest_file'" "" ""
|
|
||||||
retry 15 10 "curl --fail --retry 5 --retry-delay 15 -L '$source_md5_url' --output '$md5_file'" "" ""
|
|
||||||
|
|
||||||
if verify_md5_checksum "$dest_file" "$md5_file"; then
|
|
||||||
echo "Source file and checksum are good."
|
|
||||||
else
|
|
||||||
echo "Unable to download and verify the source file and checksum."
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [[ -n "$expand_dir" ]]; then
|
|
||||||
tar -xf "$dest_file" -C "$expand_dir"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
elastic_license() {
|
elastic_license() {
|
||||||
|
|
||||||
read -r -d '' message <<- EOM
|
read -r -d '' message <<- EOM
|
||||||
\n
|
\n
|
||||||
Elastic Stack binaries and Security Onion components are only available under the Elastic License version 2 (ELv2):
|
Starting in Elastic Stack version 7.11, the Elastic Stack binaries are only available under the Elastic License:
|
||||||
https://securityonion.net/license/
|
https://securityonion.net/elastic-license
|
||||||
|
|
||||||
Do you agree to the terms of ELv2?
|
Please review the Elastic License:
|
||||||
|
https://www.elastic.co/licensing/elastic-license
|
||||||
|
|
||||||
If so, type AGREE to accept ELv2 and continue. Otherwise, press Enter to exit this program without making any changes.
|
Do you agree to the terms of the Elastic License?
|
||||||
|
|
||||||
|
If so, type AGREE to accept the Elastic License and continue. Otherwise, press Enter to exit this program without making any changes.
|
||||||
EOM
|
EOM
|
||||||
|
|
||||||
AGREED=$(whiptail --title "$whiptail_title" --inputbox \
|
AGREED=$(whiptail --title "$whiptail_title" --inputbox \
|
||||||
@@ -249,20 +175,19 @@ get_random_value() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
gpg_rpm_import() {
|
gpg_rpm_import() {
|
||||||
if [[ $is_oracle ]]; then
|
if [[ "$OS" == "centos" ]]; then
|
||||||
if [[ "$WHATWOULDYOUSAYYAHDOHERE" == "setup" ]]; then
|
if [[ "$WHATWOULDYOUSAYYAHDOHERE" == "setup" ]]; then
|
||||||
local RPMKEYSLOC="../salt/repo/client/files/$OS/keys"
|
local RPMKEYSLOC="../salt/repo/client/files/centos/keys"
|
||||||
else
|
else
|
||||||
local RPMKEYSLOC="$UPDATE_DIR/salt/repo/client/files/$OS/keys"
|
local RPMKEYSLOC="$UPDATE_DIR/salt/repo/client/files/centos/keys"
|
||||||
fi
|
fi
|
||||||
RPMKEYS=('RPM-GPG-KEY-oracle' 'RPM-GPG-KEY-EPEL-9' 'SALT-PROJECT-GPG-PUBKEY-2023.pub' 'docker.pub' 'securityonion.pub')
|
|
||||||
for RPMKEY in "${RPMKEYS[@]}"; do
|
RPMKEYS=('RPM-GPG-KEY-EPEL-7' 'GPG-KEY-WAZUH' 'docker.pub' 'SALTSTACK-GPG-KEY.pub' 'securityonion.pub')
|
||||||
|
|
||||||
|
for RPMKEY in "${RPMKEYS[@]}"; do
|
||||||
rpm --import $RPMKEYSLOC/$RPMKEY
|
rpm --import $RPMKEYSLOC/$RPMKEY
|
||||||
echo "Imported $RPMKEY"
|
echo "Imported $RPMKEY"
|
||||||
done
|
done
|
||||||
elif [[ $is_rpm ]]; then
|
|
||||||
echo "Importing the security onion GPG key"
|
|
||||||
rpm --import ../salt/repo/client/files/oracle/keys/securityonion.pub
|
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -275,15 +200,12 @@ init_monitor() {
|
|||||||
|
|
||||||
if [[ $MONITORNIC == "bond0" ]]; then
|
if [[ $MONITORNIC == "bond0" ]]; then
|
||||||
BIFACES=$(lookup_bond_interfaces)
|
BIFACES=$(lookup_bond_interfaces)
|
||||||
for i in rx tx sg tso ufo gso gro lro rx-vlan-offload tx-vlan-offload generic-receive-offload generic-segmentation-offload tcp-segmentation-offload; do
|
|
||||||
ethtool -K "$MONITORNIC" "$i" off;
|
|
||||||
done
|
|
||||||
else
|
else
|
||||||
BIFACES=$MONITORNIC
|
BIFACES=$MONITORNIC
|
||||||
fi
|
fi
|
||||||
|
|
||||||
for DEVICE_IFACE in $BIFACES; do
|
for DEVICE_IFACE in $BIFACES; do
|
||||||
for i in rx tx sg tso ufo gso gro lro rx-vlan-offload tx-vlan-offload generic-receive-offload generic-segmentation-offload tcp-segmentation-offload; do
|
for i in rx tx sg tso ufo gso gro lro; do
|
||||||
ethtool -K "$DEVICE_IFACE" "$i" off;
|
ethtool -K "$DEVICE_IFACE" "$i" off;
|
||||||
done
|
done
|
||||||
ip link set dev "$DEVICE_IFACE" arp off multicast off allmulticast off promisc on
|
ip link set dev "$DEVICE_IFACE" arp off multicast off allmulticast off promisc on
|
||||||
@@ -291,17 +213,31 @@ init_monitor() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
is_manager_node() {
|
is_manager_node() {
|
||||||
grep "role: so-" /etc/salt/grains | grep -E "manager|eval|managersearch|standalone|import" &> /dev/null
|
# Check to see if this is a manager node
|
||||||
|
role=$(lookup_role)
|
||||||
|
is_single_node_grid && return 0
|
||||||
|
[ $role == 'manager' ] && return 0
|
||||||
|
[ $role == 'managersearch' ] && return 0
|
||||||
|
[ $role == 'helix' ] && return 0
|
||||||
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
is_sensor_node() {
|
is_sensor_node() {
|
||||||
# Check to see if this is a sensor (forward) node
|
# Check to see if this is a sensor (forward) node
|
||||||
|
role=$(lookup_role)
|
||||||
is_single_node_grid && return 0
|
is_single_node_grid && return 0
|
||||||
grep "role: so-" /etc/salt/grains | grep -E "sensor|heavynode" &> /dev/null
|
[ $role == 'sensor' ] && return 0
|
||||||
|
[ $role == 'heavynode' ] && return 0
|
||||||
|
[ $role == 'helix' ] && return 0
|
||||||
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
is_single_node_grid() {
|
is_single_node_grid() {
|
||||||
grep "role: so-" /etc/salt/grains | grep -E "eval|standalone|import" &> /dev/null
|
role=$(lookup_role)
|
||||||
|
[ $role == 'eval' ] && return 0
|
||||||
|
[ $role == 'standalone' ] && return 0
|
||||||
|
[ $role == 'import' ] && return 0
|
||||||
|
return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
lookup_bond_interfaces() {
|
lookup_bond_interfaces() {
|
||||||
@@ -313,7 +249,6 @@ lookup_salt_value() {
|
|||||||
group=$2
|
group=$2
|
||||||
kind=$3
|
kind=$3
|
||||||
output=${4:-newline_values_only}
|
output=${4:-newline_values_only}
|
||||||
local=$5
|
|
||||||
|
|
||||||
if [ -z "$kind" ]; then
|
if [ -z "$kind" ]; then
|
||||||
kind=pillar
|
kind=pillar
|
||||||
@@ -323,13 +258,7 @@ lookup_salt_value() {
|
|||||||
group=${group}:
|
group=${group}:
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$local" == "--local" ]] || [[ "$local" == "local" ]]; then
|
salt-call --no-color ${kind}.get ${group}${key} --out=${output}
|
||||||
local="--local"
|
|
||||||
else
|
|
||||||
local=""
|
|
||||||
fi
|
|
||||||
|
|
||||||
salt-call --no-color ${kind}.get ${group}${key} --out=${output} ${local}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
lookup_pillar() {
|
lookup_pillar() {
|
||||||
@@ -355,24 +284,6 @@ lookup_role() {
|
|||||||
echo ${pieces[1]}
|
echo ${pieces[1]}
|
||||||
}
|
}
|
||||||
|
|
||||||
is_feature_enabled() {
|
|
||||||
feature=$1
|
|
||||||
enabled=$(lookup_salt_value features)
|
|
||||||
for cur in $enabled; do
|
|
||||||
if [[ "$feature" == "$cur" ]]; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
read_feat() {
|
|
||||||
if [ -f /opt/so/log/sostatus/lks_enabled ]; then
|
|
||||||
lic_id=$(cat /opt/so/saltstack/local/pillar/soc/license.sls | grep license_id: | awk '{print $2}')
|
|
||||||
echo "$lic_id/$(cat /opt/so/log/sostatus/lks_enabled)/$(cat /opt/so/log/sostatus/fps_enabled)"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
require_manager() {
|
require_manager() {
|
||||||
if is_manager_node; then
|
if is_manager_node; then
|
||||||
echo "This is a manager, so we can proceed."
|
echo "This is a manager, so we can proceed."
|
||||||
@@ -383,53 +294,32 @@ require_manager() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
retry() {
|
retry() {
|
||||||
maxAttempts=$1
|
maxAttempts=$1
|
||||||
sleepDelay=$2
|
sleepDelay=$2
|
||||||
cmd=$3
|
cmd=$3
|
||||||
expectedOutput=$4
|
expectedOutput=$4
|
||||||
failedOutput=$5
|
attempt=0
|
||||||
attempt=0
|
local exitcode=0
|
||||||
local exitcode=0
|
while [[ $attempt -lt $maxAttempts ]]; do
|
||||||
while [[ $attempt -lt $maxAttempts ]]; do
|
attempt=$((attempt+1))
|
||||||
attempt=$((attempt+1))
|
echo "Executing command with retry support: $cmd"
|
||||||
echo "Executing command with retry support: $cmd"
|
output=$(eval "$cmd")
|
||||||
output=$(eval "$cmd")
|
exitcode=$?
|
||||||
exitcode=$?
|
echo "Results: $output ($exitcode)"
|
||||||
echo "Results: $output ($exitcode)"
|
if [ -n "$expectedOutput" ]; then
|
||||||
if [ -n "$expectedOutput" ]; then
|
if [[ "$output" =~ "$expectedOutput" ]]; then
|
||||||
if [[ "$output" =~ "$expectedOutput" ]]; then
|
return $exitCode
|
||||||
return $exitcode
|
else
|
||||||
else
|
echo "Expected '$expectedOutput' but got '$output'"
|
||||||
echo "Did not find expectedOutput: '$expectedOutput' in the output below from running the command: '$cmd'"
|
fi
|
||||||
echo "<Start of output>"
|
elif [[ $exitcode -eq 0 ]]; then
|
||||||
echo "$output"
|
return $exitCode
|
||||||
echo "<End of output>"
|
fi
|
||||||
if [[ $exitcode -eq 0 ]]; then
|
echo "Command failed with exit code $exitcode; will retry in $sleepDelay seconds ($attempt / $maxAttempts)..."
|
||||||
echo "Forcing exit code to 1"
|
sleep $sleepDelay
|
||||||
exitcode=1
|
done
|
||||||
fi
|
echo "Command continues to fail; giving up."
|
||||||
fi
|
return $exitcode
|
||||||
elif [ -n "$failedOutput" ]; then
|
|
||||||
if [[ "$output" =~ "$failedOutput" ]]; then
|
|
||||||
echo "Found failedOutput: '$failedOutput' in the output below from running the command: '$cmd'"
|
|
||||||
echo "<Start of output>"
|
|
||||||
echo "$output"
|
|
||||||
echo "<End of output>"
|
|
||||||
if [[ $exitcode -eq 0 ]]; then
|
|
||||||
echo "Forcing exit code to 1"
|
|
||||||
exitcode=1
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
return $exitcode
|
|
||||||
fi
|
|
||||||
elif [[ $exitcode -eq 0 ]]; then
|
|
||||||
return $exitcode
|
|
||||||
fi
|
|
||||||
echo "Command failed with exit code $exitcode; will retry in $sleepDelay seconds ($attempt / $maxAttempts)..."
|
|
||||||
sleep $sleepDelay
|
|
||||||
done
|
|
||||||
echo "Command continues to fail; giving up."
|
|
||||||
return $exitcode
|
|
||||||
}
|
}
|
||||||
|
|
||||||
run_check_net_err() {
|
run_check_net_err() {
|
||||||
@@ -454,82 +344,11 @@ run_check_net_err() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
wait_for_salt_minion() {
|
|
||||||
local minion="$1"
|
|
||||||
local timeout="${2:-5}"
|
|
||||||
local logfile="${3:-'/dev/stdout'}"
|
|
||||||
retry 60 5 "journalctl -u salt-minion.service | grep 'Minion is ready to receive requests'" >> "$logfile" 2>&1 || fail
|
|
||||||
local attempt=0
|
|
||||||
# each attempts would take about 15 seconds
|
|
||||||
local maxAttempts=20
|
|
||||||
until check_salt_minion_status "$minion" "$timeout" "$logfile"; do
|
|
||||||
attempt=$((attempt+1))
|
|
||||||
if [[ $attempt -eq $maxAttempts ]]; then
|
|
||||||
return 1
|
|
||||||
fi
|
|
||||||
sleep 10
|
|
||||||
done
|
|
||||||
return 0
|
|
||||||
}
|
|
||||||
|
|
||||||
salt_minion_count() {
|
|
||||||
local MINIONDIR="/opt/so/saltstack/local/pillar/minions"
|
|
||||||
MINIONCOUNT=$(ls -la $MINIONDIR/*.sls | grep -v adv_ | wc -l)
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
set_os() {
|
set_os() {
|
||||||
if [ -f /etc/redhat-release ]; then
|
if [ -f /etc/redhat-release ]; then
|
||||||
if grep -q "Rocky Linux release 9" /etc/redhat-release; then
|
OS=centos
|
||||||
OS=rocky
|
else
|
||||||
OSVER=9
|
OS=ubuntu
|
||||||
is_rocky=true
|
|
||||||
is_rpm=true
|
|
||||||
elif grep -q "CentOS Stream release 9" /etc/redhat-release; then
|
|
||||||
OS=centos
|
|
||||||
OSVER=9
|
|
||||||
is_centos=true
|
|
||||||
is_rpm=true
|
|
||||||
elif grep -q "AlmaLinux release 9" /etc/redhat-release; then
|
|
||||||
OS=alma
|
|
||||||
OSVER=9
|
|
||||||
is_alma=true
|
|
||||||
is_rpm=true
|
|
||||||
elif grep -q "Red Hat Enterprise Linux release 9" /etc/redhat-release; then
|
|
||||||
if [ -f /etc/oracle-release ]; then
|
|
||||||
OS=oracle
|
|
||||||
OSVER=9
|
|
||||||
is_oracle=true
|
|
||||||
is_rpm=true
|
|
||||||
else
|
|
||||||
OS=rhel
|
|
||||||
OSVER=9
|
|
||||||
is_rhel=true
|
|
||||||
is_rpm=true
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
cron_service_name="crond"
|
|
||||||
elif [ -f /etc/os-release ]; then
|
|
||||||
if grep -q "UBUNTU_CODENAME=focal" /etc/os-release; then
|
|
||||||
OSVER=focal
|
|
||||||
UBVER=20.04
|
|
||||||
OS=ubuntu
|
|
||||||
is_ubuntu=true
|
|
||||||
is_deb=true
|
|
||||||
elif grep -q "UBUNTU_CODENAME=jammy" /etc/os-release; then
|
|
||||||
OSVER=jammy
|
|
||||||
UBVER=22.04
|
|
||||||
OS=ubuntu
|
|
||||||
is_ubuntu=true
|
|
||||||
is_deb=true
|
|
||||||
elif grep -q "VERSION_CODENAME=bookworm" /etc/os-release; then
|
|
||||||
OSVER=bookworm
|
|
||||||
DEBVER=12
|
|
||||||
is_debian=true
|
|
||||||
OS=debian
|
|
||||||
is_deb=true
|
|
||||||
fi
|
|
||||||
cron_service_name="cron"
|
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -538,7 +357,7 @@ set_minionid() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
set_palette() {
|
set_palette() {
|
||||||
if [[ $is_deb ]]; then
|
if [ "$OS" == ubuntu ]; then
|
||||||
update-alternatives --set newt-palette /etc/newt/palette.original
|
update-alternatives --set newt-palette /etc/newt/palette.original
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
@@ -562,33 +381,6 @@ set_version() {
|
|||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
status () {
|
|
||||||
printf "\n=========================================================================\n$(date) | $1\n=========================================================================\n"
|
|
||||||
}
|
|
||||||
|
|
||||||
sync_options() {
|
|
||||||
set_version
|
|
||||||
set_os
|
|
||||||
salt_minion_count
|
|
||||||
|
|
||||||
echo "$VERSION/$OS/$(uname -r)/$MINIONCOUNT/$(read_feat)"
|
|
||||||
}
|
|
||||||
|
|
||||||
systemctl_func() {
|
|
||||||
local action=$1
|
|
||||||
local echo_action=$1
|
|
||||||
local service_name=$2
|
|
||||||
|
|
||||||
if [[ "$echo_action" == "stop" ]]; then
|
|
||||||
echo_action="stopp"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "${echo_action^}ing $service_name service at $(date +"%T.%6N")"
|
|
||||||
systemctl $action $service_name && echo "Successfully ${echo_action}ed $service_name." || echo "Failed to $action $service_name."
|
|
||||||
echo ""
|
|
||||||
}
|
|
||||||
|
|
||||||
has_uppercase() {
|
has_uppercase() {
|
||||||
local string=$1
|
local string=$1
|
||||||
|
|
||||||
@@ -597,11 +389,6 @@ has_uppercase() {
|
|||||||
|| return 1
|
|| return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
update_elastic_agent() {
|
|
||||||
echo "Checking if Elastic Agent update is necessary..."
|
|
||||||
download_and_verify "$ELASTIC_AGENT_URL" "$ELASTIC_AGENT_MD5_URL" "$ELASTIC_AGENT_FILE" "$ELASTIC_AGENT_MD5" "$ELASTIC_AGENT_EXPANSION_DIR"
|
|
||||||
}
|
|
||||||
|
|
||||||
valid_cidr() {
|
valid_cidr() {
|
||||||
# Verify there is a backslash in the string
|
# Verify there is a backslash in the string
|
||||||
echo "$1" | grep -qP "^[^/]+/[^/]+$" || return 1
|
echo "$1" | grep -qP "^[^/]+/[^/]+$" || return 1
|
||||||
@@ -660,18 +447,6 @@ valid_hostname() {
|
|||||||
[[ $hostname =~ ^[a-zA-Z0-9\-]+$ ]] && [[ $hostname != 'localhost' ]] && return 0 || return 1
|
[[ $hostname =~ ^[a-zA-Z0-9\-]+$ ]] && [[ $hostname != 'localhost' ]] && return 0 || return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
verify_ip4() {
|
|
||||||
local ip=$1
|
|
||||||
# Is this an IP or CIDR?
|
|
||||||
if grep -qP "^[^/]+/[^/]+$" <<< $ip; then
|
|
||||||
# Looks like a CIDR
|
|
||||||
valid_ip4_cidr_mask "$ip"
|
|
||||||
else
|
|
||||||
# We know this is not a CIDR - Is it an IP?
|
|
||||||
valid_ip4 "$ip"
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
valid_ip4() {
|
valid_ip4() {
|
||||||
local ip=$1
|
local ip=$1
|
||||||
|
|
||||||
@@ -755,23 +530,6 @@ valid_username() {
|
|||||||
echo "$user" | grep -qP '^[a-z_]([a-z0-9_-]{0,31}|[a-z0-9_-]{0,30}\$)$' && return 0 || return 1
|
echo "$user" | grep -qP '^[a-z_]([a-z0-9_-]{0,31}|[a-z0-9_-]{0,30}\$)$' && return 0 || return 1
|
||||||
}
|
}
|
||||||
|
|
||||||
verify_md5_checksum() {
|
|
||||||
data_file=$1
|
|
||||||
md5_file=${2:-${data_file}.md5}
|
|
||||||
|
|
||||||
if [[ ! -f "$dest_file" || ! -f "$md5_file" ]]; then
|
|
||||||
return 2
|
|
||||||
fi
|
|
||||||
|
|
||||||
SOURCEHASH=$(md5sum "$data_file" | awk '{ print $1 }')
|
|
||||||
HASH=$(cat "$md5_file")
|
|
||||||
|
|
||||||
if [[ "$HASH" == "$SOURCEHASH" ]]; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
wait_for_web_response() {
|
wait_for_web_response() {
|
||||||
url=$1
|
url=$1
|
||||||
expected=$2
|
expected=$2
|
||||||
|
|||||||
@@ -1,103 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
|
||||||
# Elastic License 2.0.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import subprocess
|
|
||||||
import os
|
|
||||||
import json
|
|
||||||
|
|
||||||
sys.path.append('/opt/saltstack/salt/lib/python3.10/site-packages/')
|
|
||||||
import salt.config
|
|
||||||
import salt.loader
|
|
||||||
|
|
||||||
__opts__ = salt.config.minion_config('/etc/salt/minion')
|
|
||||||
__grains__ = salt.loader.grains(__opts__)
|
|
||||||
|
|
||||||
def check_needs_restarted():
|
|
||||||
osfam = __grains__['os_family']
|
|
||||||
val = '0'
|
|
||||||
outfile = "/opt/so/log/sostatus/needs_restarted"
|
|
||||||
|
|
||||||
if osfam == 'Debian':
|
|
||||||
if os.path.exists('/var/run/reboot-required'):
|
|
||||||
val = '1'
|
|
||||||
elif osfam == 'RedHat':
|
|
||||||
cmd = 'needs-restarting -r > /dev/null 2>&1'
|
|
||||||
try:
|
|
||||||
needs_restarting = subprocess.check_call(cmd, shell=True)
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
val = '1'
|
|
||||||
else:
|
|
||||||
fail("Unsupported OS")
|
|
||||||
|
|
||||||
with open(outfile, 'w') as f:
|
|
||||||
f.write(val)
|
|
||||||
|
|
||||||
def check_for_fps():
|
|
||||||
feat = 'fps'
|
|
||||||
feat_full = feat.replace('ps', 'ips')
|
|
||||||
fps = 0
|
|
||||||
try:
|
|
||||||
result = subprocess.run([feat_full + '-mode-setup', '--is-enabled'], stdout=subprocess.PIPE)
|
|
||||||
if result.returncode == 0:
|
|
||||||
fps = 1
|
|
||||||
except FileNotFoundError:
|
|
||||||
fn = '/proc/sys/crypto/' + feat_full + '_enabled'
|
|
||||||
try:
|
|
||||||
with open(fn, 'r') as f:
|
|
||||||
contents = f.read()
|
|
||||||
if '1' in contents:
|
|
||||||
fps = 1
|
|
||||||
except:
|
|
||||||
# Unknown, so assume 0
|
|
||||||
fps = 0
|
|
||||||
|
|
||||||
with open('/opt/so/log/sostatus/fps_enabled', 'w') as f:
|
|
||||||
f.write(str(fps))
|
|
||||||
|
|
||||||
def check_for_lks():
|
|
||||||
feat = 'Lks'
|
|
||||||
feat_full = feat.replace('ks', 'uks')
|
|
||||||
lks = 0
|
|
||||||
result = subprocess.run(['lsblk', '-p', '-J'], check=True, stdout=subprocess.PIPE)
|
|
||||||
data = json.loads(result.stdout)
|
|
||||||
for device in data['blockdevices']:
|
|
||||||
if 'children' in device:
|
|
||||||
for gc in device['children']:
|
|
||||||
if 'children' in gc:
|
|
||||||
try:
|
|
||||||
arg = 'is' + feat_full
|
|
||||||
result = subprocess.run(['cryptsetup', arg, gc['name']], stdout=subprocess.PIPE)
|
|
||||||
if result.returncode == 0:
|
|
||||||
lks = 1
|
|
||||||
except FileNotFoundError:
|
|
||||||
for ggc in gc['children']:
|
|
||||||
if 'crypt' in ggc['type']:
|
|
||||||
lks = 1
|
|
||||||
if lks:
|
|
||||||
break
|
|
||||||
with open('/opt/so/log/sostatus/lks_enabled', 'w') as f:
|
|
||||||
f.write(str(lks))
|
|
||||||
|
|
||||||
def fail(msg):
|
|
||||||
print(msg, file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def main():
|
|
||||||
proc = subprocess.run(['id', '-u'], stdout=subprocess.PIPE, encoding="utf-8")
|
|
||||||
if proc.stdout.strip() != "0":
|
|
||||||
fail("This program must be run as root")
|
|
||||||
# Ensure that umask is 0022 so that files created by this script have rw-r-r permissions
|
|
||||||
org_umask = os.umask(0o022)
|
|
||||||
check_needs_restarted()
|
|
||||||
check_for_fps()
|
|
||||||
check_for_lks()
|
|
||||||
# Restore umask to whatever value was set before this script was run. SXIG sets to 0077 rw---
|
|
||||||
os.umask(org_umask)
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
48
salt/common/tools/sbin/so-config-backup
Executable file
@@ -0,0 +1,48 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.. /usr/sbin/so-common
|
||||||
|
{% set BACKUPLOCATIONS = salt['pillar.get']('backup:locations', {}) %}
|
||||||
|
|
||||||
|
TODAY=$(date '+%Y_%m_%d')
|
||||||
|
BACKUPFILE="/nsm/backup/so-config-backup-$TODAY.tar"
|
||||||
|
MAXBACKUPS=7
|
||||||
|
|
||||||
|
# Create backup dir if it does not exist
|
||||||
|
mkdir -p /nsm/backup
|
||||||
|
|
||||||
|
# If we haven't already written a backup file for today, let's do so
|
||||||
|
if [ ! -f $BACKUPFILE ]; then
|
||||||
|
|
||||||
|
# Create empty backup file
|
||||||
|
tar -cf $BACKUPFILE -T /dev/null
|
||||||
|
|
||||||
|
# Loop through all paths defined in global.sls, and append them to backup file
|
||||||
|
{%- for LOCATION in BACKUPLOCATIONS %}
|
||||||
|
tar -rf $BACKUPFILE {{ LOCATION }}
|
||||||
|
{%- endfor %}
|
||||||
|
tar -rf $BACKUPFILE /etc/pki
|
||||||
|
tar -rf $BACKUPFILE /etc/salt
|
||||||
|
tar -rf $BACKUPFILE /opt/so/conf/kratos
|
||||||
|
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Find oldest backup files and remove them
|
||||||
|
NUMBACKUPS=$(find /nsm/backup/ -type f -name "so-config-backup*" | wc -l)
|
||||||
|
while [ "$NUMBACKUPS" -gt "$MAXBACKUPS" ]; do
|
||||||
|
OLDESTBACKUP=$(find /nsm/backup/ -type f -name "so-config-backup*" -type f -printf '%T+ %p\n' | sort | head -n 1 | awk -F" " '{print $2}')
|
||||||
|
rm -f $OLDESTBACKUP
|
||||||
|
NUMBACKUPS=$(find /nsm/backup/ -type f -name "so-config-backup*" | wc -l)
|
||||||
|
done
|
||||||
21
salt/common/tools/sbin/so-cortex-restart
Executable file
@@ -0,0 +1,21 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-stop cortex $1
|
||||||
|
/usr/sbin/so-start thehive $1
|
||||||
20
salt/common/tools/sbin/so-cortex-start
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-start thehive $1
|
||||||
20
salt/common/tools/sbin/so-cortex-stop
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-stop cortex $1
|
||||||
54
salt/common/tools/sbin/so-cortex-user-add
Executable file
@@ -0,0 +1,54 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 <new-user-name>"
|
||||||
|
echo ""
|
||||||
|
echo "Adds a new user to Cortex. The new password will be read from STDIN."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ $# -ne 1 ]; then
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
|
||||||
|
USER=$1
|
||||||
|
|
||||||
|
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||||
|
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||||
|
CORTEX_ORG_NAME=$(lookup_pillar cortexorgname)
|
||||||
|
CORTEX_USER=$USER
|
||||||
|
|
||||||
|
# Read password for new user from stdin
|
||||||
|
test -t 0
|
||||||
|
if [[ $? == 0 ]]; then
|
||||||
|
echo "Enter new password:"
|
||||||
|
fi
|
||||||
|
read -rs CORTEX_PASS
|
||||||
|
|
||||||
|
# Create new user in Cortex
|
||||||
|
resp=$(curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user" -d "{\"name\": \"$CORTEX_USER\",\"roles\": [\"read\",\"analyze\",\"orgadmin\"],\"organization\": \"$CORTEX_ORG_NAME\",\"login\": \"$CORTEX_USER\",\"password\" : \"$CORTEX_PASS\" }")
|
||||||
|
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||||
|
echo "Successfully added user to Cortex."
|
||||||
|
else
|
||||||
|
echo "Unable to add user to Cortex; user might already exist."
|
||||||
|
echo $resp
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
57
salt/common/tools/sbin/so-cortex-user-enable
Executable file
@@ -0,0 +1,57 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
usage() {
|
||||||
|
echo "Usage: $0 <user-name> <true|false>"
|
||||||
|
echo ""
|
||||||
|
echo "Enables or disables a user in Cortex."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
if [ $# -ne 2 ]; then
|
||||||
|
usage
|
||||||
|
fi
|
||||||
|
|
||||||
|
USER=$1
|
||||||
|
|
||||||
|
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||||
|
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||||
|
CORTEX_USER=$USER
|
||||||
|
|
||||||
|
case "${2^^}" in
|
||||||
|
FALSE | NO | 0)
|
||||||
|
CORTEX_STATUS=Locked
|
||||||
|
;;
|
||||||
|
TRUE | YES | 1)
|
||||||
|
CORTEX_STATUS=Ok
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
usage
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user/${CORTEX_USER}" -d "{\"status\":\"${CORTEX_STATUS}\" }")
|
||||||
|
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||||
|
echo "Successfully updated user in Cortex."
|
||||||
|
else
|
||||||
|
echo "Failed to update user in Cortex."
|
||||||
|
echo $resp
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
|
||||||
20
salt/common/tools/sbin/so-curator-restart
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-restart curator $1
|
||||||
20
salt/common/tools/sbin/so-curator-start
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-start curator $1
|
||||||
20
salt/common/tools/sbin/so-curator-stop
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
|
||||||
|
/usr/sbin/so-stop curator $1
|
||||||
@@ -1,11 +1,19 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
#
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# Elastic License 2.0.
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import ipaddress
|
import ipaddress
|
||||||
import textwrap
|
import textwrap
|
||||||
@@ -19,12 +27,17 @@ from xml.dom import minidom
|
|||||||
|
|
||||||
|
|
||||||
LOCAL_SALT_DIR='/opt/so/saltstack/local'
|
LOCAL_SALT_DIR='/opt/so/saltstack/local'
|
||||||
|
WAZUH_CONF='/nsm/wazuh/etc/ossec.conf'
|
||||||
VALID_ROLES = {
|
VALID_ROLES = {
|
||||||
'a': { 'role': 'analyst','desc': 'Analyst - 80/tcp, 443/tcp' },
|
'a': { 'role': 'analyst','desc': 'Analyst - 80/tcp, 443/tcp' },
|
||||||
'b': { 'role': 'beats_endpoint', 'desc': 'Logstash Beat - 5044/tcp' },
|
'b': { 'role': 'beats_endpoint', 'desc': 'Logstash Beat - 5044/tcp' },
|
||||||
'e': { 'role': 'elasticsearch_rest', 'desc': 'Elasticsearch REST API - 9200/tcp' },
|
'e': { 'role': 'elasticsearch_rest', 'desc': 'Elasticsearch REST API - 9200/tcp' },
|
||||||
'f': { 'role': 'strelka_frontend', 'desc': 'Strelka frontend - 57314/tcp' },
|
'f': { 'role': 'strelka_frontend', 'desc': 'Strelka frontend - 57314/tcp' },
|
||||||
|
'o': { 'role': 'osquery_endpoint', 'desc': 'Osquery endpoint - 8090/tcp' },
|
||||||
's': { 'role': 'syslog', 'desc': 'Syslog device - 514/tcp/udp' },
|
's': { 'role': 'syslog', 'desc': 'Syslog device - 514/tcp/udp' },
|
||||||
|
'w': { 'role': 'wazuh_agent', 'desc': 'Wazuh agent - 1514/tcp/udp' },
|
||||||
|
'p': { 'role': 'wazuh_api', 'desc': 'Wazuh API - 55000/tcp' },
|
||||||
|
'r': { 'role': 'wazuh_authd', 'desc': 'Wazuh registration service - 1515/tcp' }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -63,15 +76,73 @@ def ip_prompt() -> str:
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
|
def wazuh_enabled() -> bool:
|
||||||
|
for file in os.listdir(f'{LOCAL_SALT_DIR}/pillar'):
|
||||||
|
with open(file, 'r') as pillar:
|
||||||
|
if 'wazuh: 1' in pillar.read():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def root_to_str(root: ET.ElementTree) -> str:
|
||||||
|
xml_str = ET.tostring(root, encoding='unicode', method='xml').replace('\n', '')
|
||||||
|
xml_str = re.sub(r'(?:(?<=>) *)', '', xml_str)
|
||||||
|
|
||||||
|
# Remove specific substrings to better format comments on intial parse/write
|
||||||
|
xml_str = re.sub(r' -', '', xml_str)
|
||||||
|
xml_str = re.sub(r' -->', ' -->', xml_str)
|
||||||
|
|
||||||
|
dom = minidom.parseString(xml_str)
|
||||||
|
return dom.toprettyxml(indent=" ")
|
||||||
|
|
||||||
|
|
||||||
|
def rem_wl(ip):
|
||||||
|
parser = ET.XMLParser(remove_blank_text=True)
|
||||||
|
with open(WAZUH_CONF, 'rb') as wazuh_conf:
|
||||||
|
tree = ET.parse(wazuh_conf, parser)
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
global_elems = root.findall(f"global/white_list[. = '{ip}']/..")
|
||||||
|
if len(global_elems) > 0:
|
||||||
|
for g_elem in global_elems:
|
||||||
|
ge_index = list(root).index(g_elem)
|
||||||
|
if ge_index > 0 and root[list(root).index(g_elem) - 1].tag == ET.Comment:
|
||||||
|
root.remove(root[ge_index - 1])
|
||||||
|
root.remove(g_elem)
|
||||||
|
|
||||||
|
with open(WAZUH_CONF, 'w') as out:
|
||||||
|
out.write(root_to_str(root))
|
||||||
|
|
||||||
|
|
||||||
def apply(role: str, ip: str) -> int:
|
def apply(role: str, ip: str) -> int:
|
||||||
firewall_cmd = ['so-firewall', 'excludehost', role, ip]
|
firewall_cmd = ['so-firewall', 'excludehost', role, ip]
|
||||||
salt_cmd = ['salt-call', 'state.apply', '-l', 'quiet', 'firewall', 'queue=True']
|
salt_cmd = ['salt-call', 'state.apply', '-l', 'quiet', 'firewall', 'queue=True']
|
||||||
|
restart_wazuh_cmd = ['so-wazuh-restart']
|
||||||
print(f'Removing {ip} from the {role} role. This can take a few seconds...')
|
print(f'Removing {ip} from the {role} role. This can take a few seconds...')
|
||||||
cmd = subprocess.run(firewall_cmd)
|
cmd = subprocess.run(firewall_cmd)
|
||||||
if cmd.returncode == 0:
|
if cmd.returncode == 0:
|
||||||
cmd = subprocess.run(salt_cmd, stdout=subprocess.DEVNULL)
|
cmd = subprocess.run(salt_cmd, stdout=subprocess.DEVNULL)
|
||||||
else:
|
else:
|
||||||
return cmd.returncode
|
return cmd.returncode
|
||||||
|
if cmd.returncode == 0:
|
||||||
|
if wazuh_enabled and role=='analyst':
|
||||||
|
try:
|
||||||
|
rem_wl(ip)
|
||||||
|
print(f'Removed whitelist entry for {ip} from {WAZUH_CONF}', file=sys.stderr)
|
||||||
|
except Exception as e:
|
||||||
|
print(f'Failed to remove whitelist entry for {ip} from {WAZUH_CONF}', file=sys.stderr)
|
||||||
|
print(e)
|
||||||
|
return 1
|
||||||
|
print('Restarting OSSEC Server...')
|
||||||
|
cmd = subprocess.run(restart_wazuh_cmd)
|
||||||
|
else:
|
||||||
|
return cmd.returncode
|
||||||
|
else:
|
||||||
|
print(f'Commmand \'{" ".join(salt_cmd)}\' failed.', file=sys.stderr)
|
||||||
|
return cmd.returncode
|
||||||
|
if cmd.returncode != 0:
|
||||||
|
print('Failed to restart OSSEC server.')
|
||||||
|
return cmd.returncode
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
@@ -92,7 +163,11 @@ def main():
|
|||||||
group.add_argument('-b', dest='roles', action='append_const', const=VALID_ROLES['b']['role'], help="Logstash Beat - 5044/tcp")
|
group.add_argument('-b', dest='roles', action='append_const', const=VALID_ROLES['b']['role'], help="Logstash Beat - 5044/tcp")
|
||||||
group.add_argument('-e', dest='roles', action='append_const', const=VALID_ROLES['e']['role'], help="Elasticsearch REST API - 9200/tcp")
|
group.add_argument('-e', dest='roles', action='append_const', const=VALID_ROLES['e']['role'], help="Elasticsearch REST API - 9200/tcp")
|
||||||
group.add_argument('-f', dest='roles', action='append_const', const=VALID_ROLES['f']['role'], help="Strelka frontend - 57314/tcp")
|
group.add_argument('-f', dest='roles', action='append_const', const=VALID_ROLES['f']['role'], help="Strelka frontend - 57314/tcp")
|
||||||
|
group.add_argument('-o', dest='roles', action='append_const', const=VALID_ROLES['o']['role'], help="Osquery endpoint - 8090/tcp")
|
||||||
group.add_argument('-s', dest='roles', action='append_const', const=VALID_ROLES['s']['role'], help="Syslog device - 514/tcp/udp")
|
group.add_argument('-s', dest='roles', action='append_const', const=VALID_ROLES['s']['role'], help="Syslog device - 514/tcp/udp")
|
||||||
|
group.add_argument('-w', dest='roles', action='append_const', const=VALID_ROLES['w']['role'], help="Wazuh agent - 1514/tcp/udp")
|
||||||
|
group.add_argument('-p', dest='roles', action='append_const', const=VALID_ROLES['p']['role'], help="Wazuh API - 55000/tcp")
|
||||||
|
group.add_argument('-r', dest='roles', action='append_const', const=VALID_ROLES['r']['role'], help="Wazuh registration service - 1515/tcp")
|
||||||
|
|
||||||
ip_g = main_parser.add_argument_group(title='allow')
|
ip_g = main_parser.add_argument_group(title='allow')
|
||||||
ip_g.add_argument('-i', help="IP or CIDR block to disallow connections from, requires at least one role argument", metavar='', dest='ip')
|
ip_g.add_argument('-i', help="IP or CIDR block to disallow connections from, requires at least one role argument", metavar='', dest='ip')
|
||||||
@@ -1,11 +1,19 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
|
#
|
||||||
# https://securityonion.net/license; you may not use this file except in compliance with the
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# Elastic License 2.0.
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
import sys, argparse, re, docker
|
import sys, argparse, re, docker
|
||||||
from packaging.version import Version, InvalidVersion
|
from packaging.version import Version, InvalidVersion
|
||||||
|
|||||||
22
salt/common/tools/sbin/so-docker-refresh
Executable file
@@ -0,0 +1,22 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Copyright 2014,2015,2016,2017,2018,2019,2020,2021 Security Onion Solutions, LLC
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
. /usr/sbin/so-common
|
||||||
|
. /usr/sbin/so-image-common
|
||||||
|
|
||||||
|
require_manager
|
||||||
|
update_docker_containers "refresh"
|
||||||