Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-16 14:02:52 +01:00)

Compare commits: TOoSmOotH-...fixsource (23 commits)
| SHA1 |
|---|
| 3910e83436 |
| 26b329a9bd |
| 506cbc62bb |
| 285b0e4af9 |
| f9edfd6391 |
| f6301bc3e5 |
| 6c5c176b7d |
| c6d52b5eb1 |
| 7cac528389 |
| 6fe817ca4a |
| cb9a6fac25 |
| a945768251 |
| c6646e3821 |
| 99dc72cece |
| 04d6cca204 |
| 5ab6bda639 |
| f433de7e12 |
| 8ef6c2f91d |
| 7575218697 |
| 94694d394e |
| 03dd746601 |
| eec3373ae7 |
| db45ce07ed |
@@ -299,6 +299,19 @@ elasticsearch:
           hot:
             actions: {}
             min_age: 0ms
+    sos-backup:
+      index_sorting: false
+      index_template:
+        composed_of: []
+        ignore_missing_component_templates: []
+        index_patterns:
+        - sos-backup-*
+        priority: 501
+        template:
+          settings:
+            index:
+              number_of_replicas: 0
+              number_of_shards: 1
     so-assistant-chat:
       index_sorting: false
       index_template:

@@ -126,16 +126,130 @@ ADVPILLARFILE=/opt/so/saltstack/local/pillar/minions/adv_$MINION_ID.sls

 function getinstallinfo() {
   log "INFO" "Getting install info for minion $MINION_ID"
-  # Pull from file
-  INSTALLVARS=$(sudo salt "$MINION_ID" cp.get_file_str /opt/so/install.txt --out=newline_values_only)
-  if [ $? -ne 0 ]; then
+  local install_json
+  local install_text
+
+  # Reset any prior values so we fail closed if the file is missing keys
+  MAINIP=""
+  MNIC=""
+  NODE_DESCRIPTION=""
+  ES_HEAP_SIZE=""
+  PATCHSCHEDULENAME=""
+  INTERFACE=""
+  NODETYPE=""
+  CORECOUNT=""
+  LSHOSTNAME=""
+  LSHEAP=""
+  CPUCORES=""
+  IDH_MGTRESTRICT=""
+  IDH_SERVICES=""
+
+  # Pull from file (treat it as data, not code)
+  install_json=$(sudo salt "$MINION_ID" cp.get_file_str /opt/so/install.txt --out=json 2>/dev/null)
+  if [ $? -ne 0 ] || [ -z "$install_json" ]; then
     log "ERROR" "Failed to get install info from $MINION_ID"
     return 1
   fi

-  source <(echo $INSTALLVARS)
-  if [ $? -ne 0 ]; then
-    log "ERROR" "Failed to source install variables"
+  install_text=$(jq -r --arg id "$MINION_ID" '.[$id] // empty' <<<"$install_json" 2>/dev/null)
+  if [ $? -ne 0 ] || [ -z "$install_text" ] || [ "$install_text" == "null" ]; then
+    log "ERROR" "Failed to parse install info response for $MINION_ID"
+    return 1
+  fi
+
+  while IFS= read -r line; do
+    # Trim trailing CR (in case of CRLF files)
+    line=${line%$'\r'}
+
+    # Skip empty/comment lines
+    [[ -z "$line" || "$line" =~ ^[[:space:]]*# ]] && continue
+
+    if [[ "$line" =~ ^[[:space:]]*([A-Z0-9_]+)[[:space:]]*=(.*)$ ]]; then
+      local key="${BASH_REMATCH[1]}"
+      local value="${BASH_REMATCH[2]}"
+
+      # Trim leading whitespace from value (writers shouldn't include it, but tolerate it)
+      value="${value#"${value%%[![:space:]]*}"}"
+
+      # Strip a single layer of surrounding quotes
+      if [[ "$value" =~ ^\".*\"$ ]]; then
+        value="${value:1:${#value}-2}"
+      elif [[ "$value" =~ ^\'.*\'$ ]]; then
+        value="${value:1:${#value}-2}"
+      fi
+
+      case "$key" in
+        MAINIP)
+          [[ "$value" =~ ^[A-Za-z0-9.:-]+$ ]] || { log "ERROR" "Invalid MAINIP in install info"; return 1; }
+          MAINIP="$value"
+          ;;
+        MNIC)
+          [[ "$value" =~ ^[A-Za-z0-9_.:-]+$ ]] || { log "ERROR" "Invalid MNIC in install info"; return 1; }
+          MNIC="$value"
+          ;;
+        NODE_DESCRIPTION)
+          # Allow spaces and common punctuation, but reject control chars
+          [[ "$value" =~ ^[[:print:]]{0,256}$ ]] || { log "ERROR" "Invalid NODE_DESCRIPTION in install info"; return 1; }
+          NODE_DESCRIPTION="$value"
+          ;;
+        ES_HEAP_SIZE)
+          [[ "$value" =~ ^[0-9]+[kKmMgGtTpPeE]?$ ]] || { log "ERROR" "Invalid ES_HEAP_SIZE in install info"; return 1; }
+          ES_HEAP_SIZE="$value"
+          ;;
+        PATCHSCHEDULENAME)
+          [[ "$value" =~ ^[A-Za-z0-9._-]*$ ]] || { log "ERROR" "Invalid PATCHSCHEDULENAME in install info"; return 1; }
+          PATCHSCHEDULENAME="$value"
+          ;;
+        INTERFACE)
+          [[ "$value" =~ ^[A-Za-z0-9._:,-]+$ ]] || { log "ERROR" "Invalid INTERFACE in install info"; return 1; }
+          INTERFACE="$value"
+          ;;
+        NODETYPE)
+          [[ "$value" =~ ^[A-Z0-9_]+$ ]] || { log "ERROR" "Invalid NODETYPE in install info"; return 1; }
+          if ! declare -F "create${value}" >/dev/null; then
+            log "ERROR" "Unknown NODETYPE '$value' in install info"
+            return 1
+          fi
+          NODETYPE="$value"
+          ;;
+        CORECOUNT)
+          [[ "$value" =~ ^[0-9]+$ ]] || { log "ERROR" "Invalid CORECOUNT in install info"; return 1; }
+          CORECOUNT="$value"
+          ;;
+        LSHOSTNAME)
+          [[ "$value" =~ ^[A-Za-z0-9.-]+$ ]] || { log "ERROR" "Invalid LSHOSTNAME in install info"; return 1; }
+          LSHOSTNAME="$value"
+          ;;
+        LSHEAP)
+          [[ "$value" =~ ^[0-9]+[kKmMgGtTpPeE]?$ ]] || { log "ERROR" "Invalid LSHEAP in install info"; return 1; }
+          LSHEAP="$value"
+          ;;
+        CPUCORES)
+          [[ "$value" =~ ^[0-9]+$ ]] || { log "ERROR" "Invalid CPUCORES in install info"; return 1; }
+          CPUCORES="$value"
+          ;;
+        IDH_MGTRESTRICT)
+          [[ "$value" == "True" || "$value" == "False" ]] || { log "ERROR" "Invalid IDH_MGTRESTRICT in install info"; return 1; }
+          IDH_MGTRESTRICT="$value"
+          ;;
+        IDH_SERVICES)
+          [[ "$value" =~ ^[[:print:]]{0,512}$ ]] || { log "ERROR" "Invalid IDH_SERVICES in install info"; return 1; }
+          IDH_SERVICES="$value"
+          ;;
+        *)
+          # Ignore unknown keys for forward compatibility
+          :
+          ;;
+      esac
+    else
+      log "ERROR" "Invalid install info line from $MINION_ID"
+      return 1
+    fi
+  done <<<"$install_text"
+
+  if [[ -z "$NODETYPE" || -z "$MAINIP" || -z "$MNIC" ]]; then
+    log "ERROR" "Missing required install variables from $MINION_ID"
     return 1
   fi
 }

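The key shift in the hunk above is that install.txt is no longer `source`d (executed as shell code) but parsed line by line as data and validated per key. The following is a minimal hedged sketch of that parse-and-validate loop against a made-up sample file; the sample values and the echo messages are illustrative only and not part of the change itself:

```bash
#!/bin/bash
# Hypothetical sample: one well-formed value and one value carrying injected shell syntax.
sample_install_txt=$'MAINIP=192.168.1.10\nNODETYPE=SENSOR; touch /tmp/pwned'

while IFS= read -r line; do
    if [[ "$line" =~ ^([A-Z0-9_]+)=(.*)$ ]]; then
        key="${BASH_REMATCH[1]}"
        value="${BASH_REMATCH[2]}"
        case "$key" in
            MAINIP)   [[ "$value" =~ ^[A-Za-z0-9.:-]+$ ]] && echo "MAINIP ok: $value" || echo "MAINIP rejected" ;;
            NODETYPE) [[ "$value" =~ ^[A-Z0-9_]+$ ]]      && echo "NODETYPE ok: $value" || echo "NODETYPE rejected" ;;
            *)        : ;;  # ignore unknown keys
        esac
    fi
done <<<"$sample_install_txt"
# Prints "MAINIP ok: 192.168.1.10" and then "NODETYPE rejected";
# nothing from the malicious value is ever executed.
```
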
@@ -996,7 +1110,7 @@ function setupMinionFiles() {
   log "INFO" "Setting up minion files for $MINION_ID"

   # Check to see if nodetype is set
-  if [ -z $NODETYPE ]; then
+  if [ -z "$NODETYPE" ]; then
     error_msg="No node type specified"
     log "ERROR" "$error_msg"
     echo "$error_msg"

@@ -1018,7 +1132,12 @@ function setupMinionFiles() {
   fi

   # Create node-specific configuration
-  create$NODETYPE || return 1
+  local create_func="create${NODETYPE}"
+  if ! declare -F "$create_func" >/dev/null; then
+    log "ERROR" "Unknown node type '$NODETYPE'"
+    return 1
+  fi
+  "$create_func" || return 1

   # Ensure proper ownership after all content is written
   ensure_socore_ownership || return 1

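The `declare -F` guard above turns the computed function name into a checked dispatch rather than blind expansion. A small sketch of the same pattern; the `createSENSOR`/`createMANAGER` bodies and the `dispatch_nodetype` wrapper here are made up for illustration:

```bash
#!/bin/bash
# Dispatch to create<NODETYPE> only if a function by that name actually exists.
createSENSOR()  { echo "writing sensor pillar files"; }
createMANAGER() { echo "writing manager pillar files"; }

dispatch_nodetype() {
    local nodetype="$1"
    local create_func="create${nodetype}"
    if ! declare -F "$create_func" >/dev/null; then
        echo "ERROR: Unknown node type '$nodetype'" >&2
        return 1
    fi
    "$create_func"
}

dispatch_nodetype SENSOR      # -> writing sensor pillar files
dispatch_nodetype EVILTYPE    # -> ERROR: Unknown node type 'EVILTYPE'
```
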
@@ -1113,7 +1113,7 @@ suricata_idstools_removal_pre() {
 install -d -o 939 -g 939 -m 755 /opt/so/conf/soc/fingerprints
 install -o 939 -g 939 -m 644 /dev/null /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
 cat > /opt/so/conf/soc/fingerprints/suricataengine.syncBlock << EOF
-Suricata ruleset sync is blocked until this file is removed. Make sure that you have manually added any custom Suricata rulesets via SOC config - review the documentation for more details: securityonion.net/docs
+Suricata ruleset sync is blocked until this file is removed. **CRITICAL** Make sure that you have manually added any custom Suricata rulesets via SOC config before removing this file - review the documentation for more details: https://docs.securityonion.net/en/2.4/nids.html#sync-block
 EOF

 # Remove possible symlink & create salt local rules dir

@@ -1125,39 +1125,35 @@ mkdir -p /nsm/backup/detections-migration/2-4-200
 cp /usr/sbin/so-rule-update /nsm/backup/detections-migration/2-4-200
 cp /opt/so/conf/idstools/etc/rulecat.conf /nsm/backup/detections-migration/2-4-200

-if [[ -f /opt/so/conf/soc/so-detections-backup.py ]]; then
-    python3 /opt/so/conf/soc/so-detections-backup.py
+# Backup so-detection index via reindex
+echo "Creating sos-backup index template..."
+template_result=$(/sbin/so-elasticsearch-query '_index_template/sos-backup' -X PUT \
+    --retry 5 --retry-delay 15 --retry-all-errors \
+    -d '{"index_patterns":["sos-backup-*"],"priority":501,"template":{"settings":{"index":{"number_of_replicas":0,"number_of_shards":1}}}}')

-    # Verify backup by comparing counts
-    echo "Verifying detection overrides backup..."
-    es_override_count=$(/sbin/so-elasticsearch-query 'so-detection/_count' \
-        -d '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}}}' | jq -r '.count') || {
-        echo " Error: Failed to query Elasticsearch for override count"
-        exit 1
-    }
+if [[ -z "$template_result" ]] || ! echo "$template_result" | jq -e '.acknowledged == true' > /dev/null 2>&1; then
+    echo "Error: Failed to create sos-backup index template"
+    echo "$template_result"
+    exit 1
+fi

-    if [[ ! "$es_override_count" =~ ^[0-9]+$ ]]; then
-        echo " Error: Invalid override count from Elasticsearch: '$es_override_count'"
-        exit 1
-    fi
+BACKUP_INDEX="sos-backup-detection-$(date +%Y%m%d-%H%M%S)"
+echo "Backing up so-detection index to $BACKUP_INDEX..."
+reindex_result=$(/sbin/so-elasticsearch-query '_reindex?wait_for_completion=true' \
+    --retry 5 --retry-delay 15 --retry-all-errors \
+    -X POST -d "{\"source\": {\"index\": \"so-detection\"}, \"dest\": {\"index\": \"$BACKUP_INDEX\"}}")

-    backup_override_count=$(find /nsm/backup/detections/repo/*/overrides -type f 2>/dev/null | wc -l)
-
-    echo " Elasticsearch overrides: $es_override_count"
-    echo " Backed up overrides: $backup_override_count"
-
-    if [[ "$es_override_count" -gt 0 ]]; then
-        if [[ "$backup_override_count" -gt 0 ]]; then
-            echo " Override backup verified successfully"
-        else
-            echo " Error: Elasticsearch has $es_override_count overrides but backup has 0 files"
-            exit 1
-        fi
-    else
-        echo " No overrides to backup"
-    fi
+if [[ -z "$reindex_result" ]]; then
+    echo "Error: Backup of detections failed - no response from Elasticsearch"
+    exit 1
+elif echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
+    echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
+elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
+    echo "so-detection index does not exist, skipping backup"
 else
-    echo "SOC Detections backup script not found, skipping detection backup"
+    echo "Error: Backup of detections failed"
+    echo "$reindex_result"
+    exit 1
 fi

 }

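The reindex result handling above keys entirely off the JSON that Elasticsearch returns. A hedged illustration of how those `jq`/`grep` checks classify two hand-written sample responses (these payloads are fabricated for the example and are not real cluster output):

```bash
#!/bin/bash
ok_response='{"took":42,"created":17,"updated":0,"failures":[]}'
missing_index='{"error":{"type":"index_not_found_exception","reason":"no such index [so-detection]"}}'

for reindex_result in "$ok_response" "$missing_index"; do
    if echo "$reindex_result" | jq -e '.created >= 0' > /dev/null 2>&1; then
        echo "Backup complete: $(echo "$reindex_result" | jq -r '.created') documents copied"
    elif echo "$reindex_result" | grep -q "index_not_found_exception"; then
        echo "so-detection index does not exist, skipping backup"
    else
        echo "Error: Backup of detections failed"
    fi
done
# First response reports 17 documents copied; second is treated as a missing index, not a failure.
```
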
@@ -1178,11 +1174,12 @@ hash_normalized_file() {
         return 1
     fi

-    sed -E \
+    # Ensure trailing newline for consistent hashing regardless of source file
+    { sed -E \
         -e 's/^[[:space:]]+//; s/[[:space:]]+$//' \
         -e '/^$/d' \
         -e 's|--url=http://[^:]+:7788|--url=http://MANAGER:7788|' \
-        "$file" | sha256sum | awk '{print $1}'
+        "$file"; echo; } | sed '/^$/d' | sha256sum | awk '{print $1}'
 }

 # Known-default hashes for so-rule-update (ETOPEN ruleset)

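The `{ ...; echo; } | sed '/^$/d'` wrapper exists so that the same file content hashes identically whether or not it ends with a trailing newline. A quick hedged demonstration of that property; the temp file paths and the sample rule line are made up for the example:

```bash
#!/bin/bash
# Two copies of the same content, one missing the final newline.
printf 'alert tcp any any -> any any (sid:1;)\n' > /tmp/with_newline
printf 'alert tcp any any -> any any (sid:1;)'   > /tmp/without_newline

hash_one() {
    { sed -E -e 's/^[[:space:]]+//; s/[[:space:]]+$//' -e '/^$/d' "$1"; echo; } \
        | sed '/^$/d' | sha256sum | awk '{print $1}'
}

hash_one /tmp/with_newline
hash_one /tmp/without_newline   # prints the same digest as the line above
```
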
@@ -1278,6 +1275,13 @@ custom_found=0
 check_config_file "$SO_RULE_UPDATE" "KNOWN_SO_RULE_UPDATE_HASHES" || custom_found=1
 check_config_file "$RULECAT_CONF" "KNOWN_RULECAT_CONF_HASHES" || custom_found=1

+# Check for ETPRO rules on airgap systems
+if [[ $is_airgap -eq 0 ]] && grep -q 'ETPRO ' /nsm/rules/suricata/emerging-all.rules 2>/dev/null; then
+    echo "ETPRO rules detected on airgap system - custom configuration"
+    echo "ETPRO rules detected on Airgap in /nsm/rules/suricata/emerging-all.rules" >> /opt/so/conf/soc/fingerprints/suricataengine.syncBlock
+    custom_found=1
+fi
+
 # If no custom configs found, remove syncBlock
 if [[ $custom_found -eq 0 ]]; then
     echo "idstools migration completed successfully - removing Suricata engine syncBlock"

@@ -1303,6 +1307,7 @@ fi
 echo "Removing idstools symlink and scripts..."
 rm -rf /usr/sbin/so-idstools*
 sed -i '/^#\?so-idstools$/d' /opt/so/conf/so-status/so-status.conf
+crontab -l | grep -v 'so-rule-update' | crontab -

 # Backup the salt master config & manager pillar before editing it
 cp /opt/so/saltstack/local/pillar/minions/$MINIONID.sls /nsm/backup/detections-migration/2-4-200/

@@ -1868,7 +1873,7 @@ main() {
     if [[ $is_airgap -eq 0 ]]; then
       echo ""
       echo "Cleaning repos on remote Security Onion nodes."
-      salt -C 'not *_eval and not *_manager and not *_managersearch and not *_standalone and G@os:CentOS' cmd.run "yum clean all"
+      salt -C 'not *_eval and not *_manager* and not *_standalone and G@os:OEL' cmd.run "dnf clean all"
       echo ""
     fi
   fi

@@ -2653,19 +2653,15 @@ soc:
       thresholdColorRatioMax: 1
       availableModels:
       - id: sonnet-4.5
-        displayName: Claude Sonnet 4.5
+        displayName: Claude Sonnet 4.5 ($$$)
+        origin: USA
         contextLimitSmall: 200000
         contextLimitLarge: 1000000
         lowBalanceColorAlert: 500000
         enabled: true
-      - id: gptoss-120b
-        displayName: GPT-OSS 120B
-        contextLimitSmall: 128000
-        contextLimitLarge: 128000
-        lowBalanceColorAlert: 500000
-        enabled: true
       - id: qwen-235b
-        displayName: QWEN 235B
+        displayName: QWEN 235B ($)
+        origin: China
         contextLimitSmall: 256000
         contextLimitLarge: 256000
         lowBalanceColorAlert: 500000

@@ -6,6 +6,7 @@
 # This script queries Elasticsearch for Custom Detections and all Overrides,
 # and git commits them to disk at $OUTPUT_DIR

+import argparse
 import os
 import subprocess
 import json

@@ -18,10 +19,10 @@ from datetime import datetime
 urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

 # Constants
-ES_URL = "https://localhost:9200/so-detection/_search"
+DEFAULT_INDEX = "so-detection"
+DEFAULT_OUTPUT_DIR = "/nsm/backup/detections/repo"
 QUERY_DETECTIONS = '{"query": {"bool": {"must": [{"match_all": {}}, {"term": {"so_detection.ruleset": "__custom__"}}]}},"size": 10000}'
 QUERY_OVERRIDES = '{"query": {"bool": {"must": [{"exists": {"field": "so_detection.overrides"}}]}},"size": 10000}'
-OUTPUT_DIR = "/nsm/backup/detections/repo"
 AUTH_FILE = "/opt/so/conf/elasticsearch/curl.config"

 def get_auth_credentials(auth_file):

@@ -30,9 +31,10 @@ def get_auth_credentials(auth_file):
         if line.startswith('user ='):
             return line.split('=', 1)[1].strip().replace('"', '')

-def query_elasticsearch(query, auth):
+def query_elasticsearch(query, auth, index):
+    url = f"https://localhost:9200/{index}/_search"
     headers = {"Content-Type": "application/json"}
-    response = requests.get(ES_URL, headers=headers, data=query, auth=auth, verify=False)
+    response = requests.get(url, headers=headers, data=query, auth=auth, verify=False)
     response.raise_for_status()
     return response.json()

@@ -47,12 +49,12 @@ def save_content(hit, base_folder, subfolder="", extension="txt"):
         f.write(content)
     return file_path

-def save_overrides(hit):
+def save_overrides(hit, output_dir):
     so_detection = hit["_source"]["so_detection"]
     public_id = so_detection["publicId"]
     overrides = so_detection["overrides"]
     language = so_detection["language"]
-    folder = os.path.join(OUTPUT_DIR, language, "overrides")
+    folder = os.path.join(output_dir, language, "overrides")
     os.makedirs(folder, exist_ok=True)
     extension = "yaml" if language == "sigma" else "txt"
     file_path = os.path.join(folder, f"{public_id}.{extension}")

@@ -60,20 +62,20 @@ def save_overrides(hit):
         f.write('\n'.join(json.dumps(override) for override in overrides) if isinstance(overrides, list) else overrides)
     return file_path

-def ensure_git_repo():
-    if not os.path.isdir(os.path.join(OUTPUT_DIR, '.git')):
+def ensure_git_repo(output_dir):
+    if not os.path.isdir(os.path.join(output_dir, '.git')):
         subprocess.run(["git", "config", "--global", "init.defaultBranch", "main"], check=True)
-        subprocess.run(["git", "-C", OUTPUT_DIR, "init"], check=True)
-        subprocess.run(["git", "-C", OUTPUT_DIR, "remote", "add", "origin", "default"], check=True)
+        subprocess.run(["git", "-C", output_dir, "init"], check=True)
+        subprocess.run(["git", "-C", output_dir, "remote", "add", "origin", "default"], check=True)

-def commit_changes():
-    ensure_git_repo()
-    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.email", "securityonion@local.invalid"], check=True)
-    subprocess.run(["git", "-C", OUTPUT_DIR, "config", "user.name", "securityonion"], check=True)
-    subprocess.run(["git", "-C", OUTPUT_DIR, "add", "."], check=True)
-    status_result = subprocess.run(["git", "-C", OUTPUT_DIR, "status"], capture_output=True, text=True)
+def commit_changes(output_dir):
+    ensure_git_repo(output_dir)
+    subprocess.run(["git", "-C", output_dir, "config", "user.email", "securityonion@local.invalid"], check=True)
+    subprocess.run(["git", "-C", output_dir, "config", "user.name", "securityonion"], check=True)
+    subprocess.run(["git", "-C", output_dir, "add", "."], check=True)
+    status_result = subprocess.run(["git", "-C", output_dir, "status"], capture_output=True, text=True)
     print(status_result.stdout)
-    commit_result = subprocess.run(["git", "-C", OUTPUT_DIR, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
+    commit_result = subprocess.run(["git", "-C", output_dir, "commit", "-m", "Update detections and overrides"], check=False, capture_output=True)
     if commit_result.returncode == 1:
         print("No changes to commit.")
     elif commit_result.returncode == 0:

@@ -81,29 +83,41 @@ def commit_changes():
     else:
         commit_result.check_returncode()

+def parse_args():
+    parser = argparse.ArgumentParser(description="Backup custom detections and overrides from Elasticsearch")
+    parser.add_argument("--output", "-o", default=DEFAULT_OUTPUT_DIR,
+                        help=f"Output directory for backups (default: {DEFAULT_OUTPUT_DIR})")
+    parser.add_argument("--index", "-i", default=DEFAULT_INDEX,
+                        help=f"Elasticsearch index to query (default: {DEFAULT_INDEX})")
+    return parser.parse_args()
+
 def main():
+    args = parse_args()
+    output_dir = args.output
+    index = args.index
+
     try:
         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-        print(f"Backing up Custom Detections and all Overrides to {OUTPUT_DIR} - {timestamp}\n")
-
-        os.makedirs(OUTPUT_DIR, exist_ok=True)
+        print(f"Backing up Custom Detections and all Overrides to {output_dir} - {timestamp}\n")
+
+        os.makedirs(output_dir, exist_ok=True)

         auth_credentials = get_auth_credentials(AUTH_FILE)
         username, password = auth_credentials.split(':', 1)
         auth = HTTPBasicAuth(username, password)

         # Query and save custom detections
-        detections = query_elasticsearch(QUERY_DETECTIONS, auth)["hits"]["hits"]
+        detections = query_elasticsearch(QUERY_DETECTIONS, auth, index)["hits"]["hits"]
         for hit in detections:
-            save_content(hit, OUTPUT_DIR, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")
+            save_content(hit, output_dir, hit["_source"]["so_detection"]["language"], "yaml" if hit["_source"]["so_detection"]["language"] == "sigma" else "txt")

         # Query and save overrides
-        overrides = query_elasticsearch(QUERY_OVERRIDES, auth)["hits"]["hits"]
+        overrides = query_elasticsearch(QUERY_OVERRIDES, auth, index)["hits"]["hits"]
         for hit in overrides:
-            save_overrides(hit)
-
-        commit_changes()
+            save_overrides(hit, output_dir)
+
+        commit_changes(output_dir)

         timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
         print(f"Backup Completed - {timestamp}")
     except Exception as e:

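With `parse_args()` in place the backup script keeps its previous behavior when run with no arguments and can also be pointed at a different index or output directory. A hedged usage sketch, using the defaults defined above and the script path shown earlier in this change set:

```bash
# Default run: reads the so-detection index, writes to /nsm/backup/detections/repo
python3 /opt/so/conf/soc/so-detections-backup.py

# Explicitly pick the index and output directory
python3 /opt/so/conf/soc/so-detections-backup.py \
    --index so-detection \
    --output /nsm/backup/detections/repo
```
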
@@ -57,12 +57,12 @@ class TestBackupScript(unittest.TestCase):
         mock_response.json.return_value = {'hits': {'hits': []}}
         mock_response.raise_for_status = MagicMock()
         mock_get.return_value = mock_response

-        response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth)
+        response = ds.query_elasticsearch(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX)

         self.assertEqual(response, {'hits': {'hits': []}})
         mock_get.assert_called_once_with(
-            ds.ES_URL,
+            f"https://localhost:9200/{ds.DEFAULT_INDEX}/_search",
             headers={"Content-Type": "application/json"},
             data=ds.QUERY_DETECTIONS,
             auth=self.auth,

@@ -81,7 +81,7 @@ class TestBackupScript(unittest.TestCase):
     @patch('os.makedirs')
     @patch('builtins.open', new_callable=mock_open)
     def test_save_overrides(self, mock_file, mock_makedirs):
-        file_path = ds.save_overrides(self.mock_override_hit)
+        file_path = ds.save_overrides(self.mock_override_hit, self.output_dir)
         expected_path = f'{self.output_dir}/sigma/overrides/test_id.yaml'
         self.assertEqual(file_path, expected_path)
         mock_makedirs.assert_called_once_with(f'{self.output_dir}/sigma/overrides', exist_ok=True)

@@ -90,9 +90,9 @@ class TestBackupScript(unittest.TestCase):
     @patch('subprocess.run')
     def test_ensure_git_repo(self, mock_run):
         mock_run.return_value = MagicMock(returncode=0)

-        ds.ensure_git_repo()
+        ds.ensure_git_repo(self.output_dir)

         mock_run.assert_has_calls([
             call(["git", "config", "--global", "init.defaultBranch", "main"], check=True),
             call(["git", "-C", self.output_dir, "init"], check=True),

@@ -106,9 +106,9 @@ class TestBackupScript(unittest.TestCase):
         mock_commit_result = MagicMock(returncode=1)
         # Ensure sufficient number of MagicMock instances for each subprocess.run call
         mock_run.side_effect = [mock_status_result, mock_commit_result, MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0), MagicMock(returncode=0)]

         print("Running test_commit_changes...")
-        ds.commit_changes()
+        ds.commit_changes(self.output_dir)
         print("Finished test_commit_changes.")

         mock_run.assert_has_calls([

@@ -120,39 +120,45 @@ class TestBackupScript(unittest.TestCase):
         ])

     @patch('builtins.print')
-    @patch('so-detections-backup.commit_changes')
-    @patch('so-detections-backup.save_overrides')
-    @patch('so-detections-backup.save_content')
-    @patch('so-detections-backup.query_elasticsearch')
-    @patch('so-detections-backup.get_auth_credentials')
+    @patch.object(ds, 'commit_changes')
+    @patch.object(ds, 'save_overrides')
+    @patch.object(ds, 'save_content')
+    @patch.object(ds, 'query_elasticsearch')
+    @patch.object(ds, 'get_auth_credentials')
     @patch('os.makedirs')
-    def test_main(self, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
+    @patch.object(ds, 'parse_args')
+    def test_main(self, mock_parse_args, mock_makedirs, mock_get_auth, mock_query, mock_save_content, mock_save_overrides, mock_commit, mock_print):
+        mock_args = MagicMock()
+        mock_args.output = self.output_dir
+        mock_args.index = ds.DEFAULT_INDEX
+        mock_parse_args.return_value = mock_args
         mock_get_auth.return_value = self.auth_credentials
         mock_query.side_effect = [
             {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}}]}},
             {'hits': {'hits': [{"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}]}}
         ]

         with patch('datetime.datetime') as mock_datetime:
             mock_datetime.now.return_value.strftime.return_value = "2024-05-23 20:49:44"
             ds.main()

         mock_makedirs.assert_called_once_with(self.output_dir, exist_ok=True)
         mock_get_auth.assert_called_once_with(ds.AUTH_FILE)
         mock_query.assert_has_calls([
-            call(ds.QUERY_DETECTIONS, self.auth),
-            call(ds.QUERY_OVERRIDES, self.auth)
+            call(ds.QUERY_DETECTIONS, self.auth, ds.DEFAULT_INDEX),
+            call(ds.QUERY_OVERRIDES, self.auth, ds.DEFAULT_INDEX)
         ])
         mock_save_content.assert_called_once_with(
             {"_source": {"so_detection": {"publicId": "1", "content": "content1", "language": "sigma"}}},
             self.output_dir,
             "sigma",
             "yaml"
         )
         mock_save_overrides.assert_called_once_with(
-            {"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}}
+            {"_source": {"so_detection": {"publicId": "2", "overrides": [{"key": "value"}], "language": "suricata"}}},
+            self.output_dir
         )
-        mock_commit.assert_called_once()
+        mock_commit.assert_called_once_with(self.output_dir)
         mock_print.assert_called()

 if __name__ == '__main__':

@@ -608,6 +608,18 @@ soc:
           label: Delete Unreferenced (Deletes rules that are no longer referenced by ruleset source)
           forcedType: bool
           required: False
+        - field: proxyURL
+          label: HTTP/HTTPS proxy URL for downloading the ruleset.
+          required: False
+        - field: proxyUsername
+          label: Proxy authentication username.
+          required: False
+        - field: proxyPassword
+          label: Proxy authentication password.
+          required: False
+        - field: proxyCACert
+          label: Path to CA certificate file for MITM proxy verification.
+          required: False
       airgap: *serulesetSources
     navigator:
       intervalMinutes:

@@ -696,6 +708,9 @@ soc:
         - field: displayName
           label: Display Name
          required: True
+        - field: origin
+          label: Country of Origin for the Model Training
+          required: false
         - field: contextLimitSmall
           label: Context Limit (Small)
           forcedType: int

@@ -17,14 +17,23 @@ query() {

 STATS=$(query "ruleset-stats")
 RELOAD=$(query "ruleset-reload-time")
+[ -z "$RELOAD" ] && RELOAD='{}'

-if echo "$STATS" | jq -e '.return == "OK"' > /dev/null 2>&1; then
-    LOADED=$(echo "$STATS" | jq -r '.message[0].rules_loaded')
-    FAILED=$(echo "$STATS" | jq -r '.message[0].rules_failed')
-    LAST_RELOAD=$(echo "$RELOAD" | jq -r '.message[0].last_reload')
+# Outputs valid JSON on success, empty on failure
+OUTPUT=$(jq -n \
+    --argjson stats "$STATS" \
+    --argjson reload "$RELOAD" \
+    'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number" and ($stats.message[0].rules_failed | type) == "number" then
+    {
+        rules_loaded: $stats.message[0].rules_loaded,
+        rules_failed: $stats.message[0].rules_failed,
+        last_reload: ($reload.message[0].last_reload // ""),
+        return: "OK"
+    }
+    else empty end' 2>/dev/null)

-    jq -n --argjson loaded "$LOADED" --argjson failed "$FAILED" --arg reload "$LAST_RELOAD" \
-        '{rules_loaded: $loaded, rules_failed: $failed, last_reload: $reload, return: "OK"}' > "$OUTFILE"
+if [ -n "$OUTPUT" ]; then
+    echo "$OUTPUT" > "$OUTFILE"
 else
     echo '{"return":"FAIL"}' > "$OUTFILE"
 fi

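The rewritten stats collection builds the output with a single jq program that type-checks the fields and emits nothing on bad input, so a malformed payload can no longer be spliced into the output JSON. A hedged standalone illustration with fabricated payloads (the real ones come from the `query()` calls above):

```bash
#!/bin/bash
GOOD_STATS='{"return":"OK","message":[{"rules_loaded":43210,"rules_failed":2}]}'
BAD_STATS='{"return":"OK","message":[{"rules_loaded":"oops","rules_failed":2}]}'
RELOAD='{"message":[{"last_reload":"2025-01-01T00:00:00.000000+0000"}]}'

emit() {
    jq -n --argjson stats "$1" --argjson reload "$RELOAD" \
        'if $stats.return == "OK" and ($stats.message[0].rules_loaded | type) == "number"
            and ($stats.message[0].rules_failed | type) == "number" then
            {rules_loaded: $stats.message[0].rules_loaded,
             rules_failed: $stats.message[0].rules_failed,
             last_reload: ($reload.message[0].last_reload // ""),
             return: "OK"}
         else empty end' 2>/dev/null
}

emit "$GOOD_STATS"   # prints the validated JSON object
emit "$BAD_STATS"    # prints nothing, so the caller falls back to {"return":"FAIL"}
```
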
@@ -18,11 +18,15 @@ if [[ ! "`pidof -x $(basename $0) -o %PPID`" ]]; then
     if [ -f "$STATSFILE" ] && [ $(($(date +%s) - $(stat -c %Y "$STATSFILE"))) -lt 90 ] && jq -e '.return == "OK" and .rules_loaded != null and .rules_failed != null' "$STATSFILE" > /dev/null 2>&1; then
         LOADED=$(jq -r '.rules_loaded' "$STATSFILE")
         FAILED=$(jq -r '.rules_failed' "$STATSFILE")
-        RELOAD_TIME=$(jq -r '.last_reload // ""' "$STATSFILE")
+        RELOAD_TIME=$(jq -r 'if .last_reload then .last_reload else "" end' "$STATSFILE")

-        echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
+        if [ -n "$RELOAD_TIME" ]; then
+            echo "surirules loaded=${LOADED}i,failed=${FAILED}i,reload_time=\"${RELOAD_TIME}\",status=\"ok\""
+        else
+            echo "surirules loaded=${LOADED}i,failed=${FAILED}i,status=\"ok\""
+        fi
     else
-        echo "surirules loaded=0i,failed=0i,reload_time=\"\",status=\"unknown\""
+        echo "surirules loaded=0i,failed=0i,status=\"unknown\""
     fi

 fi

@@ -1504,7 +1504,13 @@ networking_needful() {
   if [[ $reinit_networking ]] || ! [[ -f $net_init_file ]]; then
     collect_hostname
   fi
-  [[ ! ( $is_eval || $is_import ) ]] && whiptail_node_description
+  if [[ ! ( $is_eval || $is_import ) ]]; then
+    whiptail_node_description
+    while [[ "$NODE_DESCRIPTION" =~ [[:cntrl:]] ]]; do
+      whiptail_error_message "Node description cannot contain control characters. Please enter a new description."
+      whiptail_node_description
+    done
+  fi
   if [[ $reinit_networking ]] || ! [[ -f $net_init_file ]]; then
     network_init_whiptail
   else

@@ -1756,6 +1762,50 @@ backup_dir() {
 }

 drop_install_options() {
+  # Ensure values written to install.txt won't later fail manager-side parsing in so-minion
+  strip_control_chars() {
+    # bash: remove ASCII control characters (incl. newlines/tabs/ESC)
+    printf '%s' "$1" | tr -d '[:cntrl:]'
+  }
+
+  validate_install_txt_vars() {
+    # Sanitize first (fail closed if still invalid)
+    MAINIP="$(strip_control_chars "$MAINIP")"
+    MNIC="$(strip_control_chars "$MNIC")"
+    NODE_DESCRIPTION="$(strip_control_chars "$NODE_DESCRIPTION")"
+    ES_HEAP_SIZE="$(strip_control_chars "$ES_HEAP_SIZE")"
+    PATCHSCHEDULENAME="$(strip_control_chars "$PATCHSCHEDULENAME")"
+    INTERFACE="$(strip_control_chars "$INTERFACE")"
+    HOSTNAME="$(strip_control_chars "$HOSTNAME")"
+    LS_HEAP_SIZE="$(strip_control_chars "$LS_HEAP_SIZE")"
+    IDH_MGTRESTRICT="$(strip_control_chars "$IDH_MGTRESTRICT")"
+    IDH_SERVICES="$(strip_control_chars "$IDH_SERVICES")"
+
+    valid_ip4 "$MAINIP" || return 1
+    [[ "$MNIC" =~ ^[A-Za-z0-9_.:-]+$ ]] || return 1
+    [[ "$NODE_DESCRIPTION" =~ ^[[:print:]]{0,256}$ ]] || return 1
+    [[ "$ES_HEAP_SIZE" =~ ^[0-9]+[kKmMgGtTpPeE]?$ ]] || return 1
+    [[ "$PATCHSCHEDULENAME" =~ ^[A-Za-z0-9._-]*$ ]] || return 1
+    [[ "$INTERFACE" =~ ^[A-Za-z0-9._:,-]+$ ]] || return 1
+    valid_hostname "$HOSTNAME" || return 1
+    [[ "$LS_HEAP_SIZE" =~ ^[0-9]+[kKmMgGtTpPeE]?$ ]] || return 1
+    [[ "$lb_procs" =~ ^[0-9]+$ ]] || return 1
+    [[ "$num_cpu_cores" =~ ^[0-9]+$ ]] || return 1
+    [[ -z "$IDH_MGTRESTRICT" || "$IDH_MGTRESTRICT" == "True" || "$IDH_MGTRESTRICT" == "False" ]] || return 1
+    [[ -z "$IDH_SERVICES" || "$IDH_SERVICES" =~ ^[[:print:]]{0,512}$ ]] || return 1
+
+    return 0
+  }
+
+  if ! validate_install_txt_vars; then
+    if declare -F whiptail_error_message >/dev/null; then
+      whiptail_error_message "One or more setup values were invalid and would cause the manager to reject this node when adding it. Please re-run setup and verify hostname, management IP/interface, and node description."
+    else
+      echo "Error: invalid setup values detected; refusing to write /opt/so/install.txt"
+    fi
+    return 1
+  fi
+
   # Drop the install Variable
   echo "MAINIP=$MAINIP" > /opt/so/install.txt
   echo "MNIC=$MNIC" >> /opt/so/install.txt

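The setup-side validation mirrors the checks that so-minion now applies on the manager, so bad values are caught before they ever reach install.txt. A hedged sketch of how the control-character stripping behaves; the sample description string is made up for illustration:

```bash
#!/bin/bash
strip_control_chars() {
    printf '%s' "$1" | tr -d '[:cntrl:]'
}

# A description containing a newline and a tab is flattened to printable text only.
desc=$'web sensor\n\track 4'
clean=$(strip_control_chars "$desc")
printf '%s\n' "$clean"    # -> web sensorrack 4
[[ "$clean" =~ ^[[:print:]]{0,256}$ ]] && echo "NODE_DESCRIPTION accepted"
```
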
@@ -903,8 +903,9 @@ whiptail_management_nic() {
   local exitstatus=$?
   whiptail_check_exitstatus $exitstatus

-  while [ -z "$MNIC" ]
+  while [ -z "$MNIC" ] || [[ "$MNIC" =~ [[:cntrl:]] ]] || [[ ! "$MNIC" =~ ^[A-Za-z0-9_.:-]+$ ]]
   do
     whiptail_invalid_input
     MNIC=$(whiptail --title "$whiptail_title" --menu "Please select the NIC you would like to use for management.\n\nUse the arrow keys to move around and the Enter key to select." 22 75 12 "${nic_list_management[@]}" 3>&1 1>&2 2>&3 )
     local exitstatus=$?
     whiptail_check_exitstatus $exitstatus

@@ -1098,6 +1099,14 @@ whiptail_node_description() {

   local exitstatus=$?
   whiptail_check_exitstatus $exitstatus
+
+  while [[ "$NODE_DESCRIPTION" =~ [[:cntrl:]] ]]; do
+    whiptail_error_message "Node description cannot contain control characters. Please enter a new description."
+    NODE_DESCRIPTION=$(whiptail --title "$whiptail_title" \
+      --inputbox "Enter a short description for the node or press ENTER to leave blank:" 10 75 3>&1 1>&2 2>&3)
+    local exitstatus=$?
+    whiptail_check_exitstatus $exitstatus
+  done
 }

 whiptail_ntp_ask() {