Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)
Merge pull request #9641 from Security-Onion-Solutions/2.4/firewall
2.4/firewall
@@ -1,7 +1,5 @@
{% set node_types = {} %}
{% set manage_alived = salt.saltutil.runner('manage.alived', show_ip=True) %}
{% set manager = grains.master %}
{% set manager_type = manager.split('_')|last %}
{% for minionid, ip in salt.saltutil.runner('mine.get', tgt='*', fun='network.ip_addrs', tgt_type='glob') | dictsort() %}
{% set hostname = minionid.split('_')[0] %}
{% set node_type = minionid.split('_')[1] %}
@@ -24,10 +22,10 @@

node_data:
{% for node_type, host_values in node_types.items() %}
{{node_type}}:
{% for hostname, details in host_values.items() %}
{{hostname}}:
ip: {{details.ip}}
alive: {{ details.alive }}
{{hostname}}:
ip: {{details.ip}}
alive: {{ details.alive }}
role: {{node_type}}
{% endfor %}
{% endfor %}
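For context, the template above publishes each minion's IP and liveness into a node_data pillar. Because this mirror strips the diff's +/- markers, the final nesting has to be inferred; judging from the so-soc change later in this diff, which reads pillar.node_data[GLOBALS.influxdb_host].ip, the rendered pillar appears to end up keyed by hostname. A plausible rendering for one host (hostname, IP, and role below are hypothetical):

    node_data:
      sensor1:
        ip: 192.0.2.21
        alive: True
        role: sensor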
@@ -10,6 +10,7 @@ base:
- sensoroni.adv_sensoroni
- telegraf.soc_telegraf
- telegraf.adv_telegraf
- node_data.ips

'* and not *_eval and not *_import':
- logstash.nodes
@@ -23,11 +24,15 @@ base:
- logstash
- logstash.manager
- logstash.search
- logstash.soc_logstash
- logstash.adv_logstash
- elasticsearch.index_templates

'*_manager':
- logstash
- logstash.manager
- logstash.soc_logstash
- logstash.adv_logstash
- elasticsearch.index_templates

'*_manager or *_managersearch':
@@ -51,6 +56,8 @@ base:
- redis.adv_redis
- influxdb.soc_influxdb
- influxdb.adv_influxdb
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- backup.soc_backup
- backup.adv_backup
- minions.{{ grains.id }}
@@ -76,6 +83,7 @@ base:
- soc_global
- kratos.soc_kratos
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- manager.soc_manager
- soc.soc_soc
- kratos.soc_kratos
@@ -94,6 +102,7 @@ base:
- logstash.manager
- logstash.search
- logstash.soc_logstash
- logstash.adv_logstash
- elasticsearch.index_templates
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
@@ -111,6 +120,7 @@ base:
- influxdb.soc_influxdb
- influxdb.adv_influxdb
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- manager.soc_manager
- soc.soc_soc
- backup.soc_backup
@@ -134,6 +144,8 @@ base:
'*_searchnode':
- logstash
- logstash.search
- logstash.soc_logstash
- logstash.adv_logstash
- elasticsearch.index_templates
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
@@ -148,6 +160,8 @@ base:
'*_receiver':
- logstash
- logstash.receiver
- logstash.soc_logstash
- logstash.adv_logstash
{% if salt['file.file_exists']('/opt/so/saltstack/local/pillar/elasticsearch/auth.sls') %}
- elasticsearch.auth
{% endif %}
@@ -169,6 +183,7 @@ base:
{% endif %}
- kratos.soc_kratos
- elasticsearch.soc_elasticsearch
- elasticsearch.adv_elasticsearch
- manager.soc_manager
- soc.soc_soc
- soc_global
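The new logstash.soc_logstash and logstash.adv_logstash includes mean that every Logstash-bearing role now also pulls per-component override pillars. As a rough sketch only, an advanced override for one of these roles might contain something like the following (file path and values are hypothetical; the keys reuse the logstash_settings keys that appear later in this diff):

    logstash_settings:
      ls_pipeline_workers: 8
      ls_pipeline_batch_size: 250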
@@ -119,6 +119,18 @@ function add_elastic_to_minion() {
" " >> $PILLARFILE
}

function add_logstash_to_minion() {
# Create the logstash advanced pillar
printf '%s\n'\
"logstash_settings:"\
" ls_host: '$LSHOSTNAME'"\
" ls_pipeline_batch_size: 125"\
" ls_input_threads: 1"\
" lsheap: $LSHEAP"\
" ls_pipeline_workers: $CPUCORES"\
" " >> $PILLARFILE
}

# Analyst Workstation
function add_analyst_to_minion() {
printf '%s\n'\
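add_logstash_to_minion appends a logstash_settings block to the minion pillar using the LSHOSTNAME, LSHEAP, and CPUCORES values handed over from setup. Assuming, purely for illustration, a host named standalone1 with a 4g Logstash heap and 8 CPU cores, the appended block would look roughly like:

    logstash_settings:
      ls_host: 'standalone1'
      ls_pipeline_batch_size: 125
      ls_input_threads: 1
      lsheap: 4g
      ls_pipeline_workers: 8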
@@ -167,6 +179,7 @@ function add_sensor_to_minion() {

function createEVAL() {
add_elastic_to_minion
add_logstash_to_minion
add_sensor_to_minion
}

@@ -176,20 +189,24 @@ function createIDHNODE() {

function createIMPORT() {
add_elastic_to_minion
add_logstash_to_minion
add_sensor_to_minion
}

function createHEAVYNODE() {
add_elastic_to_minion
add_logstash_to_minion
add_sensor_to_minion
}

function createMANAGER() {
add_elastic_to_minion
add_logstash_to_minion
}

function createMANAGERSEARCH() {
add_elastic_to_minion
add_logstash_to_minion
}

function createSENSOR() {
@@ -198,10 +215,12 @@ function createSENSOR() {

function createSEARCHNODE() {
add_elastic_to_minion
add_logstash_to_minion
}

function createSTANDALONE() {
add_elastic_to_minion
add_logstash_to_minion
add_sensor_to_minion
}
@@ -94,10 +94,10 @@ wait_for_influxdb:
- ssl: True
- verify_ssl: False
- status: 200
- timeout: 30
- timeout: 10
- retry:
attempts: 5
interval: 60
attempts: 20
interval: 5
- require:
- docker_container: so-influxdb
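The retry change trades a few long waits for many short ones: the old settings allowed roughly 5 attempts with a 30 s request timeout and a 60 s pause between them (on the order of 6-7 minutes worst case), while the new 20 attempts with a 10 s timeout and 5 s pause poll far more often and cap the worst case at roughly 5 minutes, so a healthy InfluxDB is detected much sooner.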
@@ -110,8 +110,9 @@ so-soc:
- /opt/so/conf/soc/soc_users_roles:/opt/sensoroni/rbac/users_roles:rw
- /opt/so/conf/soc/salt:/opt/sensoroni/salt:rw
- /opt/so/saltstack:/opt/so/saltstack:rw
{%- if salt['pillar.get']('nodestab', {}) %}
- extra_hosts:
- {{GLOBALS.influxdb_host}}:{{pillar.node_data[GLOBALS.influxdb_host].ip}}
{%- if salt['pillar.get']('nodestab', {}) %}
{%- for SN, SNDATA in salt['pillar.get']('nodestab', {}).items() %}
- {{ SN.split('_')|first }}:{{ SNDATA.ip }}
{%- endfor %}
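The extra_hosts block now resolves the InfluxDB host's address through the node_data pillar and still appends one entry per nodestab host. A sketch of how the rendered container option might come out, with hypothetical hostnames and IPs:

    - extra_hosts:
      - manager1:192.0.2.10
      - sensor1:192.0.2.21
      - sensor2:192.0.2.22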
salt/vars/manager.map.jinja (new file, 15 lines)
@@ -0,0 +1,15 @@
{% from 'vars/elasticsearch.map.jinja' import ELASTICSEARCH_GLOBALS %}
{% from 'vars/logstash.map.jinja' import LOGSTASH_GLOBALS %}

{% set ROLE_GLOBALS = {} %}

{% set MANAGER_GLOBALS =
[
ELASTICSEARCH_GLOBALS,
LOGSTASH_GLOBALS
]
%}

{% for sg in MANAGER_GLOBALS %}
{% do salt['defaults.merge'](ROLE_GLOBALS, sg, merge_lists=False, in_place=True) %}
{% endfor %}
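manager.map.jinja folds the per-component defaults into a single ROLE_GLOBALS dictionary via defaults.merge, so templates that import it can read Elasticsearch and Logstash settings from one map. Conceptually the result is just the union of the two inputs, e.g. (the keys below are hypothetical placeholders, not the actual contents of the elasticsearch/logstash maps):

    elasticsearch:
      some_setting: value-from-ELASTICSEARCH_GLOBALS
    logstash:
      some_setting: value-from-LOGSTASH_GLOBALS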
@@ -200,7 +200,7 @@ check_service_status() {
}

check_web_pass() {
info Making sure web credential passwords match
info "Making sure web credential passwords match"
check_pass_match "$WEBPASSWD1" "$WEBPASSWD2" "WPMATCH"
}

@@ -1301,15 +1301,7 @@ idh_pillar() {
logstash_pillar() {
# Create the logstash advanced pillar
touch $adv_logstash_pillar_file
title "Create the logstash pillar"
printf '%s\n'\
"logstash_settings:"\
" ls_host: '$HOSTNAME'"\
" ls_pipeline_batch_size: 125"\
" ls_input_threads: 1"\
" lsheap: $NODE_LS_HEAP_SIZE"\
" ls_pipeline_workers: $num_cpu_cores"\
"" > "$logstash_pillar_file"
touch $logstash_pillar_file
}

# Set Logstash heap size based on total memory
@@ -1333,10 +1325,6 @@ ls_heapsize() {
esac
export LS_HEAP_SIZE

if [[ "$install_type" =~ ^(EVAL|MANAGERSEARCH|STANDALONE)$ ]]; then
NODE_LS_HEAP_SIZE=$LS_HEAP_SIZE
export NODE_LS_HEAP_SIZE
fi
}

idstools_pillar() {
@@ -1881,6 +1869,9 @@ drop_install_options() {
NODETYPE=${install_type^^}
echo "NODETYPE=$NODETYPE" >> /opt/so/install.txt
echo "CORECOUNT=$lb_procs" >> /opt/so/install.txt
echo "LSHOSTNAME=$HOSTNAME" >> /opt/so/install.txt
echo "LSHEAP=$LS_HEAP_SIZE" >> /opt/so/install.txt
echo "CPUCORES=$num_cpu_cores" >> /opt/so/install.txt
}

remove_package() {
@@ -2439,4 +2430,4 @@ verify_setup() {
else
whiptail_setup_failed
fi
}
}
@@ -373,6 +373,7 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_airgap
fi
detect_cloud
set_minion_info
set_default_log_size >> $setup_log 2>&1
info "Verifying all network devices are managed by Network Manager that should be"
check_network_manager_conf
@@ -394,6 +395,7 @@ if ! [[ -f $install_opt_file ]]; then
whiptail_airgap
fi
detect_cloud
set_minion_info
set_default_log_size >> $setup_log 2>&1
info "Verifying all network devices are managed by Network Manager that should be"
check_network_manager_conf
@@ -441,6 +443,7 @@ if ! [[ -f $install_opt_file ]]; then
collect_mngr_hostname
add_mngr_ip_to_hosts
check_manager_connection
set_minion_info
whiptail_end_settings

elif [[ $is_idh ]]; then
@@ -450,6 +453,7 @@ if ! [[ -f $install_opt_file ]]; then
collect_mngr_hostname
add_mngr_ip_to_hosts
check_manager_connection
set_minion_info
whiptail_end_settings

elif [[ $is_import ]]; then
@@ -481,6 +485,7 @@ if ! [[ -f $install_opt_file ]]; then
collect_mngr_hostname
add_mngr_ip_to_hosts
check_manager_connection
set_minion_info
whiptail_end_settings
fi

@@ -541,6 +546,9 @@ if ! [[ -f $install_opt_file ]]; then
export PATCHSCHEDULENAME=$PATCHSCHEDULENAME
export INTERFACE="bond0"
export CORECOUNT=$lb_procs
export LSHOSTNAME=$HOSTNAME
export LSHEAP=$LS_HEAP_SIZE
export CPUCORES=$num_cpu_cores
logCmd "so-minion -o=setup"
title "Creating Global SLS"