Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 09:12:45 +01:00)
Merge pull request #7564 from Security-Onion-Solutions/removethehive
Removethehive
@@ -1,6 +1,5 @@
|
||||
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
|
||||
{% set WAZUH = salt['pillar.get']('global:wazuh', '0') %}
|
||||
{% set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
|
||||
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||
{% set FREQSERVER = salt['pillar.get']('manager:freq', '0') %}
|
||||
{% set DOMAINSTATS = salt['pillar.get']('manager:domainstats', '0') %}
|
||||
@@ -273,10 +272,6 @@
|
||||
{% do allowed_states.append('elastalert') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (THEHIVE != 0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('thehive') %}
|
||||
{% endif %}
|
||||
|
||||
{% if (PLAYBOOK !=0) and grains.role in ['so-eval', 'so-manager', 'so-standalone', 'so-managersearch'] %}
|
||||
{% do allowed_states.append('playbook') %}
|
||||
{% endif %}
|
||||
|
||||
@@ -17,5 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop cortex $1
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop cortex $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
|
||||
@@ -17,38 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <new-user-name>"
|
||||
echo ""
|
||||
echo "Adds a new user to Cortex. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||
CORTEX_ORG_NAME=$(lookup_pillar cortexorgname)
|
||||
CORTEX_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs CORTEX_PASS
|
||||
|
||||
# Create new user in Cortex
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user" -d "{\"name\": \"$CORTEX_USER\",\"roles\": [\"read\",\"analyze\",\"orgadmin\"],\"organization\": \"$CORTEX_ORG_NAME\",\"login\": \"$CORTEX_USER\",\"password\" : \"$CORTEX_PASS\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully added user to Cortex."
|
||||
else
|
||||
echo "Unable to add user to Cortex; user might already exist."
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
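(Historical context for the removed helper above: it read the new account's password from STDIN, so a non-interactive invocation would have looked roughly like the sketch below; the username and password are made-up examples.)
    echo 'S3cureExamplePass!' | so-cortex-user-add jdoe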
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name> <true|false>"
|
||||
echo ""
|
||||
echo "Enables or disables a user in Cortex."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
CORTEX_KEY=$(lookup_pillar cortexorguserkey)
|
||||
CORTEX_API_URL="$(lookup_pillar url_base)/cortex/api"
|
||||
CORTEX_USER=$USER
|
||||
|
||||
case "${2^^}" in
|
||||
FALSE | NO | 0)
|
||||
CORTEX_STATUS=Locked
|
||||
;;
|
||||
TRUE | YES | 1)
|
||||
CORTEX_STATUS=Ok
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
|
||||
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "https://$CORTEX_API_URL/user/${CORTEX_USER}" -d "{\"status\":\"${CORTEX_STATUS}\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully updated user in Cortex."
|
||||
else
|
||||
echo "Failed to update user in Cortex."
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,9 +17,7 @@
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
IP={{ salt['grains.get']('ip_interfaces').get(salt['pillar.get']('sensor:mainint', salt['pillar.get']('manager:mainint', salt['pillar.get']('elasticsearch:mainint', salt['pillar.get']('host:mainint')))))[0] }}
|
||||
ESPORT=9200
|
||||
THEHIVEESPORT=9400
|
||||
|
||||
echo "Removing read only attributes for indices..."
|
||||
echo
|
||||
{{ ELASTICCURL }} -s -k -XPUT -H "Content-Type: application/json" -L https://$IP:9200/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
|
||||
{{ ELASTICCURL }} -XPUT -H "Content-Type: application/json" -L http://$IP:9400/_all/_settings -d '{"index.blocks.read_only_allow_delete": null}' 2>&1 | if grep -q ack; then echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi; 2>/dev/null | if grep -q ack; then echo "Index settings updated..."; else echo "There was an issue updating the read-only attribute. Please ensure Elasticsearch is running.";fi;
|
||||
|
||||
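(A quick way to confirm the blocks were actually cleared — a hypothetical follow-up check, not part of the original script — is to read the settings back; the flag disappears once it has been set to null:)
    {{ ELASTICCURL }} -s -k -XGET -L "https://$IP:9200/_all/_settings" | grep read_only_allow_delete || echo "No read-only blocks remain"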
@@ -75,9 +75,6 @@ container_list() {
|
||||
"so-strelka-manager"
|
||||
"so-suricata"
|
||||
"so-telegraf"
|
||||
"so-thehive"
|
||||
"so-thehive-cortex"
|
||||
"so-thehive-es"
|
||||
"so-wazuh"
|
||||
"so-zeek"
|
||||
)
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Usage: so-restart filebeat | kibana | playbook | thehive
|
||||
# Usage: so-restart filebeat | kibana | playbook
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
@@ -31,7 +31,6 @@ if [ $# -ge 1 ]; then
|
||||
fi
|
||||
|
||||
case $1 in
|
||||
"cortex") docker stop so-thehive-cortex so-thehive && docker rm so-thehive-cortex so-thehive && salt-call state.apply hive queue=True;;
|
||||
"steno") docker stop so-steno && docker rm so-steno && salt-call state.apply pcap queue=True;;
|
||||
*) docker stop so-$1 ; docker rm so-$1 ; salt-call state.apply $1 queue=True;;
|
||||
esac
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Usage: so-start all | filebeat | kibana | playbook | thehive
|
||||
# Usage: so-start all | filebeat | kibana | playbook
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
|
||||
@@ -17,5 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop thehive-es $1
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop thehive-es $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-restart thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-start thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,4 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
/usr/sbin/so-stop thehive $1
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,38 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <new-user-name>"
|
||||
echo ""
|
||||
echo "Adds a new user to TheHive. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHIVE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs THEHIVE_PASS
|
||||
|
||||
check_password_and_exit "$THEHIVE_PASS"
|
||||
|
||||
# Create new user in TheHive
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHIVE_API_URL/user" -d "{\"login\" : \"$THEHIVE_USER\",\"name\" : \"$THEHIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$THEHIVE_PASS\"}")
|
||||
if [[ "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully added user to TheHive"
|
||||
else
|
||||
echo "Unable to add user to TheHive; user might already exist"
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name> <true|false>"
|
||||
echo ""
|
||||
echo "Enables or disables a user in TheHive."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 2 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHIVE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
case "${2^^}" in
|
||||
FALSE | NO | 0)
|
||||
THEHIVE_STATUS=Locked
|
||||
;;
|
||||
TRUE | YES | 1)
|
||||
THEHIVE_STATUS=Ok
|
||||
;;
|
||||
*)
|
||||
usage
|
||||
;;
|
||||
esac
|
||||
|
||||
resp=$(curl -sk -XPATCH -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHIVE_API_URL/user/${THEHIVE_USER}" -d "{\"status\":\"${THEHIVE_STATUS}\" }")
|
||||
if [[ "$resp" =~ \"status\":\"Locked\" || "$resp" =~ \"status\":\"Ok\" ]]; then
|
||||
echo "Successfully updated user in TheHive"
|
||||
else
|
||||
echo "Failed to update user in TheHive"
|
||||
echo "$resp"
|
||||
exit 2
|
||||
fi
|
||||
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
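(As the so-user wrapper further down in this diff shows, the removed enable/disable helper was driven like this; the address is a placeholder:)
    so-thehive-user-enable analyst@example.com true    # unlock the account (status Ok)
    so-thehive-user-enable analyst@example.com false   # lock the account (status Locked)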
@@ -17,41 +17,4 @@
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
usage() {
|
||||
echo "Usage: $0 <user-name>"
|
||||
echo ""
|
||||
echo "Update password for an existing TheHive user. The new password will be read from STDIN."
|
||||
exit 1
|
||||
}
|
||||
|
||||
if [ $# -ne 1 ]; then
|
||||
usage
|
||||
fi
|
||||
|
||||
USER=$1
|
||||
|
||||
THEHIVE_KEY=$(lookup_pillar hivekey)
|
||||
THEHIVE_API_URL="$(lookup_pillar url_base)/thehive/api"
|
||||
THEHIVE_USER=$USER
|
||||
|
||||
# Read password for new user from stdin
|
||||
test -t 0
|
||||
if [[ $? == 0 ]]; then
|
||||
echo "Enter new password:"
|
||||
fi
|
||||
read -rs THEHIVE_PASS
|
||||
|
||||
if ! check_password "$THEHIVE_PASS"; then
|
||||
echo "Password is invalid. Please exclude single quotes, double quotes, dollar signs, and backslashes from the password."
|
||||
exit 2
|
||||
fi
|
||||
|
||||
# Change password for user in TheHive
|
||||
resp=$(curl -sk -XPOST -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -L "https://$THEHIVE_API_URL/user/${THEHIVE_USER}/password/set" -d "{\"password\" : \"$THEHIVE_PASS\"}")
|
||||
if [[ -z "$resp" ]]; then
|
||||
echo "Successfully updated TheHive user password"
|
||||
else
|
||||
echo "Unable to update TheHive user password"
|
||||
echo $resp
|
||||
exit 2
|
||||
fi
|
||||
echo "TheHive and its components are no longer part of Security Onion"
|
||||
@@ -476,7 +476,6 @@ case "${operation}" in
|
||||
createUser "$email" "${role:-$DEFAULT_ROLE}"
|
||||
syncAll
|
||||
echo "Successfully added new user to SOC"
|
||||
check_container thehive && echo "$password" | so-thehive-user-add "$email"
|
||||
check_container fleet && echo "$password" | so-fleet-user-add "$email"
|
||||
;;
|
||||
|
||||
@@ -528,7 +527,6 @@ case "${operation}" in
|
||||
updateStatus "$email" 'active'
|
||||
syncAll
|
||||
echo "Successfully enabled user"
|
||||
check_container thehive && so-thehive-user-enable "$email" true
|
||||
echo "Fleet user will need to be recreated manually with so-fleet-user-add"
|
||||
;;
|
||||
|
||||
@@ -540,7 +538,6 @@ case "${operation}" in
|
||||
updateStatus "$email" 'locked'
|
||||
syncAll
|
||||
echo "Successfully disabled user"
|
||||
check_container thehive && so-thehive-user-enable "$email" false
|
||||
check_container fleet && so-fleet-user-delete "$email"
|
||||
;;
|
||||
|
||||
@@ -552,7 +549,6 @@ case "${operation}" in
|
||||
deleteUser "$email"
|
||||
syncAll
|
||||
echo "Successfully deleted user"
|
||||
check_container thehive && so-thehive-user-enable "$email" false
|
||||
check_container fleet && so-fleet-user-delete "$email"
|
||||
;;
|
||||
|
||||
|
||||
@@ -765,29 +765,6 @@ upgrade_space() {
|
||||
fi
|
||||
}
|
||||
|
||||
thehive_maint() {
|
||||
echo -n "Waiting for TheHive..."
|
||||
COUNT=0
|
||||
THEHIVE_CONNECTED="no"
|
||||
while [[ "$COUNT" -le 240 ]]; do
|
||||
curl --output /dev/null --silent --head --fail -k "https://localhost/thehive/api/alert"
|
||||
if [ $? -eq 0 ]; then
|
||||
THEHIVE_CONNECTED="yes"
|
||||
echo "connected!"
|
||||
break
|
||||
else
|
||||
((COUNT+=1))
|
||||
sleep 1
|
||||
echo -n "."
|
||||
fi
|
||||
done
|
||||
if [ "$THEHIVE_CONNECTED" == "yes" ]; then
|
||||
echo "Migrating thehive databases if needed."
|
||||
curl -v -k -XPOST -L "https://localhost/thehive/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
|
||||
curl -v -k -XPOST -L "https://localhost/cortex/api/maintenance/migrate" >> "$SOUP_LOG" 2>&1
|
||||
fi
|
||||
}
|
||||
|
||||
unmount_update() {
|
||||
cd /tmp
|
||||
umount /tmp/soagupdate
|
||||
@@ -1175,7 +1152,6 @@ main() {
|
||||
salt-call state.highstate -l info queue=True
|
||||
postupgrade_changes
|
||||
[[ $is_airgap -eq 0 ]] && unmount_update
|
||||
thehive_maint
|
||||
|
||||
echo ""
|
||||
echo "Upgrade to $NEWVERSION complete."
|
||||
|
||||
@@ -335,30 +335,6 @@ http {
|
||||
|
||||
{%- endif %}
|
||||
|
||||
location /thehive/ {
|
||||
proxy_pass http://{{ manager_ip }}:9000/thehive/;
|
||||
proxy_read_timeout 90;
|
||||
proxy_connect_timeout 90;
|
||||
proxy_http_version 1.1; # this is essential for chunked responses to work
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /cortex/ {
|
||||
proxy_pass http://{{ manager_ip }}:9001/cortex/;
|
||||
proxy_read_timeout 90;
|
||||
proxy_connect_timeout 90;
|
||||
proxy_http_version 1.1; # this is essential for chunked responses to work
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header Proxy "";
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
}
|
||||
|
||||
location /soctopus/ {
|
||||
auth_request /auth/sessions/whoami;
|
||||
proxy_pass http://{{ manager_ip }}:7000/;
|
||||
|
||||
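(These removed location blocks are what exposed TheHive and Cortex under the manager's web root at /thehive/ and /cortex/. The soup maintenance routine above probes the proxied TheHive API through the same path; an equivalent spot check, assuming the proxy is still in place, would be:)
    curl -sk --head --fail https://localhost/thehive/api/alert && echo "TheHive proxy is answering"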
@@ -1,8 +1,8 @@
|
||||
{%- set MANAGERIP = salt['pillar.get']('global:managerip', '') %}
|
||||
{%- set SENSORONIKEY = salt['pillar.get']('global:sensoronikey', '') %}
|
||||
{%- set THEHIVEKEY = salt['pillar.get']('global:hivekey', '') %}
|
||||
{%- set THEHIVEURL = salt['pillar.get']('global:hiveurl', '') %}
|
||||
{%- set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||
{%- set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
|
||||
{%- set FLEETMANAGER = salt['pillar.get']('global:fleet_manager', False) %}
|
||||
{%- set FLEETNODE = salt['pillar.get']('global:fleet_node', False) %}
|
||||
{%- set GRAFANA = salt['pillar.get']('manager:grafana', '0') %}
|
||||
@@ -91,7 +91,7 @@
|
||||
},
|
||||
{%- if CASE_MODULE == 'thehive' and THEHIVEKEY != '' %}
|
||||
"thehive": {
|
||||
"hostUrl": "http://{{ MANAGERIP }}:9000/thehive",
|
||||
"hostUrl": "http://{{ HIVEURL }}:9000/thehive",
|
||||
"key": "{{ THEHIVEKEY }}",
|
||||
"verifyCert": false
|
||||
},
|
||||
@@ -140,9 +140,6 @@
|
||||
{%- if PLAYBOOK == 0 %}
|
||||
"toolPlaybook",
|
||||
{%- endif %}
|
||||
{%- if THEHIVE == 0 %}
|
||||
"toolTheHive",
|
||||
{%- endif %}
|
||||
{%- if not FLEETMANAGER and not FLEETNODE %}
|
||||
"toolFleet",
|
||||
{%- endif %}
|
||||
|
||||
@@ -4,6 +4,5 @@
|
||||
{ "name": "toolCyberchef", "description": "toolCyberchefHelp", "icon": "fa-external-link-alt", "target": "so-cyberchef", "link": "/cyberchef/" },
|
||||
{ "name": "toolPlaybook", "description": "toolPlaybookHelp", "icon": "fa-external-link-alt", "target": "so-playbook", "link": "/playbook/projects/detection-playbooks/issues/" },
|
||||
{ "name": "toolFleet", "description": "toolFleetHelp", "icon": "fa-external-link-alt", "target": "so-fleet", "link": "/fleet/" },
|
||||
{ "name": "toolTheHive", "description": "toolTheHiveHelp", "icon": "fa-external-link-alt", "target": "so-thehive", "link": "/thehive/" },
|
||||
{ "name": "toolNavigator", "description": "toolNavigatorHelp", "icon": "fa-external-link-alt", "target": "so-navigator", "link": "/navigator/" }
|
||||
]
|
||||
@@ -1,6 +1,7 @@
|
||||
{%- set MANAGER = salt['pillar.get']('manager:mainip', '') %}
|
||||
{%- set URLBASE = salt['pillar.get']('global:url_base', '') %}
|
||||
{%- set HIVEKEY = salt['pillar.get']('global:hivekey', '') %}
|
||||
{%- set THEHIVEURL = salt['pillar.get']('global:hiveurl', '') %}
|
||||
{%- set CORTEXKEY = salt['pillar.get']('global:cortexorguserkey', '') %}
|
||||
{%- set PLAYBOOK_KEY = salt['pillar.get']('playbook:api_key', '') %}
|
||||
{%- if salt['pillar.get']('elasticsearch:auth:enabled') is sameas true %}
|
||||
@@ -21,7 +22,7 @@ es_verifycert = no
|
||||
|
||||
[cortex]
|
||||
auto_analyze_alerts = no
|
||||
cortex_url = https://{{URLBASE}}/cortex/
|
||||
cortex_url = https://{{THEHIVEURL}}/cortex/
|
||||
cortex_key = {{ CORTEXKEY }}
|
||||
supported_analyzers = Urlscan_io_Search,CERTatPassiveDNS
|
||||
|
||||
@@ -42,7 +43,7 @@ grr_user = YOURGRRUSER
|
||||
grr_pass = YOURGRRPASS
|
||||
|
||||
[hive]
|
||||
hive_url = https://{{URLBASE}}/thehive/
|
||||
hive_url = https://{{THEHIVEURL}}/thehive/
|
||||
hive_key = {{ HIVEKEY }}
|
||||
hive_tlp = 3
|
||||
hive_verifycert = no
|
||||
|
||||
@@ -1,219 +0,0 @@
|
||||
{%- set MANAGERIP = salt['pillar.get']('global:managerip', '') %}
|
||||
{%- set CORTEXKEY = salt['pillar.get']('global:cortexorguserkey', '') %}
|
||||
{%- set HIVEPLAYSECRET = salt['pillar.get']('global:hiveplaysecret', '') %}
|
||||
|
||||
# Secret Key
|
||||
# The secret key is used to secure cryptographic functions.
|
||||
# WARNING: If you deploy your application on several servers, make sure to use the same key.
|
||||
play.http.secret.key="{{ HIVEPLAYSECRET }}"
|
||||
play.http.context=/thehive/
|
||||
search.uri = "http://{{ MANAGERIP }}:9400"
|
||||
# Elasticsearch
|
||||
search {
|
||||
# Name of the index
|
||||
index = the_hive
|
||||
# Name of the Elasticsearch cluster
|
||||
cluster = thehive
|
||||
# Address of the Elasticsearch instance
|
||||
host = ["{{ MANAGERIP }}:9500"]
|
||||
#search.uri = "http://{{ MANAGERIP }}:9500"
|
||||
# Scroll keepalive
|
||||
keepalive = 1m
|
||||
# Size of the page for scroll
|
||||
pagesize = 50
|
||||
# Number of shards
|
||||
nbshards = 5
|
||||
# Number of replicas
|
||||
nbreplicas = 0
|
||||
# Arbitrary settings
|
||||
settings {
|
||||
# Maximum number of nested fields
|
||||
mapping.nested_fields.limit = 100
|
||||
}
|
||||
|
||||
### XPack SSL configuration
|
||||
# Username for XPack authentication
|
||||
#username
|
||||
# Password for XPack authentication
|
||||
#password
|
||||
# Enable SSL to connect to ElasticSearch
|
||||
ssl.enabled = false
|
||||
# Path to certificate authority file
|
||||
#ssl.ca
|
||||
# Path to certificate file
|
||||
#ssl.certificate
|
||||
# Path to key file
|
||||
#ssl.key
|
||||
|
||||
### SearchGuard configuration
|
||||
# Path to JKS file containing client certificate
|
||||
#guard.keyStore.path
|
||||
# Password of the keystore
|
||||
#guard.keyStore.password
|
||||
# Path to JKS file containing certificate authorities
|
||||
#guard.trustStore.path
|
||||
## Password of the truststore
|
||||
#guard.trustStore.password
|
||||
# Enforce hostname verification
|
||||
#guard.hostVerification
|
||||
# If hostname verification is enabled specify if hostname should be resolved
|
||||
#guard.hostVerificationResolveHostname
|
||||
}
|
||||
|
||||
# Authentication
|
||||
auth {
|
||||
# "provider" parameter contains authentication provider. It can be multi-valued (useful for migration)
|
||||
# available auth types are:
|
||||
# services.LocalAuthSrv : passwords are stored in user entity (in Elasticsearch). No configuration is required.
|
||||
# ad : use ActiveDirectory to authenticate users. Configuration is under "auth.ad" key
|
||||
# ldap : use LDAP to authenticate users. Configuration is under "auth.ldap" key
|
||||
provider = [local]
|
||||
|
||||
# By default, basic authentication is disabled. You can enable it by setting "method.basic" to true.
|
||||
#method.basic = true
|
||||
|
||||
|
||||
ad {
|
||||
# The Windows domain name in DNS format. This parameter is required if you do not use
|
||||
# 'serverNames' below.
|
||||
#domainFQDN = "mydomain.local"
|
||||
|
||||
# Optionally you can specify the host names of the domain controllers instead of using 'domainFQDN'
|
||||
# above. If this parameter is not set, TheHive uses 'domainFQDN'.
|
||||
#serverNames = [ad1.mydomain.local, ad2.mydomain.local]
|
||||
|
||||
# The Windows domain name using short format. This parameter is required.
|
||||
#domainName = "MYDOMAIN"
|
||||
|
||||
# If 'true', use SSL to connect to the domain controller.
|
||||
#useSSL = true
|
||||
}
|
||||
|
||||
ldap {
|
||||
# The LDAP server name or address. The port can be specified using the 'host:port'
|
||||
# syntax. This parameter is required if you don't use 'serverNames' below.
|
||||
#serverName = "ldap.mydomain.local:389"
|
||||
|
||||
# If you have multiple LDAP servers, use the multi-valued setting 'serverNames' instead.
|
||||
#serverNames = [ldap1.mydomain.local, ldap2.mydomain.local]
|
||||
|
||||
# Account to use to bind to the LDAP server. This parameter is required.
|
||||
#bindDN = "cn=thehive,ou=services,dc=mydomain,dc=local"
|
||||
|
||||
# Password of the binding account. This parameter is required.
|
||||
#bindPW = "***secret*password***"
|
||||
|
||||
# Base DN to search users. This parameter is required.
|
||||
#baseDN = "ou=users,dc=mydomain,dc=local"
|
||||
|
||||
# Filter to search user in the directory server. Please note that {0} is replaced
|
||||
# by the actual user name. This parameter is required.
|
||||
#filter = "(cn={0})"
|
||||
|
||||
# If 'true', use SSL to connect to the LDAP directory server.
|
||||
#useSSL = true
|
||||
}
|
||||
}
|
||||
|
||||
# Maximum time between two requests without requesting authentication
|
||||
session {
|
||||
warning = 5m
|
||||
inactivity = 1h
|
||||
}
|
||||
|
||||
# Max textual content length
|
||||
play.http.parser.maxMemoryBuffer= 1M
|
||||
# Max file size
|
||||
play.http.parser.maxDiskBuffer = 1G
|
||||
|
||||
# Cortex
|
||||
# TheHive can connect to one or multiple Cortex instances. Give each
|
||||
# Cortex instance a name and specify the associated URL.
|
||||
#
|
||||
# In order to use Cortex, first you need to enable the Cortex module by uncommenting the next line
|
||||
|
||||
play.modules.enabled += connectors.cortex.CortexConnector
|
||||
|
||||
cortex {
|
||||
"CORTEX-SERVER-ID" {
|
||||
url = "http://{{ MANAGERIP }}:9001/cortex/"
|
||||
key = "{{ CORTEXKEY }}"
|
||||
# # HTTP client configuration (SSL and proxy)
|
||||
# ws {}
|
||||
}
|
||||
}
|
||||
|
||||
# MISP
|
||||
# TheHive can connect to one or multiple MISP instances. Give each MISP
|
||||
# instance a name and specify the associated Authkey that must be used
|
||||
# to poll events, the case template that should be used by default when
|
||||
# importing events as well as the tags that must be added to cases upon
|
||||
# import.
|
||||
|
||||
# Prior to configuring the integration with a MISP instance, you must
|
||||
# enable the MISP connector. This will allow you to import events to
|
||||
# and/or export cases to the MISP instance(s).
|
||||
|
||||
#play.modules.enabled += connectors.misp.MispConnector
|
||||
|
||||
misp {
|
||||
# Interval between consecutive MISP event imports in hours (h) or
|
||||
# minutes (m).
|
||||
interval = 1h
|
||||
|
||||
#"MISP-SERVER-ID" {
|
||||
# # MISP connection configuration requires at least an url and a key. The key must
|
||||
# # be linked with a sync account on MISP.
|
||||
# url = ""
|
||||
# key = ""
|
||||
#
|
||||
# # Name of the case template in TheHive that shall be used to import
|
||||
# # MISP events as cases by default.
|
||||
# caseTemplate = "<Template_Name_goes_here>"
|
||||
#
|
||||
# # Optional tags to add to each observable imported from an event
|
||||
# # available on this instance.
|
||||
# tags = ["misp-server-id"]
|
||||
#
|
||||
# ## MISP event filters
|
||||
# # MISP filters is used to exclude events from the import.
|
||||
# # Filter criteria are:
|
||||
# # The number of attribute
|
||||
# max-attributes = 1000
|
||||
# # The size of its JSON representation
|
||||
# max-size = 1 MiB
|
||||
# # The age of the last publish date
|
||||
# max-age = 7 days
|
||||
# # Organization and tags
|
||||
# exclusion {
|
||||
# organisation = ["bad organisation", "other organisations"]
|
||||
# tags = ["tag1", "tag2"]
|
||||
# }
|
||||
#
|
||||
# ## HTTP client configuration (SSL and proxy)
|
||||
# # Truststore to use to validate the X.509 certificate of the MISP
|
||||
# # instance if the default truststore is not sufficient.
|
||||
# # Proxy can also be used
|
||||
# ws {
|
||||
# ssl.trustManager.stores = [ {
|
||||
# path = /path/to/truststore.jks
|
||||
# } ]
|
||||
# proxy {
|
||||
# host = proxy.mydomain.org
|
||||
# port = 3128
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# # MISP purpose defines if this instance can be used to import events (ImportOnly), export cases (ExportOnly) or both (ImportAndExport)
|
||||
# # Default is ImportAndExport
|
||||
# purpose = ImportAndExport
|
||||
#} ## <-- Uncomment to complete the configuration
|
||||
}
|
||||
webhooks {
|
||||
NodeRedWebHook {
|
||||
url = "http://{{ MANAGERIP }}:1880/thehive"
|
||||
}
|
||||
#SOCtopusWebHook {
|
||||
# url = "http://{{ MANAGERIP }}:7000/enrich"
|
||||
#}
|
||||
}
|
||||
@@ -1,148 +0,0 @@
|
||||
{%- set MANAGERIP = salt['pillar.get']('global:managerip', '') %}
|
||||
{%- set CORTEXPLAYSECRET = salt['pillar.get']('global:cortexplaysecret', '') %}
|
||||
|
||||
# Secret Key
|
||||
# The secret key is used to secure cryptographic functions.
|
||||
# WARNING: If you deploy your application on several servers, make sure to use the same key.
|
||||
play.http.secret.key="{{ CORTEXPLAYSECRET }}"
|
||||
play.http.context=/cortex/
|
||||
pidfile.path = "/dev/null"
|
||||
search.uri = "http://{{ MANAGERIP }}:9400"
|
||||
|
||||
# Elasticsearch
|
||||
search {
|
||||
# Name of the index
|
||||
index = cortex
|
||||
# Name of the Elasticsearch cluster
|
||||
cluster = thehive
|
||||
# Address of the Elasticsearch instance
|
||||
host = ["{{ MANAGERIP }}:9500"]
|
||||
# Scroll keepalive
|
||||
keepalive = 1m
|
||||
# Size of the page for scroll
|
||||
pagesize = 50
|
||||
# Number of shards
|
||||
nbshards = 5
|
||||
# Number of replicas
|
||||
nbreplicas = 0
|
||||
# Arbitrary settings
|
||||
settings {
|
||||
# Maximum number of nested fields
|
||||
mapping.nested_fields.limit = 100
|
||||
}
|
||||
|
||||
## Authentication configuration
|
||||
#search.username = ""
|
||||
#search.password = ""
|
||||
|
||||
## SSL configuration
|
||||
#search.keyStore {
|
||||
# path = "/path/to/keystore"
|
||||
# type = "JKS" # or PKCS12
|
||||
# password = "keystore-password"
|
||||
#}
|
||||
#search.trustStore {
|
||||
# path = "/path/to/trustStore"
|
||||
# type = "JKS" # or PKCS12
|
||||
# password = "trustStore-password"
|
||||
#}
|
||||
}
|
||||
|
||||
## Cache
|
||||
#
|
||||
# If an analyzer is executed against the same observable, the previous report can be returned without re-executing the
|
||||
# analyzer. The cache is used only if the second job occurs within cache.job (the default is 10 minutes).
|
||||
cache.job = 10 minutes
|
||||
|
||||
## Authentication
|
||||
auth {
|
||||
# "provider" parameter contains the authentication provider(s). It can be multi-valued, which is useful
|
||||
# for migration.
|
||||
# The available auth types are:
|
||||
# - services.LocalAuthSrv : passwords are stored in the user entity within Elasticsearch. No
|
||||
# configuration is required.
|
||||
# - ad : use ActiveDirectory to authenticate users. The associated configuration shall be done in
|
||||
# the "ad" section below.
|
||||
# - ldap : use LDAP to authenticate users. The associated configuration shall be done in the
|
||||
# "ldap" section below.
|
||||
provider = [local]
|
||||
|
||||
ad {
|
||||
# The Windows domain name in DNS format. This parameter is required if you do not use
|
||||
# 'serverNames' below.
|
||||
#domainFQDN = "mydomain.local"
|
||||
|
||||
# Optionally you can specify the host names of the domain controllers instead of using 'domainFQDN'
|
||||
# above. If this parameter is not set, TheHive uses 'domainFQDN'.
|
||||
#serverNames = [ad1.mydomain.local, ad2.mydomain.local]
|
||||
|
||||
# The Windows domain name using short format. This parameter is required.
|
||||
#domainName = "MYDOMAIN"
|
||||
|
||||
# If 'true', use SSL to connect to the domain controller.
|
||||
#useSSL = true
|
||||
}
|
||||
|
||||
ldap {
|
||||
# The LDAP server name or address. The port can be specified using the 'host:port'
|
||||
# syntax. This parameter is required if you don't use 'serverNames' below.
|
||||
#serverName = "ldap.mydomain.local:389"
|
||||
|
||||
# If you have multiple LDAP servers, use the multi-valued setting 'serverNames' instead.
|
||||
#serverNames = [ldap1.mydomain.local, ldap2.mydomain.local]
|
||||
|
||||
# Account to use to bind to the LDAP server. This parameter is required.
|
||||
#bindDN = "cn=thehive,ou=services,dc=mydomain,dc=local"
|
||||
|
||||
# Password of the binding account. This parameter is required.
|
||||
#bindPW = "***secret*password***"
|
||||
|
||||
# Base DN to search users. This parameter is required.
|
||||
#baseDN = "ou=users,dc=mydomain,dc=local"
|
||||
|
||||
# Filter to search user in the directory server. Please note that {0} is replaced
|
||||
# by the actual user name. This parameter is required.
|
||||
#filter = "(cn={0})"
|
||||
|
||||
# If 'true', use SSL to connect to the LDAP directory server.
|
||||
#useSSL = true
|
||||
}
|
||||
}
|
||||
|
||||
## ANALYZERS
|
||||
#
|
||||
analyzer {
|
||||
# Absolute path where you have pulled the Cortex-Analyzers repository.
|
||||
path = ["/Cortex-Analyzers/analyzers"]
|
||||
|
||||
# Sane defaults. Do not change unless you know what you are doing.
|
||||
fork-join-executor {
|
||||
|
||||
# Min number of threads available for analysis.
|
||||
parallelism-min = 2
|
||||
|
||||
# Parallelism (threads) ... ceil(available processors * factor).
|
||||
parallelism-factor = 2.0
|
||||
|
||||
# Max number of threads available for analysis.
|
||||
parallelism-max = 4
|
||||
}
|
||||
}
|
||||
|
||||
## RESPONDERS
|
||||
##
|
||||
responder {
|
||||
# Directory that holds responders
|
||||
urls = ["/Cortex-Analyzers/responders", "/custom-responders"]
|
||||
|
||||
fork-join-executor {
|
||||
# Min number of threads available for analysis
|
||||
parallelism-min = 2
|
||||
# Parallelism (threads) ... ceil(available processors * factor)
|
||||
parallelism-factor = 2.0
|
||||
# Max number of threads available for analysis
|
||||
parallelism-max = 4
|
||||
}
|
||||
}
|
||||
|
||||
# It's the end my friend. Happy hunting!
|
||||
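(For the fork-join-executor blocks in this removed Cortex config: the effective thread count is roughly ceil(available processors x parallelism-factor), clamped to the parallelism-min/parallelism-max bounds. For example, on a 2-core container the computed value is ceil(2 x 2.0) = 4, which the max of 4 still allows; on an 8-core host the computed 16 would be capped back down to 4.)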
@@ -1,17 +0,0 @@
|
||||
cluster.name: thehive
|
||||
network.host: 0.0.0.0
|
||||
discovery.zen.minimum_master_nodes: 1
|
||||
# This is a test -- if this is here, then the volume is mounted correctly.
|
||||
path.logs: /var/log/elasticsearch
|
||||
action.destructive_requires_name: true
|
||||
discovery.type: single-node
|
||||
script.allowed_types: inline
|
||||
transport.bind_host: 0.0.0.0
|
||||
transport.publish_host: 0.0.0.0
|
||||
transport.publish_port: 9500
|
||||
http.host: 0.0.0.0
|
||||
http.port: 9400
|
||||
transport.tcp.port: 9500
|
||||
transport.host: 0.0.0.0
|
||||
thread_pool.search.queue_size: 100000
|
||||
thread_pool.write.queue_size: 100000
|
||||
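(This removed config ran TheHive's dedicated single-node Elasticsearch on ports 9400/9500. The init scripts later in this diff gate on its health with a check equivalent to the following; the manager address is a placeholder:)
    curl -s "http://<manager-ip>:9400/_cluster/health" | grep -q '"status":"green"' && echo "TheHive ES is green"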
@@ -1,20 +0,0 @@
|
||||
status = error
|
||||
#appender.console.type = Console
|
||||
#appender.console.name = console
|
||||
#appender.console.layout.type = PatternLayout
|
||||
#appender.console.layout.pattern = [%d{ISO8601}][%-5p][%-25c{1.}] %marker%m%n
|
||||
#rootLogger.level = info
|
||||
#rootLogger.appenderRef.console.ref = console
|
||||
# This is a test -- if this is here, then the volume is mounted correctly.
|
||||
appender.rolling.type = RollingFile
|
||||
appender.rolling.name = rolling
|
||||
appender.rolling.fileName = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}.log
|
||||
appender.rolling.layout.type = PatternLayout
|
||||
appender.rolling.layout.pattern = [%d{ISO8601}][%-5p][%-25c] %.10000m%n
|
||||
appender.rolling.filePattern = ${sys:es.logs.base_path}${sys:file.separator}${sys:es.logs.cluster_name}-%d{yyyy-MM-dd}.log
|
||||
appender.rolling.policies.type = Policies
|
||||
appender.rolling.policies.time.type = TimeBasedTriggeringPolicy
|
||||
appender.rolling.policies.time.interval = 1
|
||||
appender.rolling.policies.time.modulate = true
|
||||
rootLogger.level = info
|
||||
rootLogger.appenderRef.rolling.ref = rolling
|
||||
@@ -1,180 +0,0 @@
|
||||
{% from 'allowed_states.map.jinja' import allowed_states %}
|
||||
{% if sls in allowed_states %}
|
||||
|
||||
{% set MANAGERIP = salt['pillar.get']('manager:mainip', '') %}
|
||||
{% set VERSION = salt['pillar.get']('global:soversion', 'HH1.2.2') %}
|
||||
{% set IMAGEREPO = salt['pillar.get']('global:imagerepo') %}
|
||||
{% set MANAGER = salt['grains.get']('master') %}
|
||||
thehiveconfdir:
|
||||
file.directory:
|
||||
- name: /opt/so/conf/thehive/etc
|
||||
- makedirs: True
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
thehivelogdir:
|
||||
file.directory:
|
||||
- name: /opt/so/log/thehive
|
||||
- makedirs: True
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
thehiveconf:
|
||||
file.recurse:
|
||||
- name: /opt/so/conf/thehive/etc
|
||||
- source: salt://thehive/etc
|
||||
- user: 939
|
||||
- group: 939
|
||||
- template: jinja
|
||||
|
||||
cortexconfdir:
|
||||
file.directory:
|
||||
- name: /opt/so/conf/cortex
|
||||
- makedirs: True
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
cortexlogdir:
|
||||
file.directory:
|
||||
- name: /opt/so/log/cortex
|
||||
- makedirs: True
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
cortexconf:
|
||||
file.recurse:
|
||||
- name: /opt/so/conf/cortex
|
||||
- source: salt://thehive/etc
|
||||
- user: 939
|
||||
- group: 939
|
||||
- template: jinja
|
||||
|
||||
cortexanalyzers:
|
||||
file.directory:
|
||||
- name: /opt/so/conf/cortex/custom-analyzers
|
||||
- user: 939
|
||||
- group: 939
|
||||
- template: jinja
|
||||
|
||||
cortexresponders:
|
||||
file.directory:
|
||||
- name: /opt/so/conf/cortex/custom-responders
|
||||
- user: 939
|
||||
- group: 939
|
||||
- template: jinja
|
||||
|
||||
# Install Elasticsearch
|
||||
|
||||
# Made directory for ES data to live in
|
||||
thehiveesdata:
|
||||
file.directory:
|
||||
- name: /nsm/thehive/esdata
|
||||
- makedirs: True
|
||||
- user: 939
|
||||
- group: 939
|
||||
|
||||
thehive_elasticsearch_yml:
|
||||
file.exists:
|
||||
- name: /opt/so/conf/thehive/etc/es/elasticsearch.yml
|
||||
|
||||
log4j2_properties:
|
||||
file.exists:
|
||||
- name: /opt/so/conf/thehive/etc/es/log4j2.properties
|
||||
|
||||
so-thehive-es:
|
||||
docker_container.running:
|
||||
- image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive-es:{{ VERSION }}
|
||||
- hostname: so-thehive-es
|
||||
- name: so-thehive-es
|
||||
- user: 939
|
||||
- interactive: True
|
||||
- tty: True
|
||||
- binds:
|
||||
- /nsm/thehive/esdata:/usr/share/elasticsearch/data:rw
|
||||
- /opt/so/conf/thehive/etc/es/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
|
||||
- /opt/so/conf/thehive/etc/es/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
|
||||
- /opt/so/log/thehive:/var/log/elasticsearch:rw
|
||||
- environment:
|
||||
- ES_JAVA_OPTS=-Xms512m -Xmx512m -Dlog4j2.formatMsgNoLookups=true
|
||||
- port_bindings:
|
||||
- 0.0.0.0:9400:9400
|
||||
- 0.0.0.0:9500:9500
|
||||
- require:
|
||||
- file: thehive_elasticsearch_yml
|
||||
- file: log4j2_properties
|
||||
|
||||
append_so-thehive-es_so-status.conf:
|
||||
file.append:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- text: so-thehive-es
|
||||
|
||||
cortex_application_conf:
|
||||
file.exists:
|
||||
- name: /opt/so/conf/thehive/etc/cortex-application.conf
|
||||
|
||||
application_conf:
|
||||
file.exists:
|
||||
- name: /opt/so/conf/thehive/etc/application.conf
|
||||
|
||||
# Install Cortex
|
||||
so-cortex:
|
||||
docker_container.running:
|
||||
- image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive-cortex:{{ VERSION }}
|
||||
- hostname: so-cortex
|
||||
- name: so-cortex
|
||||
- user: 939
|
||||
- binds:
|
||||
- /opt/so/conf/thehive/etc/cortex-application.conf:/opt/cortex/conf/application.conf:ro
|
||||
- /opt/so/conf/cortex/custom-analyzers:/custom-analyzers:ro
|
||||
- /opt/so/conf/cortex/custom-responders:/custom-responders:ro
|
||||
- port_bindings:
|
||||
- 0.0.0.0:9001:9001
|
||||
- require:
|
||||
- file: cortex_application_conf
|
||||
|
||||
append_so-cortex_so-status.conf:
|
||||
file.append:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- text: so-cortex
|
||||
|
||||
cortexscript:
|
||||
cmd.script:
|
||||
- source: salt://thehive/scripts/cortex_init
|
||||
- cwd: /opt/so
|
||||
- template: jinja
|
||||
- hide_output: False
|
||||
|
||||
so-thehive:
|
||||
docker_container.running:
|
||||
- image: {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-thehive:{{ VERSION }}
|
||||
- environment:
|
||||
- ELASTICSEARCH_HOST={{ MANAGERIP }}
|
||||
- hostname: so-thehive
|
||||
- name: so-thehive
|
||||
- user: 939
|
||||
- binds:
|
||||
- /opt/so/conf/thehive/etc/application.conf:/opt/thehive/conf/application.conf:ro
|
||||
- port_bindings:
|
||||
- 0.0.0.0:9000:9000
|
||||
- require:
|
||||
- file: application_conf
|
||||
|
||||
append_so-thehive_so-status.conf:
|
||||
file.append:
|
||||
- name: /opt/so/conf/so-status/so-status.conf
|
||||
- text: so-thehive
|
||||
|
||||
thehivescript:
|
||||
cmd.script:
|
||||
- source: salt://thehive/scripts/hive_init
|
||||
- cwd: /opt/so
|
||||
- template: jinja
|
||||
- hide_output: False
|
||||
|
||||
{% else %}
|
||||
|
||||
{{sls}}_state_not_allowed:
|
||||
test.fail_without_changes:
|
||||
- name: {{sls}}_state_not_allowed
|
||||
|
||||
{% endif %}
|
||||
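(Before this removal, the Salt state above was applied on manager-class nodes the same way the setup script further down in this diff does it, i.e. something along the lines of:)
    salt-call state.apply -l info thehive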
@@ -1,75 +0,0 @@
|
||||
#!/bin/bash
|
||||
# {%- set MANAGERIP = salt['pillar.get']('global:managerip', '') %}
|
||||
# {%- set CORTEXUSER = salt['pillar.get']('global:cortexuser', 'cortexadmin') %}
|
||||
# {%- set CORTEXPASSWORD = salt['pillar.get']('global:cortexpassword', 'cortexchangeme') %}
|
||||
# {%- set CORTEXKEY = salt['pillar.get']('global:cortexkey', '') %}
|
||||
# {%- set CORTEXORGNAME = salt['pillar.get']('global:cortexorgname', '') %}
|
||||
# {%- set CORTEXORGUSER = salt['pillar.get']('global:cortexorguser', 'soadmin') %}
|
||||
# {%- set CORTEXORGUSERKEY = salt['pillar.get']('global:cortexorguserkey', '') %}
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
default_salt_dir=/opt/so/saltstack/default
|
||||
|
||||
cortex_clean(){
|
||||
sed -i '/^ cortexuser:/d' /opt/so/saltstack/local/pillar/global.sls
|
||||
sed -i '/^ cortexpassword:/d' /opt/so/saltstack/local/pillar/global.sls
|
||||
sed -i '/^ cortexorguser:/d' /opt/so/saltstack/local/pillar/global.sls
|
||||
}
|
||||
|
||||
cortex_init(){
|
||||
CORTEX_URL="http://{{MANAGERIP}}:9001/cortex/"
|
||||
CORTEX_API_URL="${CORTEX_URL}api"
|
||||
CORTEX_USER="{{CORTEXUSER}}"
|
||||
CORTEX_PASSWORD="{{CORTEXPASSWORD}}"
|
||||
CORTEX_KEY="{{CORTEXKEY}}"
|
||||
CORTEX_ORG_NAME="{{CORTEXORGNAME}}"
|
||||
CORTEX_ORG_DESC="{{CORTEXORGNAME}} organization created by Security Onion setup"
|
||||
CORTEX_ORG_USER="{{CORTEXORGUSER}}"
|
||||
CORTEX_ORG_USER_KEY="{{CORTEXORGUSERKEY}}"
|
||||
SOCTOPUS_CONFIG="$default_salt_dir/salt/soctopus/files/SOCtopus.conf"
|
||||
|
||||
if wait_for_web_response $CORTEX_URL "Cortex" 120; then
|
||||
# Migrate DB
|
||||
curl -sk -XPOST -L "$CORTEX_API_URL/maintenance/migrate"
|
||||
|
||||
# Create initial Cortex superadmin
|
||||
curl -sk -L "$CORTEX_API_URL/user" -H "Content-Type: application/json" -d "{\"login\" : \"$CORTEX_USER\",\"name\" : \"$CORTEX_USER\",\"roles\" : [\"superadmin\"],\"preferences\" : \"{}\",\"password\" : \"$CORTEX_PASSWORD\", \"key\": \"$CORTEX_KEY\"}"
|
||||
|
||||
# Create user-supplied org
|
||||
curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "$CORTEX_API_URL/organization" -d "{ \"name\": \"$CORTEX_ORG_NAME\",\"description\": \"$CORTEX_ORG_DESC\",\"status\": \"Active\"}"
|
||||
|
||||
# Create user-supplied org user
|
||||
curl -sk -XPOST -H "Authorization: Bearer $CORTEX_KEY" -H "Content-Type: application/json" -L "$CORTEX_API_URL/user" -d "{\"name\": \"$CORTEX_ORG_USER\",\"roles\": [\"read\",\"analyze\",\"orgadmin\"],\"organization\": \"$CORTEX_ORG_NAME\",\"login\": \"$CORTEX_ORG_USER\",\"key\": \"$CORTEX_ORG_USER_KEY\" }"
|
||||
|
||||
# Enable URLScan.io Analyzer
|
||||
curl -sv -k -XPOST -H "Authorization: Bearer $CORTEX_ORG_USER_KEY" -H "Content-Type: application/json" -L "$CORTEX_API_URL/organization/analyzer/Urlscan_io_Search_0_1_0" -d '{"name":"Urlscan_io_Search_0_1_0","configuration":{"auto_extract_artifacts":false,"check_tlp":true,"max_tlp":2}}'
|
||||
|
||||
# Enable Cert PassiveDNS Analyzer
|
||||
curl -sv -k -XPOST -H "Authorization: Bearer $CORTEX_ORG_USER_KEY" -H "Content-Type: application/json" -L "$CORTEX_API_URL/organization/analyzer/CERTatPassiveDNS_2_0" -d '{"name":"CERTatPassiveDNS_2_0","configuration":{"auto_extract_artifacts":false,"check_tlp":true,"max_tlp":2, "limit": 100}}'
|
||||
|
||||
# Revoke $CORTEX_USER key
|
||||
curl -sk -XDELETE -H "Authorization: Bearer $CORTEX_KEY" -L "$CORTEX_API_URL/user/$CORTEX_USER/key"
|
||||
|
||||
# Update SOCtopus config with apikey value
|
||||
#sed -i "s/cortex_key = .*/cortex_key = $CORTEX_KEY/" $SOCTOPUS_CONFIG
|
||||
|
||||
touch /opt/so/state/cortex.txt
|
||||
else
|
||||
echo "We experienced an issue connecting to Cortex!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
if [ -f /opt/so/state/cortex.txt ]; then
|
||||
cortex_clean
|
||||
exit 0
|
||||
else
|
||||
if wait_for_web_response http://{{MANAGERIP}}:9400/_cluster/health '"status":"green"' 120; then
|
||||
cortex_init
|
||||
cortex_clean
|
||||
else
|
||||
echo "TheHive Elasticsearch server is not ready; unable to proceed with Cortex init."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
@@ -1,53 +0,0 @@
|
||||
#!/bin/bash
|
||||
# {%- set MANAGERIP = salt['pillar.get']('global:managerip', '') %}
|
||||
# {%- set THEHIVEUSER = salt['pillar.get']('global:hiveuser', 'hiveadmin') %}
|
||||
# {%- set THEHIVEPASSWORD = salt['pillar.get']('global:hivepassword', 'hivechangeme') %}
|
||||
# {%- set THEHIVEKEY = salt['pillar.get']('global:hivekey', '') %}
|
||||
|
||||
. /usr/sbin/so-common
|
||||
|
||||
thehive_clean(){
|
||||
sed -i '/^ hiveuser:/d' /opt/so/saltstack/local/pillar/global.sls
|
||||
sed -i '/^ hivepassword:/d' /opt/so/saltstack/local/pillar/global.sls
|
||||
}
|
||||
|
||||
thehive_init(){
|
||||
THEHIVE_URL="http://{{MANAGERIP}}:9000/thehive/"
|
||||
THEHIVE_API_URL="${THEHIVE_URL}api"
|
||||
THEHIVE_USER="{{THEHIVEUSER}}"
|
||||
THEHIVE_PASSWORD="{{THEHIVEPASSWORD}}"
|
||||
THEHIVE_KEY="{{THEHIVEKEY}}"
|
||||
SOCTOPUS_CONFIG="/opt/so/saltstack/salt/soctopus/files/SOCtopus.conf"
|
||||
|
||||
echo -n "Waiting for TheHive..."
|
||||
if wait_for_web_response $THEHIVE_URL "TheHive" 120; then
|
||||
# Migrate DB
|
||||
curl -sk -XPOST -L "$THEHIVE_API_URL/maintenance/migrate"
|
||||
|
||||
# Create initial TheHive user
|
||||
curl -sk -L "$THEHIVE_API_URL/user" -H "Content-Type: application/json" -d "{\"login\" : \"$THEHIVE_USER\",\"name\" : \"$THEHIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$THEHIVE_PASSWORD\", \"key\": \"$THEHIVE_KEY\"}"
|
||||
|
||||
# Pre-load custom fields
|
||||
#
|
||||
# reputation
|
||||
curl -sk -L "$THEHIVE_API_URL/list/custom_fields" -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -d "{\"value\":{\"name\": \"reputation\", \"reference\": \"reputation\", \"description\": \"This field provides an overall reputation status for an address/domain.\", \"type\": \"string\", \"options\": []}}"
|
||||
|
||||
touch /opt/so/state/thehive.txt
|
||||
else
|
||||
echo "We experienced an issue connecting to TheHive!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
if [ -f /opt/so/state/thehive.txt ]; then
|
||||
thehive_clean
|
||||
exit 0
|
||||
else
|
||||
if wait_for_web_response http://{{MANAGERIP}}:9400/_cluster/health '"status":"green"' 120; then
|
||||
thehive_init
|
||||
thehive_clean
|
||||
else
|
||||
echo "TheHive Elasticsearch server is not ready; unable to proceed with TheHive init."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
salt/top.sls (13 changed lines)
@@ -1,6 +1,5 @@
|
||||
{% set ZEEKVER = salt['pillar.get']('global:mdengine', '') %}
|
||||
{% set WAZUH = salt['pillar.get']('global:wazuh', '0') %}
|
||||
{% set THEHIVE = salt['pillar.get']('manager:thehive', '0') %}
|
||||
{% set PLAYBOOK = salt['pillar.get']('manager:playbook', '0') %}
|
||||
{% set FREQSERVER = salt['pillar.get']('manager:freq', '0') %}
|
||||
{% set DOMAINSTATS = salt['pillar.get']('manager:domainstats', '0') %}
|
||||
@@ -143,9 +142,6 @@ base:
|
||||
- utility
|
||||
- schedule
|
||||
- soctopus
|
||||
{%- if THEHIVE != 0 %}
|
||||
- thehive
|
||||
{%- endif %}
|
||||
{%- if PLAYBOOK != 0 %}
|
||||
- playbook
|
||||
- redis
|
||||
@@ -210,9 +206,6 @@ base:
|
||||
- fleet.install_package
|
||||
{%- endif %}
|
||||
- soctopus
|
||||
{%- if THEHIVE != 0 %}
|
||||
- thehive
|
||||
{%- endif %}
|
||||
{%- if PLAYBOOK != 0 %}
|
||||
- playbook
|
||||
{%- endif %}
|
||||
@@ -284,9 +277,6 @@ base:
|
||||
- utility
|
||||
- schedule
|
||||
- soctopus
|
||||
{%- if THEHIVE != 0 %}
|
||||
- thehive
|
||||
{%- endif %}
|
||||
{%- if PLAYBOOK != 0 %}
|
||||
- playbook
|
||||
{%- endif %}
|
||||
@@ -376,9 +366,6 @@ base:
|
||||
- fleet.install_package
|
||||
{%- endif %}
|
||||
- soctopus
|
||||
{%- if THEHIVE != 0 %}
|
||||
- thehive
|
||||
{%- endif %}
|
||||
{%- if PLAYBOOK != 0 %}
|
||||
- playbook
|
||||
{%- endif %}
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
#STRELKA=1
|
||||
#THEHIVE=1
|
||||
#THEHIVE=0
|
||||
#WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
#STRELKA=1
|
||||
#THEHIVE=1
|
||||
#THEHIVE=0
|
||||
#WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -72,7 +72,7 @@ SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
SURIPINS=(2 3)
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -71,7 +71,7 @@ PATCHSCHEDULENAME=auto
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -72,7 +72,7 @@ SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
# STRELKA=1
|
||||
SURIPINS=(2 3)
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
# WEBUSER=onionuser@somewhere.invalid
|
||||
# WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
# SOREMOTEPASS1=onionuser
|
||||
# SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
# SOREMOTEPASS1=onionuser
|
||||
# SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
# SOREMOTEPASS1=onionuser
|
||||
# SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
# SOREMOTEPASS1=onionuser
|
||||
# SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
# SOREMOTEPASS1=onionuser
|
||||
# SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
# THEHIVE=1
|
||||
# THEHIVE=0
|
||||
# WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -70,7 +70,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -69,7 +69,7 @@ RULESETUP=ETOPEN
|
||||
SOREMOTEPASS1=onionuser
|
||||
SOREMOTEPASS2=onionuser
|
||||
STRELKA=1
|
||||
THEHIVE=1
|
||||
THEHIVE=0
|
||||
WAZUH=1
|
||||
WEBUSER=onionuser@somewhere.invalid
|
||||
WEBPASSWD1=0n10nus3r
|
||||
|
||||
@@ -153,18 +153,6 @@ check_admin_pass() {
|
||||
check_pass_match "$ADMINPASS1" "$ADMINPASS2" "APMATCH"
|
||||
}
|
||||
|
||||
check_hive_init() {
|
||||
|
||||
wait_for_file /opt/so/state/thehive.txt 20 5
|
||||
local return_val=$?
|
||||
if [[ $return_val -ne 0 ]]; then
|
||||
return $return_val
|
||||
fi
|
||||
|
||||
docker stop so-thehive
|
||||
docker rm so-thehive
|
||||
}
|
||||
|
||||
check_manager_state() {
|
||||
echo "Checking state of manager services. This may take a moment..."
|
||||
retry 2 15 "__check_so_status" >> $setup_log 2>&1 && retry 2 15 "__check_salt_master" >> $setup_log 2>&1 && return 0 || return 1
|
||||
@@ -1499,13 +1487,6 @@ generate_passwords(){
|
||||
FLEETSAPASS=$(get_random_value)
|
||||
FLEETJWT=$(get_random_value)
|
||||
GRAFANAPASS=$(get_random_value)
|
||||
if [[ "$THEHIVE" == "1" ]]; then
|
||||
HIVEKEY=$(get_random_value)
|
||||
HIVEPLAYSECRET=$(get_random_value)
|
||||
CORTEXKEY=$(get_random_value)
|
||||
CORTEXORGUSERKEY=$(get_random_value)
|
||||
CORTEXPLAYSECRET=$(get_random_value)
|
||||
fi
|
||||
SENSORONIKEY=$(get_random_value)
|
||||
KRATOSKEY=$(get_random_value)
|
||||
}
|
||||
@@ -1673,7 +1654,6 @@ manager_pillar() {
|
||||
" es_port: $node_es_port"\
|
||||
" grafana: $GRAFANA"\
|
||||
" osquery: $OSQUERY"\
|
||||
" thehive: $THEHIVE"\
|
||||
" playbook: $PLAYBOOK"\
|
||||
""\
|
||||
"elasticsearch:"\
|
||||
@@ -1759,22 +1739,6 @@ manager_global() {
|
||||
" airgap: False"\ >> "$global_pillar"
|
||||
fi
|
||||
|
||||
# Check if TheHive is enabled. If so, add creds and other details
|
||||
if [[ "$THEHIVE" == "1" ]]; then
|
||||
printf '%s\n'\
|
||||
" hiveuser: '$WEBUSER'"\
|
||||
" hivepassword: '$WEBPASSWD1'"\
|
||||
" hivekey: '$HIVEKEY'"\
|
||||
" hiveplaysecret: '$HIVEPLAYSECRET'"\
|
||||
" cortexuser: '$WEBUSER'"\
|
||||
" cortexpassword: '$WEBPASSWD1'"\
|
||||
" cortexkey: '$CORTEXKEY'"\
|
||||
" cortexorgname: 'SecurityOnion'"\
|
||||
" cortexorguser: 'soadmin'"\
|
||||
" cortexorguserkey: '$CORTEXORGUSERKEY'"\
|
||||
" cortexplaysecret: '$CORTEXPLAYSECRET'" >> "$global_pillar"
|
||||
fi
|
||||
|
||||
# Continue adding other details
|
||||
printf '%s\n'\
|
||||
" fleet_custom_hostname: "\
|
||||
|
||||
@@ -440,7 +440,6 @@ if [[ $is_import ]]; then
|
||||
GRAFANA=${GRAFANA:-0}
|
||||
OSQUERY=${OSQUERY:-0}
|
||||
WAZUH=${WAZUH:-0}
|
||||
THEHIVE=${THEHIVE:-0}
|
||||
PLAYBOOK=${PLAYBOOK:-0}
|
||||
fi
|
||||
|
||||
@@ -937,11 +936,6 @@ echo "1" > /root/accept_changes
|
||||
salt-call state.apply -l info wazuh >> $setup_log 2>&1
|
||||
fi
|
||||
|
||||
if [[ "$THEHIVE" = 1 ]]; then
|
||||
set_progress_str 80 "$(print_salt_state_apply 'thehive')"
|
||||
salt-call state.apply -l info thehive >> $setup_log 2>&1
|
||||
fi
|
||||
|
||||
if [[ "$STRELKA" = 1 ]]; then
|
||||
if [[ $is_sensor ]]; then
|
||||
set_progress_str 81 "$(print_salt_state_apply 'strelka')"
|
||||
@@ -1018,11 +1012,6 @@ else
|
||||
generate_repo_tarball >> "$setup_log" 2>&1
|
||||
fi
|
||||
|
||||
if [[ $THEHIVE == 1 ]]; then
|
||||
set_progress_str 99 'Waiting for TheHive to start up'
|
||||
check_hive_init >> $setup_log 2>&1
|
||||
fi
|
||||
|
||||
if [[ -n $LEARN_LOGSCAN_ENABLE ]]; then
|
||||
set_progress_str 99 'Enabling logscan'
|
||||
so-learn enable logscan --apply >> $setup_log 2>&1
|
||||
|
||||