mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-06 17:22:49 +01:00)
Rename Hive to TheHive in several places
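The hunks below rename the Salt state IDs, paths, and the init script from "hive" to "thehive". A quick way to look for leftover old-style references after a change like this is a grep over the salt tree; this is only a sketch (not part of the commit) and assumes the salt/ layout implied by the hunks:

# hypothetical spot-check for remaining old-style references
grep -rn "salt://hive" salt/
grep -rn "/opt/so/conf/hive\|/nsm/hive\|/opt/so/log/hive" salt/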
@@ -1,64 +0,0 @@
-#!/bin/bash
-{% set MASTERIP = salt['pillar.get']('static:masterip', '') %}
-{%- set HIVEUSER = salt['pillar.get']('static:hiveuser', '') %}
-{%- set HIVEPASSWORD = salt['pillar.get']('static:hivepassword', '') %}
-{%- set HIVEKEY = salt['pillar.get']('static:hivekey', '') %}
-
-hive_init(){
-  sleep 120
-  HIVE_IP="{{MASTERIP}}"
-  HIVE_USER="{{HIVEUSER}}"
-  HIVE_PASSWORD="{{HIVEPASSWORD}}"
-  HIVE_KEY="{{HIVEKEY}}"
-  SOCTOPUS_CONFIG="/opt/so/saltstack/salt/soctopus/files/SOCtopus.conf"
-
-  echo -n "Waiting for TheHive..."
-  COUNT=0
-  HIVE_CONNECTED="no"
-  while [[ "$COUNT" -le 240 ]]; do
-    curl --output /dev/null --silent --head --fail -k "https://$HIVE_IP/thehive"
-    if [ $? -eq 0 ]; then
-      HIVE_CONNECTED="yes"
-      echo "connected!"
-      break
-    else
-      ((COUNT+=1))
-      sleep 1
-      echo -n "."
-    fi
-  done
-
-  if [ "$HIVE_CONNECTED" == "yes" ]; then
-
-    # Migrate DB
-    curl -v -k -XPOST "https://$HIVE_IP:/thehive/api/maintenance/migrate"
-
-    # Create intial TheHive user
-    curl -v -k "https://$HIVE_IP/thehive/api/user" -H "Content-Type: application/json" -d "{\"login\" : \"$HIVE_USER\",\"name\" : \"$HIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$HIVE_PASSWORD\", \"key\": \"$HIVE_KEY\"}"
-
-    # Pre-load custom fields
-    #
-    # reputation
-    curl -v -k "https://$HIVE_IP/thehive/api/list/custom_fields" -H "Authorization: Bearer $HIVE_KEY" -H "Content-Type: application/json" -d "{\"value\":{\"name\": \"reputation\", \"reference\": \"reputation\", \"description\": \"This field provides an overall reputation status for an address/domain.\", \"type\": \"string\", \"options\": []}}"
-
-
-    touch /opt/so/state/thehive.txt
-  else
-    echo "We experienced an issue connecting to TheHive!"
-  fi
-}
-
-if [ -f /opt/so/state/thehive.txt ]; then
-  exit 0
-else
-  rm -f garbage_file
-  while ! wget -O garbage_file {{MASTERIP}}:9400 2>/dev/null
-  do
-    echo "Waiting for Elasticsearch..."
-    rm -f garbage_file
-    sleep 1
-  done
-  rm -f garbage_file
-  sleep 5
-  hive_init
-fi
@@ -12,7 +12,7 @@ search {
   # Name of the index
   index = the_hive
   # Name of the Elasticsearch cluster
-  cluster = hive
+  cluster = thehive
   # Address of the Elasticsearch instance
   host = ["{{ MASTERIP }}:9500"]
   #search.uri = "http://{{ MASTERIP }}:9500"
@@ -12,7 +12,7 @@ search {
   # Name of the index
   index = cortex
   # Name of the Elasticsearch cluster
-  cluster = hive
+  cluster = thehive
   # Address of the Elasticsearch instance
   host = ["{{ MASTERIP }}:9500"]
   # Scroll keepalive
@@ -1,4 +1,4 @@
-cluster.name: "hive"
+cluster.name: "thehive"
 network.host: 0.0.0.0
 discovery.zen.minimum_master_nodes: 1
 # This is a test -- if this is here, then the volume is mounted correctly.
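The rename has to stay consistent across the files above: TheHive's and Cortex's application.conf now point at cluster = thehive, and the Elasticsearch node itself is renamed to match. One quick way to confirm a running so-thehive-es container picked up the new name is the cluster health API on the HTTP port (9400 in the state below); this is a sketch, not part of the commit, and MASTERIP stands in for the actual master address:

# expect "cluster_name" : "thehive" in the response
curl -s "http://MASTERIP:9400/_cluster/health?pretty"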
@@ -1,24 +1,24 @@
|
|||||||
{% set MASTERIP = salt['pillar.get']('master:mainip', '') %}
|
{% set MASTERIP = salt['pillar.get']('master:mainip', '') %}
|
||||||
{% set VERSION = salt['pillar.get']('static:soversion', 'HH1.2.2') %}
|
{% set VERSION = salt['pillar.get']('static:soversion', 'HH1.2.2') %}
|
||||||
{% set MASTER = salt['grains.get']('master') %}
|
{% set MASTER = salt['grains.get']('master') %}
|
||||||
hiveconfdir:
|
thehiveconfdir:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so/conf/hive/etc
|
- name: /opt/so/conf/thehive/etc
|
||||||
- makedirs: True
|
- makedirs: True
|
||||||
- user: 939
|
- user: 939
|
||||||
- group: 939
|
- group: 939
|
||||||
|
|
||||||
hivelogdir:
|
thehivelogdir:
|
||||||
file.directory:
|
file.directory:
|
||||||
- name: /opt/so/log/hive
|
- name: /opt/so/log/thehive
|
||||||
- makedirs: True
|
- makedirs: True
|
||||||
- user: 939
|
- user: 939
|
||||||
- group: 939
|
- group: 939
|
||||||
|
|
||||||
hiveconf:
|
thehiveconf:
|
||||||
file.recurse:
|
file.recurse:
|
||||||
- name: /opt/so/conf/hive/etc
|
- name: /opt/so/conf/thehive/etc
|
||||||
- source: salt://hive/thehive/etc
|
- source: salt://thehive/etc
|
||||||
- user: 939
|
- user: 939
|
||||||
- group: 939
|
- group: 939
|
||||||
- template: jinja
|
- template: jinja
|
||||||
@@ -40,7 +40,7 @@ cortexlogdir:
 cortexconf:
   file.recurse:
     - name: /opt/so/conf/cortex
-    - source: salt://hive/thehive/etc
+    - source: salt://thehive/etc
     - user: 939
     - group: 939
     - template: jinja
@@ -48,9 +48,9 @@ cortexconf:
 # Install Elasticsearch

 # Made directory for ES data to live in
-hiveesdata:
+thehiveesdata:
   file.directory:
-    - name: /nsm/hive/esdata
+    - name: /nsm/thehive/esdata
     - makedirs: True
     - user: 939
     - group: 939
@@ -64,16 +64,16 @@ so-thehive-es:
     - interactive: True
     - tty: True
     - binds:
-      - /nsm/hive/esdata:/usr/share/elasticsearch/data:rw
-      - /opt/so/conf/hive/etc/es/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
-      - /opt/so/conf/hive/etc/es/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
-      - /opt/so/log/hive:/var/log/elasticsearch:rw
+      - /nsm/thehive/esdata:/usr/share/elasticsearch/data:rw
+      - /opt/so/conf/thehive/etc/es/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
+      - /opt/so/conf/thehive/etc/es/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
+      - /opt/so/log/thehive:/var/log/elasticsearch:rw
     - environment:
       - http.host=0.0.0.0
       - http.port=9400
       - transport.tcp.port=9500
       - transport.host=0.0.0.0
-      - cluster.name=hive
+      - cluster.name=thehive
       - thread_pool.index.queue_size=100000
       - thread_pool.search.queue_size=100000
       - thread_pool.bulk.queue_size=100000
@@ -90,13 +90,13 @@ so-cortex:
     - name: so-cortex
     - user: 939
     - binds:
-      - /opt/so/conf/hive/etc/cortex-application.conf:/opt/cortex/conf/application.conf:ro
+      - /opt/so/conf/thehive/etc/cortex-application.conf:/opt/cortex/conf/application.conf:ro
     - port_bindings:
       - 0.0.0.0:9001:9001

 cortexscript:
   cmd.script:
-    - source: salt://hive/thehive/scripts/cortex_init
+    - source: salt://thehive/scripts/cortex_init
     - cwd: /opt/so
     - template: jinja

@@ -109,12 +109,12 @@ so-thehive:
     - name: so-thehive
     - user: 939
     - binds:
-      - /opt/so/conf/hive/etc/application.conf:/opt/thehive/conf/application.conf:ro
+      - /opt/so/conf/thehive/etc/application.conf:/opt/thehive/conf/application.conf:ro
     - port_bindings:
       - 0.0.0.0:9000:9000

-hivescript:
+thehivescript:
   cmd.script:
-    - source: salt://hive/thehive/scripts/hive_init
+    - source: salt://thehive/scripts/hive_init
     - cwd: /opt/so
     - template: jinja
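Since the state IDs and salt:// sources all moved, the state is now addressed by its new name. A minimal sketch of rendering and applying it on the master (not shown in the commit), assuming the renamed tree is reachable as the thehive state:

# render the templated SLS, then apply it
salt-call state.show_sls thehive
salt-call state.apply thehive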
salt/thehive/scripts/hive_init (new executable file, 64 lines)
@@ -0,0 +1,64 @@
+#!/bin/bash
+{% set MASTERIP = salt['pillar.get']('static:masterip', '') %}
+{%- set THEHIVEUSER = salt['pillar.get']('static:hiveuser', '') %}
+{%- set THEHIVEPASSWORD = salt['pillar.get']('static:hivepassword', '') %}
+{%- set THEHIVEKEY = salt['pillar.get']('static:hivekey', '') %}
+
+thehive_init(){
+  sleep 120
+  THEHIVE_IP="{{MASTERIP}}"
+  THEHIVE_USER="{{THEHIVEUSER}}"
+  THEHIVE_PASSWORD="{{THEHIVEPASSWORD}}"
+  THEHIVE_KEY="{{THEHIVEKEY}}"
+  SOCTOPUS_CONFIG="/opt/so/saltstack/salt/soctopus/files/SOCtopus.conf"
+
+  echo -n "Waiting for TheHive..."
+  COUNT=0
+  THEHIVE_CONNECTED="no"
+  while [[ "$COUNT" -le 240 ]]; do
+    curl --output /dev/null --silent --head --fail -k "https://$THEHIVE_IP/thehive"
+    if [ $? -eq 0 ]; then
+      THEHIVE_CONNECTED="yes"
+      echo "connected!"
+      break
+    else
+      ((COUNT+=1))
+      sleep 1
+      echo -n "."
+    fi
+  done
+
+  if [ "$THEHIVE_CONNECTED" == "yes" ]; then
+
+    # Migrate DB
+    curl -v -k -XPOST "https://$THEHIVE_IP:/thehive/api/maintenance/migrate"
+
+    # Create intial TheHive user
+    curl -v -k "https://$THEHIVE_IP/thehive/api/user" -H "Content-Type: application/json" -d "{\"login\" : \"$THEHIVE_USER\",\"name\" : \"$THEHIVE_USER\",\"roles\" : [\"read\",\"alert\",\"write\",\"admin\"],\"preferences\" : \"{}\",\"password\" : \"$THEHIVE_PASSWORD\", \"key\": \"$THEHIVE_KEY\"}"
+
+    # Pre-load custom fields
+    #
+    # reputation
+    curl -v -k "https://$THEHIVE_IP/thehive/api/list/custom_fields" -H "Authorization: Bearer $THEHIVE_KEY" -H "Content-Type: application/json" -d "{\"value\":{\"name\": \"reputation\", \"reference\": \"reputation\", \"description\": \"This field provides an overall reputation status for an address/domain.\", \"type\": \"string\", \"options\": []}}"
+
+
+    touch /opt/so/state/thehive.txt
+  else
+    echo "We experienced an issue connecting to TheHive!"
+  fi
+}
+
+if [ -f /opt/so/state/thehive.txt ]; then
+  exit 0
+else
+  rm -f garbage_file
+  while ! wget -O garbage_file {{MASTERIP}}:9400 2>/dev/null
+  do
+    echo "Waiting for Elasticsearch..."
+    rm -f garbage_file
+    sleep 1
+  done
+  rm -f garbage_file
+  sleep 5
+  thehive_init
+fi
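A quick post-run check, sketched from the same calls the script itself makes (not part of the commit); THEHIVE_IP is whatever the template rendered for {{MASTERIP}}:

ls -l /opt/so/state/thehive.txt     # written by thehive_init only on success
curl -k --silent --head --fail "https://$THEHIVE_IP/thehive" && echo "TheHive is answering"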