Mirror of https://github.com/Security-Onion-Solutions/securityonion.git (synced 2025-12-14 13:12:49 +01:00)

Compare commits: 2.3.91 ... feature/us (11 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 62c702e269 | |
| | f10290246f | |
| | c919f6bca0 | |
| | 51b421a165 | |
| | 86ff54e844 | |
| | b8cb3f5815 | |
| | 381a51271f | |
| | 10500178d5 | |
| | e81e66f40d | |
| | f6bd74aadf | |
| | 322c2804fc | |
@@ -1,6 +1,6 @@
-## Security Onion 2.3.91
+## Security Onion 2.3.80

-Security Onion 2.3.91 is here!
+Security Onion 2.3.80 is here!

## Screenshots
@@ -1,18 +1,18 @@
-### 2.3.91 ISO image built on 2021/12/20
+### 2.3.80 ISO image built on 2021/09/27


### Download and Verify

-2.3.91 ISO image:
-https://download.securityonion.net/file/securityonion/securityonion-2.3.91.iso
+2.3.80 ISO image:
+https://download.securityonion.net/file/securityonion/securityonion-2.3.80.iso

-MD5: CD979038EC60318B7C7F8BA278A12D04
-SHA1: 9FB2AC07FCD24A4993B3F61FC2B2863510650520
-SHA256: BAA8BEF574ECCB9ADC326D736A00C00AAF940FC6AD68CF491FF1F0AB6C5BAA64
+MD5: 24F38563860416F4A8ABE18746913E14
+SHA1: F923C005F54EA2A17AB225ADA0DA46042707AAD9
+SHA256: 8E95D10AF664D9A406C168EC421D943CB23F0D0C1813C6C2DBA9B4E131984018

Signature for ISO image:
-https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.91.iso.sig
+https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.80.iso.sig

Signing key:
https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS
@@ -26,22 +26,22 @@ wget https://raw.githubusercontent.com/Security-Onion-Solutions/securityonion/master/KEYS

Download the signature file for the ISO:
```
-wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.91.iso.sig
+wget https://github.com/Security-Onion-Solutions/securityonion/raw/master/sigs/securityonion-2.3.80.iso.sig
```

Download the ISO image:
```
-wget https://download.securityonion.net/file/securityonion/securityonion-2.3.91.iso
+wget https://download.securityonion.net/file/securityonion/securityonion-2.3.80.iso
```

Verify the downloaded ISO image using the signature file:
```
-gpg --verify securityonion-2.3.91.iso.sig securityonion-2.3.91.iso
+gpg --verify securityonion-2.3.80.iso.sig securityonion-2.3.80.iso
```

The output should show "Good signature" and the Primary key fingerprint should match what's shown below:
```
-gpg: Signature made Mon 20 Dec 2021 12:37:42 PM EST using RSA key ID FE507013
+gpg: Signature made Mon 27 Sep 2021 08:55:01 AM EDT using RSA key ID FE507013
gpg: Good signature from "Security Onion Solutions, LLC <info@securityonionsolutions.com>"
gpg: WARNING: This key is not certified with a trusted signature!
gpg: There is no indication that the signature belongs to the owner.
```
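For readers scripting the steps above: a minimal sketch of the same check in Python, assuming `gpg` is on the PATH and the signing key from KEYS has already been imported (the helper name is illustrative, not part of the repo):

```python
import subprocess

def verify_iso(iso_path: str, sig_path: str) -> bool:
    """Run gpg --verify and confirm a good signature, as the README does by hand."""
    result = subprocess.run(["gpg", "--verify", sig_path, iso_path],
                            capture_output=True, text=True)
    # gpg reports verification results on stderr; exit code 0 means the
    # signature checked out, and we also look for the literal phrase the
    # README tells you to expect.
    return result.returncode == 0 and "Good signature" in result.stderr
```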
@@ -2,6 +2,7 @@ base:
  '*':
    - patch.needs_restarting
    - logrotate
+    - users

  '*_eval or *_helixsensor or *_heavynode or *_sensor or *_standalone or *_import':
    - match: compound
pillar/users/init.sls (new file, +2)
@@ -0,0 +1,2 @@
# users pillar goes in /opt/so/saltstack/local/pillar/users/init.sls
# the users directory may need to be created under /opt/so/saltstack/local/pillar
pillar/users/pillar.example (new file, +19)
@@ -0,0 +1,19 @@
users:
  sclapton:
    # required fields
    status: present
    # node_access determines which node types the user can access.
    # this can either be by grains.role or by final part of the minion id after the _
    node_access:
      - standalone
      - searchnode
    # optional fields
    fullname: Stevie Claptoon
    uid: 1001
    gid: 1001
    homephone: does not have a phone
    groups:
      - mygroup1
      - mygroup2
      - wheel # give sudo access
pillar/users/pillar.usage (new file, +20)
@@ -0,0 +1,20 @@
users:
  sclapton:
    # required fields
    status: <present | absent>
    # node_access determines which node types the user can access.
    # this can either be by grains.role or by final part of the minion id after the _
    node_access:
      - standalone
      - searchnode
    # optional fields
    fullname: <string>
    uid: <integer>
    gid: <integer>
    roomnumber: <string>
    workphone: <string>
    homephone: <string>
    groups:
      - <string>
      - <string>
      - wheel # give sudo access
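Since pillar.usage spells out the schema, a pre-flight check is easy to script. A sketch assuming PyYAML is installed; the validator itself is hypothetical and not shipped with Security Onion:

```python
import yaml

REQUIRED_FIELDS = {"status", "node_access"}
VALID_STATUS = {"present", "absent"}

def validate_users_pillar(path: str) -> list:
    """Collect schema problems in a users pillar file before running a highstate."""
    with open(path) as f:
        users = (yaml.safe_load(f) or {}).get("users", {})
    problems = []
    for name, fields in users.items():
        for missing in sorted(REQUIRED_FIELDS - set(fields)):
            problems.append(f"{name}: missing required field '{missing}'")
        if fields.get("status") not in VALID_STATUS:
            problems.append(f"{name}: status must be 'present' or 'absent'")
    return problems
```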
@@ -23,6 +23,7 @@ import sys
import argparse
+import re
from lxml import etree as ET
from xml.dom import minidom
from datetime import datetime as dt
from datetime import timezone as tz
@@ -78,15 +79,20 @@ def ip_prompt() -> str:


def wazuh_enabled() -> bool:
-    file = f'{LOCAL_SALT_DIR}/pillar/global.sls'
-    with open(file, 'r') as pillar:
-        if 'wazuh: 1' in pillar.read():
-            return True
+    for file in os.listdir(f'{LOCAL_SALT_DIR}/pillar'):
+        with open(file, 'r') as pillar:
+            if 'wazuh: 1' in pillar.read():
+                return True
    return False


def root_to_str(root: ET.ElementTree) -> str:
-    return ET.tostring(root, encoding='unicode', method='xml', xml_declaration=False, pretty_print=True)
+    xml_str = ET.tostring(root, encoding='unicode', method='xml').replace('\n', '')
+    xml_str = re.sub(r'(?:(?<=>) *)', '', xml_str)
+    xml_str = re.sub(r' -', '', xml_str)
+    xml_str = re.sub(r' -->', ' -->', xml_str)
+    dom = minidom.parseString(xml_str)
+    return dom.toprettyxml(indent=" ")


def add_wl(ip):
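One side of the hunk above iterates os.listdir(), which yields bare file names, so the inner open() would resolve them against the current working directory rather than the pillar directory. A path-safe sketch of that variant (the LOCAL_SALT_DIR value is assumed from the script's context):

```python
import os

LOCAL_SALT_DIR = '/opt/so/saltstack/local'  # assumed; defined elsewhere in the script

def wazuh_enabled() -> bool:
    pillar_dir = os.path.join(LOCAL_SALT_DIR, 'pillar')
    for name in os.listdir(pillar_dir):
        path = os.path.join(pillar_dir, name)  # rejoin the directory prefix
        if os.path.isfile(path):
            with open(path) as pillar:
                if 'wazuh: 1' in pillar.read():
                    return True
    return False
```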
@@ -118,7 +124,7 @@ def apply(role: str, ip: str) -> int:
    else:
        return cmd.returncode
    if cmd.returncode == 0:
-        if wazuh_enabled() and role=='analyst':
+        if wazuh_enabled and role=='analyst':
            try:
                add_wl(ip)
                print(f'Added whitelist entry for {ip} from {WAZUH_CONF}', file=sys.stderr)
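The one-character difference in this hunk matters: a bare function reference is always truthy, so only the version with parentheses actually consults the pillar.

```python
def wazuh_enabled() -> bool:
    return False  # pretend Wazuh is disabled

# Without parentheses the function object itself is tested, which is
# always true, so the analyst branch would run unconditionally:
assert bool(wazuh_enabled) is True
# With parentheses the real answer is used:
assert bool(wazuh_enabled()) is False
```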
@@ -16,7 +16,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.

-. /usr/sbin/so-common
export LC_CTYPE="en_US.UTF-8"

UPDATE_DIR=/tmp/sogh/securityonion
DEFAULT_SALT_DIR=/opt/so/saltstack/default
@@ -600,33 +599,18 @@ up_to_2.3.80() {

up_to_2.3.90() {
  for i in manager managersearch eval standalone; do
-    echo "Checking for compgen match of /opt/so/saltstack/local/pillar/minions/*_$i.sls"
-    if compgen -G "/opt/so/saltstack/local/pillar/minions/*_$i.sls"; then
-      echo "Found compgen match for /opt/so/saltstack/local/pillar/minions/*_$i.sls"
-      for f in $(compgen -G "/opt/so/saltstack/local/pillar/minions/*_$i.sls"); do
-        if grep -qozP "^soc:\n.*es_index_patterns: '\*:so-\*,\*:endgame-\*'" "$f"; then
-          echo "soc:es_index_patterns already present in $f"
-        else
-          echo "Appending soc pillar data to $f"
-          echo "soc:" >> "$f"
-          sed -i "/^soc:/a \\ es_index_patterns: '*:so-*,*:endgame-*'" "$f"
-        fi
-      done
+    if compgen -G "/opt/so/saltstack/local/pillar/minions/*_$i.sls" > /dev/null; then
+      echo "soc:" >> /opt/so/saltstack/local/pillar/minions/*_$i.sls
+      sed -i "/^soc:/a \\ es_index_patterns: '*:so-*,*:endgame-*'" /opt/so/saltstack/local/pillar/minions/*_$i.sls
    fi
  done

  # Create Endgame Hostgroup
-  echo "Adding endgame hostgroup with so-firewall"
-  if so-firewall addhostgroup endgame 2>&1 | grep -q 'Already exists'; then
-    echo 'endgame hostgroup already exists'
-  else
-    echo 'endgame hostgroup added'
-  fi
+  so-firewall addhostgroup endgame

  # Force influx to generate a new cert
-  echo "Moving influxdb.crt and influxdb.key to generate new certs"
-  mv -vf /etc/pki/influxdb.crt /etc/pki/influxdb.crt.2390upgrade
-  mv -vf /etc/pki/influxdb.key /etc/pki/influxdb.key.2390upgrade
+  mv /etc/pki/influxdb.crt /etc/pki/influxdb.crt.2390upgrade
+  mv /etc/pki/influxdb.key /etc/pki/influxdb.key.2390upgrade

  # remove old common ingest pipeline in default
  rm -vf /opt/so/saltstack/default/salt/elasticsearch/files/ingest/common
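compgen -G succeeds when its glob matches at least one file; the same gate in Python, with the pattern copied from the hunk (sketch only):

```python
import glob

for role in ("manager", "managersearch", "eval", "standalone"):
    pattern = f"/opt/so/saltstack/local/pillar/minions/*_{role}.sls"
    for sls in glob.glob(pattern):  # empty list where compgen would fail
        print("would append soc pillar data to", sls)
```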
@@ -651,8 +635,6 @@ up_to_2.3.90() {
    >> /opt/so/saltstack/local/pillar/secrets.sls

  fi

-  sed -i -re 's/^(playbook_admin.*|playbook_automation.*)/ \1/g' /opt/so/saltstack/local/pillar/secrets.sls
-
  INSTALLEDVERSION=2.3.90
}
@@ -825,22 +807,16 @@ upgrade_to_2.3.50_repo() {
}

verify_latest_update_script() {
-  #we need to render soup and so-common first since they contain jinja
-  salt-call slsutil.renderer $UPDATE_DIR/salt/common/tools/sbin/soup default_renderer='jinja' --local --out=newline_values_only --out-indent=-4 --out-file=/tmp/soup
-  sed -i -e '$a\' /tmp/soup
-  salt-call slsutil.renderer $UPDATE_DIR/salt/common/tools/sbin/so-common default_renderer='jinja' --local --out=newline_values_only --out-indent=-4 --out-file=/tmp/so-common
-  sed -i -e '$a\' /tmp/so-common
  # Check to see if the update scripts match. If not run the new one.
-  CURRENTSOUP=$(md5sum /usr/sbin/soup | awk '{print $1}')
-  GITSOUP=$(md5sum /tmp/soup | awk '{print $1}')
-  CURRENTCMN=$(md5sum /usr/sbin/so-common | awk '{print $1}')
-  GITCMN=$(md5sum /tmp/so-common | awk '{print $1}')
-  CURRENTIMGCMN=$(md5sum /usr/sbin/so-image-common | awk '{print $1}')
+  CURRENTSOUP=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/soup | awk '{print $1}')
+  GITSOUP=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/soup | awk '{print $1}')
+  CURRENTCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-common | awk '{print $1}')
+  GITCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-common | awk '{print $1}')
+  CURRENTIMGCMN=$(md5sum /opt/so/saltstack/default/salt/common/tools/sbin/so-image-common | awk '{print $1}')
  GITIMGCMN=$(md5sum $UPDATE_DIR/salt/common/tools/sbin/so-image-common | awk '{print $1}')

  if [[ "$CURRENTSOUP" == "$GITSOUP" && "$CURRENTCMN" == "$GITCMN" && "$CURRENTIMGCMN" == "$GITIMGCMN" ]]; then
    echo "This version of the soup script is up to date. Proceeding."
-    rm -f /tmp/soup /tmp/so-common
  else
    echo "You are not running the latest soup version. Updating soup and its components. Might take multiple runs to complete"
    cp $UPDATE_DIR/salt/common/tools/sbin/soup $DEFAULT_SALT_DIR/salt/common/tools/sbin/
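Both sides of this hunk implement the same idea: hash the installed script and the candidate, and only proceed when the digests match. A compact Python rendering of that check (paths are illustrative):

```python
import hashlib

def md5_of(path: str) -> str:
    h = hashlib.md5()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(65536), b""):  # stream, keep memory flat
            h.update(chunk)
    return h.hexdigest()

def script_is_current(installed: str, candidate: str) -> bool:
    # mirrors: [[ "$CURRENTSOUP" == "$GITSOUP" ]] etc.
    return md5_of(installed) == md5_of(candidate)
```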
@@ -853,27 +829,9 @@ verify_latest_update_script() {
  fi
}

-apply_hotfix() {
-  if [[ "$INSTALLEDVERSION" == "2.3.90" ]] ; then
-    FILE="/nsm/wazuh/etc/ossec.conf"
-    echo "Detecting if ossec.conf needs corrected..."
-    if head -1 $FILE | grep -q "xml version"; then
-      echo "$FILE has an XML header; removing"
-      sed -i 1d $FILE
-      so-wazuh-restart
-    else
-      echo "$FILE does not have an XML header, so no changes are necessary."
-    fi
-  else
-    echo "No actions required. ($INSTALLEDVERSION/$HOTFIXVERSION)"
-  fi
-}
-

main() {
  trap 'check_err $?' EXIT

-  echo "### Preparing soup at $(date) ###"
  check_pillar_items

  echo "Checking to see if this is an airgap install."
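The hotfix boils down to: if the first line of ossec.conf is an XML declaration, delete it and restart Wazuh. A rough Python equivalent of the head -1 | grep plus sed -i 1d sequence, with the restart left as a note:

```python
def strip_xml_declaration(path: str) -> bool:
    """Drop a leading '<?xml version ...?>' line, as apply_hotfix does with sed."""
    with open(path) as f:
        lines = f.readlines()
    if lines and "xml version" in lines[0]:
        with open(path, "w") as f:
            f.writelines(lines[1:])
        return True  # the caller would then restart Wazuh (so-wazuh-restart)
    return False
```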
@@ -925,10 +883,9 @@ main() {
  set -e

  if [ "$is_hotfix" == "true" ]; then
-    echo "Applying $HOTFIXVERSION hotfix"
+    echo "Applying $HOTFIXVERSION"
    copy_new_files
-    apply_hotfix
    echo "Hotfix applied"
    echo ""
-    update_version
+    salt-call state.highstate -l info queue=True
  else
@@ -968,21 +925,21 @@ main() {
      echo "Upgrading Salt"
      # Update the repo files so it can actually upgrade
      upgrade_salt
+      echo "Checking if Salt was upgraded."
    fi

-    echo "Checking if Salt was upgraded."
-    echo ""
-    # Check that Salt was upgraded
-    SALTVERSIONPOSTUPGRADE=$(salt --versions-report | grep Salt: | awk '{print $2}')
-    if [[ "$SALTVERSIONPOSTUPGRADE" != "$NEWSALTVERSION" ]]; then
-      echo "Salt upgrade failed. Check of indicators of failure in $SOUP_LOG."
-      echo "Once the issue is resolved, run soup again."
-      echo "Exiting."
-      echo ""
-      exit 0
-    else
-      echo "Salt upgrade success."
-      echo ""
+      echo ""
+      # Check that Salt was upgraded
+      SALTVERSIONPOSTUPGRADE=$(salt --versions-report | grep Salt: | awk '{print $2}')
+      if [[ "$SALTVERSIONPOSTUPGRADE" != "$NEWSALTVERSION" ]]; then
+        echo "Salt upgrade failed. Check of indicators of failure in $SOUP_LOG."
+        echo "Once the issue is resolved, run soup again."
+        echo "Exiting."
+        echo ""
+        exit 0
+      else
+        echo "Salt upgrade success."
+        echo ""
+      fi
    fi

    preupgrade_changes
@@ -1185,4 +1142,6 @@ EOF
  read -r input
fi

+echo "### Preparing soup at $(date) ###"
main "$@" | tee -a $SOUP_LOG
@@ -24,9 +24,9 @@ set -e

# Check to see if we have extracted the ca cert.
if [ ! -f /opt/so/saltstack/local/salt/common/cacerts ]; then
-  docker run -v /etc/pki/ca.crt:/etc/ssl/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-elasticsearch:{{ VERSION }} -keystore /usr/share/elasticsearch/jdk/lib/security/cacerts -alias SOSCA -import -file /etc/ssl/ca.crt -storepass changeit -noprompt
-  docker cp so-elasticsearchca:/usr/share/elasticsearch/jdk/lib/security/cacerts /opt/so/saltstack/local/salt/common/cacerts
-  docker cp so-elasticsearchca:/etc/ssl/certs/ca-certificates.crt /opt/so/saltstack/local/salt/common/tls-ca-bundle.pem
+  docker run -v /etc/pki/ca.crt:/etc/pki/ca.crt --name so-elasticsearchca --user root --entrypoint jdk/bin/keytool {{ MANAGER }}:5000/{{ IMAGEREPO }}/so-elasticsearch:{{ VERSION }} -keystore /etc/pki/ca-trust/extracted/java/cacerts -alias SOSCA -import -file /etc/pki/ca.crt -storepass changeit -noprompt
+  docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/java/cacerts /opt/so/saltstack/local/salt/common/cacerts
+  docker cp so-elasticsearchca:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem /opt/so/saltstack/local/salt/common/tls-ca-bundle.pem
  docker rm so-elasticsearchca
  echo "" >> /opt/so/saltstack/local/salt/common/tls-ca-bundle.pem
  echo "sosca" >> /opt/so/saltstack/local/salt/common/tls-ca-bundle.pem
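Both variants run the same keytool import and differ only in which keystore and CA path they point at. The underlying call, sketched without the docker wrapping (arguments copied from the diff; invoking keytool directly on the host is an assumption):

```python
import subprocess

def import_ca(keystore: str, ca_cert: str, alias: str = "SOSCA") -> None:
    # Equivalent of the keytool arguments in the hunk above.
    subprocess.run(
        ["keytool", "-keystore", keystore, "-alias", alias,
         "-import", "-file", ca_cert,
         "-storepass", "changeit", "-noprompt"],
        check=True,  # raise if the import fails
    )
```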
@@ -131,10 +131,6 @@ esrolesdir:
    - group: 939
    - makedirs: True

-eslibdir:
-  file.absent:
-    - name: /opt/so/conf/elasticsearch/lib
-
esingestdynamicconf:
  file.recurse:
    - name: /opt/so/conf/elasticsearch/ingest
@@ -262,7 +258,7 @@ so-elasticsearch:
    {% if TRUECLUSTER is sameas false or (TRUECLUSTER is sameas true and not salt['pillar.get']('nodestab', {})) %}
    - discovery.type=single-node
    {% endif %}
-    - ES_JAVA_OPTS=-Xms{{ esheap }} -Xmx{{ esheap }} -Des.transport.cname_in_publish_address=true -Dlog4j2.formatMsgNoLookups=true
+    - ES_JAVA_OPTS=-Xms{{ esheap }} -Xmx{{ esheap }} -Des.transport.cname_in_publish_address=true
    ulimits:
      - memlock=-1:-1
      - nofile=65536:65536
@@ -275,7 +271,7 @@ so-elasticsearch:
      - /opt/so/conf/elasticsearch/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
      - /nsm/elasticsearch:/usr/share/elasticsearch/data:rw
      - /opt/so/log/elasticsearch:/var/log/elasticsearch:rw
-      - /opt/so/conf/ca/cacerts:/usr/share/elasticsearch/jdk/lib/security/cacerts:ro
+      - /opt/so/conf/ca/cacerts:/etc/pki/ca-trust/extracted/java/cacerts:ro
    {% if ismanager %}
      - /etc/pki/ca.crt:/usr/share/elasticsearch/config/ca.crt:ro
    {% else %}
@@ -331,7 +327,7 @@ so-elasticsearch-pipelines-file:

so-elasticsearch-pipelines:
  cmd.run:
-    - name: /opt/so/conf/elasticsearch/so-elasticsearch-pipelines {{ grains.host }}
+    - name: /opt/so/conf/elasticsearch/so-elasticsearch-pipelines {{ esclustername }}
    - onchanges:
      - file: esingestconf
      - file: esingestdynamicconf
@@ -35,7 +35,7 @@ update() {
  wait_for_web_response "http://localhost:5601/app/kibana" "Elastic" 300 "{{ ELASTICCURL }}"
  IFS=$'\r\n' GLOBIGNORE='*' command eval 'LINES=($(cat $1))'
  for i in "${LINES[@]}"; do
-    {{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.16.2" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i "
+    {{ ELASTICCURL }} -X PUT "localhost:5601/api/saved_objects/config/7.15.2" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d " $i "
  done
}
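Each loop iteration PUTs one saved-objects line to Kibana with the kbn-xsrf header; the config id in the URL tracks the Kibana version, which is exactly what this hunk changes. The same request expressed with Python's requests library (assumed available; auth omitted):

```python
import requests

def put_kibana_config(version: str, body: str) -> int:
    resp = requests.put(
        f"http://localhost:5601/api/saved_objects/config/{version}",
        headers={"kbn-xsrf": "true", "Content-Type": "application/json"},
        data=body,
    )
    return resp.status_code  # 200 on success
```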
@@ -1 +1 @@
-{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.16.2","id": "7.16.2","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}
+{"attributes": {"buildNum": 39457,"defaultIndex": "2289a0c0-6970-11ea-a0cd-ffa0f6a1bc29","defaultRoute": "/app/dashboards#/view/a8411b30-6d03-11ea-b301-3d6c35840645","discover:sampleSize": 100,"theme:darkMode": true,"timepicker:timeDefaults": "{\n \"from\": \"now-24h\",\n \"to\": \"now\"\n}"},"coreMigrationVersion": "7.15.2","id": "7.15.2","migrationVersion": {"config": "7.13.0"},"references": [],"type": "config","updated_at": "2021-10-10T10:10:10.105Z","version": "WzI5NzUsMl0="}
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
--Dlog4j2.formatMsgNoLookups=true
@@ -61,10 +61,6 @@ logstash:
    - gid: 931
    - home: /opt/so/conf/logstash

-lslibdir:
-  file.absent:
-    - name: /opt/so/conf/logstash/lib
-
lsetcdir:
  file.directory:
    - name: /opt/so/conf/logstash/etc
@@ -65,10 +65,6 @@ yumconf:
    - mode: 644
    - template: jinja
    - show_changes: False

-cleanairgap:
-  file.absent:
-    - name: /etc/yum.repos.d/airgap_repo.repo
-{% endif %}

cleanyum:
@@ -95,7 +95,7 @@ so-thehive-es:
      - /opt/so/conf/thehive/etc/es/log4j2.properties:/usr/share/elasticsearch/config/log4j2.properties:ro
      - /opt/so/log/thehive:/var/log/elasticsearch:rw
    - environment:
-      - ES_JAVA_OPTS=-Xms512m -Xmx512m -Dlog4j2.formatMsgNoLookups=true
+      - ES_JAVA_OPTS=-Xms512m -Xmx512m
    - port_bindings:
      - 0.0.0.0:9400:9400
      - 0.0.0.0:9500:9500
@@ -34,6 +34,7 @@ base:
    - match: compound
    - salt.minion
    - common
+    - users
    - patch.os.schedule
    - motd
    - salt.minion-check
salt/users/init.sls (new file, +110)
@@ -0,0 +1,110 @@
# The creation of a user will require a pub key placed in /opt/so/saltstack/local/salt/users/authorized_keys/<username>

# If a user is changed from present to absent, their usergroup will be removed, but any additional usergroups that were created
# for that user will remain.

{% from 'users/map.jinja' import reserved_usernames with context %}

{% for username, userdeets in pillar.get('users', {}).items() if username not in reserved_usernames %}
{% if 'status' in userdeets %}
{% if userdeets.status == 'absent' %}

remove_user_{{username}}:
  user.absent:
    - name: {{ username }}
{% if 'purge' in userdeets %}
    - purge: {{ userdeets.purge }}
{% endif %}
    - force: True

{% elif userdeets.status == 'present' %}

{% if 'node_access' in userdeets %}
{% if grains.role in userdeets.node_access or grains.id.split('_')|last in userdeets.node_access %}

add_user_group_{{username}}:
  group.present:
    - name: {{ username }}
{% if 'uid' in userdeets %}
    - gid: {{ userdeets.uid }}
{% endif %}

add_user_{{username}}:
  user.present:
    - name: {{ username }}
    - home: {{ userdeets.get('home', "/home/%s" % username) }}
    - shell: {{ userdeets.get('shell', '/bin/bash') }}
    - usergroup: True
{% if 'fullname' in userdeets %}
    - fullname: {{ userdeets.fullname }}
{% endif %}
{% if 'uid' in userdeets %}
    - uid: {{ userdeets.uid }}
{% endif %}
{% if 'gid' in userdeets %}
    - gid: {{ userdeets.gid }}
{% endif %}
{% if 'roomnumber' in userdeets %}
    - roomnumber: {{ userdeets.roomnumber }}
{% endif %}
{% if 'workphone' in userdeets %}
    - workphone: {{ userdeets.workphone }}
{% endif %}
{% if 'homephone' in userdeets %}
    - homephone: {{ userdeets.homephone }}
{% endif %}
{% if 'groups' in userdeets %}
    - groups:
{% for group in userdeets.groups %}
      - {{ group }}
{% endfor %}
{% endif %}

{{username}}_authorized_keys:
  file.managed:
    - name: /home/{{username}}/.ssh/authorized_keys
    - source: salt://users/authorized_keys/{{username}}
    - user: {{username}}
    - group: {{username}}
    - mode: 644
    - show_diff: False
    - makedirs: True
    - require:
      - user: add_user_{{username}}

{% endif %}
{% endif %}

{% else %}

unknown_status_or_password_not_provided_for_user_{{username}}:
  test.fail_without_changes:
    - comment: "Verify status is 'present' or 'absent' and a password is provided for {{username}} in the users pillar."

{% endif %}

{% else %}

status_not_provided_for_user_{{username}}:
  test.fail_without_changes:
    - comment: "Status should be 'present' or 'absent'."

{% endif %}
{% endfor %}

disable_wheel_pwd_required:
  file.comment:
    - name: /etc/sudoers
    - regex: "%wheel\\s+ALL=\\(ALL\\)\\s+ALL"

allow_wheel_no_pwd:
  file.uncomment:
    - name: /etc/sudoers
    - regex: "%wheel\\s+ALL=\\(ALL\\)\\s+NOPASSWD: ALL"
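The node_access gate in the state above grants a user when either the minion's role grain or the suffix of its minion id (the text after the last underscore) appears in the list; in plain Python:

```python
def user_allowed(role: str, minion_id: str, node_access: list) -> bool:
    # mirrors: grains.role in node_access or grains.id.split('_')|last in node_access
    return role in node_access or minion_id.split("_")[-1] in node_access

assert user_allowed("standalone", "box_standalone", ["standalone", "searchnode"])
assert not user_allowed("sensor", "box_sensor", ["standalone", "searchnode"])
```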
salt/users/map.jinja (new file, +58)
@@ -0,0 +1,58 @@
{% set reserved_usernames = [
  'root',
  'bin',
  'daemon',
  'adm',
  'lp',
  'sync',
  'shutdown',
  'halt',
  'mail',
  'operator',
  'games',
  'ftp',
  'nobody',
  'systemd-network',
  'dbus',
  'polkitd',
  'tss',
  'sshd',
  'ossec',
  'postfix',
  'chrony',
  'ntp',
  'tcpdump',
  'socore',
  'soremote',
  'elasticsearch',
  'stenographer',
  'suricata',
  'zeek',
  'curator',
  'kratos',
  'kibana',
  'elastalert',
  'ossecm',
  'ossecr',
  'logstash',
  'sys',
  'man',
  'news',
  'uucp',
  'proxy',
  'www-data',
  'backup',
  'list',
  'irc',
  'gnats',
  'systemd-resolve',
  'syslog',
  'messagebus',
  '_apt',
  'lxd',
  'uuidd',
  'dnsmasq',
  'landscape',
  'pollinate',
  'ossec'
] %}
@@ -2016,10 +2016,10 @@ reinstall_init() {
{
  if command -v salt-call &> /dev/null && grep -q "master:" /etc/salt/minion 2> /dev/null; then
    # Disable schedule so highstate doesn't start running during the install
-    salt-call -l info schedule.disable --local
+    salt-call -l info schedule.disable

    # Kill any currently running salt jobs, also to prevent issues with highstate.
-    salt-call -l info saltutil.kill_all_jobs --local
+    salt-call -l info saltutil.kill_all_jobs
  fi

  # Kill any salt processes (safely)
@@ -318,7 +318,7 @@ if ! [[ -f $install_opt_file ]]; then
elif [[ $is_minion && $is_iso ]]; then
  $sshcmd -i /root/.ssh/so.key soremote@"$MSRV" [[ -f /etc/yum.repos.d/airgap_repo.repo ]] >> $setup_log 2>&1
  airgap_check=$?
-  [[ $airgap_check == 0 ]] && is_airgap=true >> $setup_log 2>&1
+  [[ $airgap_check ]] && is_airgap=true >> $setup_log 2>&1
fi

reset_proxy
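The two tests in this hunk are not equivalent: airgap_check always holds some string ("0", "1", ...), so the bare [[ $airgap_check ]] is always true, while comparing against 0 actually inspects the ssh exit code. The distinction in Python terms:

```python
import subprocess

# Probe for the airgap repo file the way the setup script does over ssh.
rc = subprocess.run(["test", "-f", "/etc/yum.repos.d/airgap_repo.repo"]).returncode
is_airgap = (rc == 0)        # correct: checks the exit status
always_true = bool(str(rc))  # analogue of bare [[ $airgap_check ]]: never false
```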
@@ -753,7 +753,7 @@ whiptail_install_type_dist() {

dist_option=$(whiptail --title "$whiptail_title" --menu "Do you want to start a new deployment or join this box to \nan existing deployment?" 11 75 2 \
  "New Deployment " "Create a new Security Onion deployment" \
-  "Existing Deployment " "Join to an existing Security Onion deployment " \
+  "Existing Deployment " "Join to an exisiting Security Onion deployment " \
  3>&1 1>&2 2>&3
)
local exitstatus=$?
Binary file not shown. (×7)