Merge pull request #5577 from Security-Onion-Solutions/kilo

Continuation of auth enhancements
Jason Ertel
2021-09-20 06:30:36 -04:00
committed by GitHub
8 changed files with 200 additions and 46 deletions


@@ -26,7 +26,7 @@ if [[ $# -lt 1 || $# -gt 3 ]]; then
echo " where <operation> is one of the following:" echo " where <operation> is one of the following:"
echo "" echo ""
echo " list: Lists all user email addresses currently defined in the identity system" echo " list: Lists all user email addresses currently defined in the identity system"
echo " add: Adds a new user to the identity system; requires 'email' parameter" echo " add: Adds a new user to the identity system; requires 'email' parameter, while 'role' parameter is optional and defaults to $DEFAULT_ROLE"
echo " addrole: Grants a role to an existing user; requires 'email' and 'role' parameters" echo " addrole: Grants a role to an existing user; requires 'email' and 'role' parameters"
echo " delrole: Removes a role from an existing user; requires 'email' and 'role' parameters" echo " delrole: Removes a role from an existing user; requires 'email' and 'role' parameters"
echo " update: Updates a user's password; requires 'email' parameter" echo " update: Updates a user's password; requires 'email' parameter"
@@ -49,8 +49,11 @@ databasePath=${KRATOS_DB_PATH:-/opt/so/conf/kratos/db/db.sqlite}
 bcryptRounds=${BCRYPT_ROUNDS:-12}
 elasticUsersFile=${ELASTIC_USERS_FILE:-/opt/so/saltstack/local/salt/elasticsearch/files/users}
 elasticRolesFile=${ELASTIC_ROLES_FILE:-/opt/so/saltstack/local/salt/elasticsearch/files/users_roles}
+socRolesFile=${SOC_ROLES_FILE:-/opt/so/conf/soc/soc_users_roles}
 esUID=${ELASTIC_UID:-930}
 esGID=${ELASTIC_GID:-930}
+soUID=${SOCORE_UID:-939}
+soGID=${SOCORE_GID:-939}
 function lock() {
   # Obtain file descriptor lock
@@ -87,7 +90,7 @@ function findIdByEmail() {
   email=$1
   response=$(curl -Ss -L ${kratosUrl}/identities)
-  identityId=$(echo "${response}" | jq ".[] | select(.verifiable_addresses[0].value == \"$email\") | .id")
+  identityId=$(echo "${response}" | jq -r ".[] | select(.verifiable_addresses[0].value == \"$email\") | .id")
   echo $identityId
 }
@@ -135,36 +138,46 @@ function updatePassword() {
     validatePassword "$password"
   fi
-  if [[ -n $identityId ]]; then
+  if [[ -n "$identityId" ]]; then
     # Generate password hash
     passwordHash=$(hashPassword "$password")
     # Update DB with new hash
-    echo "update identity_credentials set config=CAST('{\"hashed_password\":\"$passwordHash\"}' as BLOB) where identity_id=${identityId};" | sqlite3 "$databasePath"
+    echo "update identity_credentials set config=CAST('{\"hashed_password\":\"$passwordHash\"}' as BLOB) where identity_id='${identityId}';" | sqlite3 "$databasePath"
     [[ $? != 0 ]] && fail "Unable to update password"
   fi
 }
-function createElasticFile() {
+function createFile() {
   filename=$1
+  uid=$2
+  gid=$3
+  mkdir -p $(dirname "$filename")
   truncate -s 0 "$filename"
   chmod 600 "$filename"
-  chown "${esUID}:${esGID}" "$filename"
+  chown "${uid}:${gid}" "$filename"
 }
 function ensureRoleFileExists() {
-  if [ ! -f "$elasticRolesFile" ]; then
-    echo "Creating new roles file: $elasticRolesFile"
-    rolesTmpFile="${elasticRolesFile}.tmp"
-    createElasticFile "${rolesTmpFile}"
-    authPillarJson=$(lookup_salt_value "auth" "elasticsearch" "pillar" "json")
-    syncElasticSystemRole "$authPillarJson" "so_elastic_user" "superuser" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_kibana_user" "superuser" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_logstash_user" "superuser" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_beats_user" "superuser" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_monitor_user" "remote_monitoring_collector" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_monitor_user" "remote_monitoring_agent" "$rolesTmpFile"
-    syncElasticSystemRole "$authPillarJson" "so_monitor_user" "monitoring_user" "$rolesTmpFile"
-    mv "${rolesTmpFile}" "${elasticRolesFile}"
+  if [[ ! -f "$socRolesFile" || ! -s "$socRolesFile" ]]; then
+    # Generate the new users file
+    rolesTmpFile="${socRolesFile}.tmp"
+    createFile "$rolesTmpFile" "$soUID" "$soGID"
+    if [[ -f "$databasePath" ]]; then
+      echo "Migrating roles to new file: $socRolesFile"
+      echo "select 'superuser:' || id from identities;" | sqlite3 "$databasePath" \
+        >> "$rolesTmpFile"
+      [[ $? != 0 ]] && fail "Unable to read identities from database"
+      echo "The following users have all been migrated with the super user role:"
+      cat "${rolesTmpFile}"
+    else
+      echo "Database file does not exist yet, installation is likely not yet complete."
+    fi
+    mv "${rolesTmpFile}" "${socRolesFile}"
   fi
 }
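
Given the migration query above ('superuser:' || id), the new $socRolesFile stores role-to-identity-id pairs rather than role-to-email pairs, one per line. A sketch of what the migrated file might contain (the identity ids are made up):

# Hypothetical contents of /opt/so/conf/soc/soc_users_roles after migration:
# superuser:1f0c2d3e-4a5b-6c7d-8e9f-0a1b2c3d4e5f
# superuser:9e8d7c6b-5a4f-3e2d-1c0b-a9f8e7d6c5b4
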
@@ -196,11 +209,12 @@ function syncElasticSystemRole() {
 }
 function syncElastic() {
-  echo "Syncing users between SOC and Elastic..."
-  ensureRoleFileExists
+  echo "Syncing users and roles between SOC and Elastic..."
   usersTmpFile="${elasticUsersFile}.tmp"
-  createElasticFile "${usersTmpFile}"
+  createFile "${usersTmpFile}" "$esUID" "$esGID"
+  rolesTmpFile="${elasticRolesFile}.tmp"
+  createFile "${rolesTmpFile}" "$esUID" "$esGID"
   authPillarJson=$(lookup_salt_value "auth" "elasticsearch" "pillar" "json")
@@ -210,8 +224,16 @@ function syncElastic() {
   syncElasticSystemUser "$authPillarJson" "so_beats_user" "$usersTmpFile"
   syncElasticSystemUser "$authPillarJson" "so_monitor_user" "$usersTmpFile"
-  if [[ -f "$databasePath" ]]; then
-    # Generate the new users file
+  syncElasticSystemRole "$authPillarJson" "so_elastic_user" "superuser" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_kibana_user" "superuser" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_logstash_user" "superuser" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_beats_user" "superuser" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_monitor_user" "remote_monitoring_collector" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_monitor_user" "remote_monitoring_agent" "$rolesTmpFile"
+  syncElasticSystemRole "$authPillarJson" "so_monitor_user" "monitoring_user" "$rolesTmpFile"
+  if [[ -f "$databasePath" && -f "$socRolesFile" ]]; then
+    # Append the SOC users
     echo "select '{\"user\":\"' || ici.identifier || '\", \"data\":' || ic.config || '}'" \
       "from identity_credential_identifiers ici, identity_credentials ic " \
       "where ici.identity_credential_id=ic.id and instr(ic.config, 'hashed_password') " \
@@ -220,12 +242,24 @@ function syncElastic() {
       jq -r '.user + ":" + .data.hashed_password' \
       >> "$usersTmpFile"
     [[ $? != 0 ]] && fail "Unable to read credential hashes from database"
+    # Append the user roles
+    while IFS="" read -r rolePair || [ -n "$rolePair" ]; do
+      userId=$(echo "$rolePair" | cut -d: -f2)
+      role=$(echo "$rolePair" | cut -d: -f1)
+      echo "select '$role:' || ici.identifier " \
+        "from identity_credential_identifiers ici, identity_credentials ic " \
+        "where ici.identity_credential_id=ic.id and ic.identity_id = '$userId';" | \
+        sqlite3 "$databasePath" >> "$rolesTmpFile"
+    done < "$socRolesFile"
   else
-    echo "Database file does not exist yet, skipping users export"
+    echo "Database file or soc roles file does not exist yet, skipping users export"
   fi
   if [[ -s "${usersTmpFile}" ]]; then
     mv "${usersTmpFile}" "${elasticUsersFile}"
+    mv "${rolesTmpFile}" "${elasticRolesFile}"
     if [[ -z "$SKIP_STATE_APPLY" ]]; then
       echo "Elastic state will be re-applied to affected minions. This may take several minutes..."
@@ -238,15 +272,22 @@ function syncElastic() {
 }
 function syncAll() {
+  ensureRoleFileExists
+  # Check if a sync is needed. Sync is not needed if the following are true:
+  #  - user database entries are all older than the elastic users file
+  #  - soc roles file last modify date is older than the elastic roles file
   if [[ -z "$FORCE_SYNC" && -f "$databasePath" && -f "$elasticUsersFile" ]]; then
     usersFileAgeSecs=$(echo $(($(date +%s) - $(date +%s -r "$elasticUsersFile"))))
     staleCount=$(echo "select count(*) from identity_credentials where updated_at >= Datetime('now', '-${usersFileAgeSecs} seconds');" \
       | sqlite3 "$databasePath")
-    if [[ "$staleCount" == "0" ]]; then
+    if [[ "$staleCount" == "0" && "$elasticRolesFile" -nt "$socRolesFile" ]]; then
       return 1
     fi
   fi
   syncElastic
   return 0
 }
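
The extra condition leans on bash's -nt (newer-than) test, which compares modification times and is true when the left-hand file is newer than the right-hand one (or when the left exists and the right does not). So the early return now also requires that the generated Elastic roles file is at least as fresh as the SOC roles file. A tiny standalone illustration:

# -nt compares mtimes:
touch old.txt
sleep 1
touch new.txt
[[ new.txt -nt old.txt ]] && echo "new.txt is newer"     # prints the message
[[ old.txt -nt new.txt ]] || echo "old.txt is not newer"
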
@@ -285,24 +326,28 @@ function adjustUserRole() {
   ensureRoleFileExists
-  filename="$elasticRolesFile"
+  filename="$socRolesFile"
   hasRole=0
-  grep "$role:" "$elasticRolesFile" | grep -q "$email" && hasRole=1
+  grep "$role:" "$socRolesFile" | grep -q "$identityId" && hasRole=1
   if [[ "$op" == "add" ]]; then
     if [[ "$hasRole" == "1" ]]; then
-      fail "User '$email' already has the role: $role"
+      echo "User '$email' already has the role: $role"
+      return 1
     else
-      echo "$role:$email" >> "$filename"
+      echo "$role:$identityId" >> "$filename"
     fi
   elif [[ "$op" == "del" ]]; then
     if [[ "$hasRole" -ne 1 ]]; then
       fail "User '$email' does not have the role: $role"
     else
-      sed -i "/^$role:$email\$/d" "$filename"
+      sed "/^$role:$identityId\$/d" "$filename" > "$filename.tmp"
+      cat "$filename".tmp > "$filename"
+      rm -f "$filename".tmp
     fi
   else
     fail "Unsupported role adjustment operation: $op"
   fi
+  return 0
 }
 function createUser() {
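
The move away from sed -i above is likely deliberate: GNU sed's in-place mode writes a new file and renames it over the original, which swaps the inode. Because soc_users_roles is bind-mounted into the so-soc container (see the docker-compose change at the end of this commit), an inode swap on the host would leave the container reading the stale file, whereas filtering into a temp file and cat-ing it back rewrites the contents in place. A minimal sketch of the same pattern, with a hypothetical file and pattern:

# Delete matching lines while keeping the original inode (hypothetical values):
sed '/^analyst:1f0c2d3e-4a5b/d' roles.txt > roles.txt.tmp
cat roles.txt.tmp > roles.txt    # truncate-and-write preserves the inode
rm -f roles.txt.tmp
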
@@ -321,7 +366,7 @@ EOF
   response=$(curl -Ss -L ${kratosUrl}/identities -d "$addUserJson")
   [[ $? != 0 ]] && fail "Unable to communicate with Kratos"
-  identityId=$(echo "${response}" | jq ".id")
+  identityId=$(echo "${response}" | jq -r ".id")
   if [[ ${identityId} == "null" ]]; then
     code=$(echo "${response}" | jq ".error.code")
     [[ "${code}" == "409" ]] && fail "User already exists"
@@ -329,10 +374,9 @@ EOF
     reason=$(echo "${response}" | jq ".error.message")
     [[ $? == 0 ]] && fail "Unable to add user: ${reason}"
   else
+    updatePassword "$identityId"
     addUserRole "$email" "$role"
   fi
-  updatePassword $identityId
 }
 function updateStatus() {
@@ -382,6 +426,11 @@ function deleteUser() {
   response=$(curl -Ss -XDELETE -L "${kratosUrl}/identities/$identityId")
   [[ $? != 0 ]] && fail "Unable to communicate with Kratos"
+  rolesTmpFile="${socRolesFile}.tmp"
+  createFile "$rolesTmpFile" "$soUID" "$soGID"
+  grep -v "$id" "$socRolesFile" > "$rolesTmpFile"
+  mv "$rolesTmpFile" "$socRolesFile"
 }
 case "${operation}" in
@@ -411,9 +460,10 @@ case "${operation}" in
     lock
     validateEmail "$email"
-    addUserRole "$email" "$role"
+    if addUserRole "$email" "$role"; then
       syncElastic
       echo "Successfully added role to user"
+    fi
     ;;
   "delrole")


@@ -0,0 +1,49 @@
{
  "cluster": [
  ],
  "indices": [
    {
      "names": [
        "so-*"
      ],
      "privileges": [
        "index",
        "maintenance",
        "monitor",
        "read",
        "read_cross_cluster",
        "view_index_metadata"
      ]
    }
  ],
  "applications": [
    {
      "application": "kibana-.kibana",
      "privileges": [
        "feature_discover.read",
        "feature_dashboard.read",
        "feature_canvas.read",
        "feature_maps.read",
        "feature_ml.read",
        "feature_logs.read",
        "feature_visualize.read",
        "feature_infrastructure.read",
        "feature_apm.read",
        "feature_uptime.read",
        "feature_siem.read",
        "feature_dev_tools.read",
        "feature_advancedSettings.read",
        "feature_indexPatterns.read",
        "feature_savedObjectsManagement.read",
        "feature_savedObjectsTagging.read",
        "feature_fleet.read",
        "feature_actions.read",
        "feature_stackAlerts.read"
      ],
      "resources": [
        "*"
      ]
    }
  ],
  "run_as": []
}


@@ -0,0 +1,47 @@
{
  "cluster": [
  ],
  "indices": [
    {
      "names": [
        "so-*"
      ],
      "privileges": [
        "read",
        "read_cross_cluster",
        "monitor",
        "view_index_metadata"
      ]
    }
  ],
  "applications": [
    {
      "application": "kibana-.kibana",
      "privileges": [
        "feature_discover.read",
        "feature_dashboard.read",
        "feature_canvas.read",
        "feature_maps.read",
        "feature_ml.read",
        "feature_logs.read",
        "feature_visualize.read",
        "feature_infrastructure.read",
        "feature_apm.read",
        "feature_uptime.read",
        "feature_siem.read",
        "feature_dev_tools.read",
        "feature_advancedSettings.read",
        "feature_indexPatterns.read",
        "feature_savedObjectsManagement.read",
        "feature_savedObjectsTagging.read",
        "feature_fleet.read",
        "feature_actions.read",
        "feature_stackAlerts.read"
      ],
      "resources": [
        "*"
      ]
    }
  ],
  "run_as": []
}
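
The two new JSON documents above follow the shape of Elasticsearch role definitions (cluster, indices, applications, run_as); both grant read-style access to the so-* indices and a read-only set of Kibana features, and the first additionally allows index and maintenance on those indices. Their filenames and how SOC consumes them are not shown in this view; as a hypothetical illustration only, a definition of this shape could be pushed manually through the Elasticsearch role API:

# Hypothetical manual load (host, credentials, and role name are placeholders):
curl -k -u elastic:changeme -X PUT "https://localhost:9200/_security/role/so_custom_role" \
  -H 'Content-Type: application/json' \
  -d @role.json
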


@@ -124,6 +124,7 @@ syncesusers:
     - creates:
       - /opt/so/saltstack/local/salt/elasticsearch/files/users
       - /opt/so/saltstack/local/salt/elasticsearch/files/users_roles
+      - /opt/so/conf/soc/soc_users_roles
     - show_changes: False
 {% else %}


@@ -167,6 +167,7 @@ http {
       proxy_pass http://{{ manager_ip }}:9822;
       proxy_read_timeout 90;
       proxy_connect_timeout 90;
+      proxy_set_header x-user-id "";
       proxy_set_header Host $host;
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
@@ -378,6 +379,7 @@ http {
       proxy_pass http://{{ manager_ip }}:9822/;
       proxy_read_timeout 90;
       proxy_connect_timeout 90;
+      proxy_set_header x-user-id "";
       proxy_set_header Host $host;
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
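
Setting proxy_set_header x-user-id "" in both proxied locations strips any client-supplied x-user-id header before the request is forwarded to the backend on port 9822 (nginx does not pass a proxied header whose value is the empty string), so a browser cannot inject its own user id; presumably the trusted auth layer supplies the real value downstream. A hypothetical spot check, with host and path as placeholders:

# Header should not survive the proxy hop:
curl -sk -H "x-user-id: spoofed-admin" "https://<manager>/<proxied-path>" -o /dev/null
# then confirm in the backend's request logging that no x-user-id header arrived
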


@@ -9,12 +9,15 @@
 # Syntax => prebuiltRoleX: customRoleY: op
 # Explanation => roleY and roleZ are adjusted permissions of roleX, op is:
 #   + add the new permissions/role mappings (default)
-#   - remove existing prebuilt permissions
+#   - remove existing "explicit" prebuilt permissions. This
+#     does not work with implicitly inherited permissions.
 #
-# In the example below, we will define a new role for junior analysts,
-# that is nearly identical to the analyst role that comes with SOC, with the
-# exception that it removes their ability to obtain details about other
-# analysts in the system.
+# In the example below, we will define two new roles for segregating
+# analysts into two regions. Then we will remove the ability for all
+# analysts to see the roles of other analysts. (Separately we will need to
+# define these two new roles in Elasticsearch so that each analyst region
+# can only see data from their specific region's indices, but that is out
+# of scope for this file.)
 #
-# analyst: jr_analyst
-# user-monitor: jr_analyst:-
+# analyst: westcoast_analyst, eastcoast_analyst
+# roles/read: user-monitor:-


@@ -91,8 +91,10 @@
"roleFiles": [ "roleFiles": [
"rbac/permissions", "rbac/permissions",
"rbac/roles", "rbac/roles",
"rbac/users_roles",
"rbac/custom_roles" "rbac/custom_roles"
],
"userFiles": [
"rbac/users_roles"
] ]
} }
}, },


@@ -91,7 +91,7 @@ so-soc:
- /opt/so/conf/soc/banner.md:/opt/sensoroni/html/login/banner.md:ro - /opt/so/conf/soc/banner.md:/opt/sensoroni/html/login/banner.md:ro
- /opt/so/conf/soc/custom.js:/opt/sensoroni/html/js/custom.js:ro - /opt/so/conf/soc/custom.js:/opt/sensoroni/html/js/custom.js:ro
- /opt/so/conf/soc/custom_roles:/opt/sensoroni/rbac/custom_roles:ro - /opt/so/conf/soc/custom_roles:/opt/sensoroni/rbac/custom_roles:ro
- /opt/so/conf/elasticsearch/users_roles:/opt/sensoroni/rbac/users_roles:ro - /opt/so/conf/soc/soc_users_roles:/opt/sensoroni/rbac/users_roles:rw
- /opt/so/log/soc/:/opt/sensoroni/logs/:rw - /opt/so/log/soc/:/opt/sensoroni/logs/:rw
{%- if salt['pillar.get']('nodestab', {}) %} {%- if salt['pillar.get']('nodestab', {}) %}
- extra_hosts: - extra_hosts: