Merge pull request #1338 from Security-Onion-Solutions/experimental
Fix strelka rules
@@ -14,11 +14,10 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with this program. If not, see <http://www.gnu.org/licenses/>.
-{%- set ISAIRGAP = salt['pillar.get']('global:airgap', 'False') -%}
-
+{%- set ISAIRGAP = salt['pillar.get']('global:airgap', 'False') %}
 
 output_dir="/opt/so/saltstack/default/salt/strelka/rules"
-#mkdir -p $output_dir
+mkdir -p $output_dir
 repos="$output_dir/repos.txt"
 ignorefile="$output_dir/ignore.txt"
 
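Two small fixes land in this first hunk: the set tag loses its trailing whitespace-trim marker, and the commented-out mkdir -p $output_dir is re-enabled so the script creates the rules directory instead of assuming it exists. On the Jinja side, a minimal illustration of what the marker does (not taken from this file, and assuming Jinja's default whitespace settings): -%} swallows the newline that follows the tag, while a plain %} leaves it in the rendered shell script.

    {#- illustration only: the trailing dash trims the newline after the tag -#}
    {%- set ISAIRGAP = 'False' -%}
    echo "rendered immediately after the tag, no blank line above"

    {#- without the dash the newline survives into the rendered script -#}
    {%- set ISAIRGAP = 'False' %}
    echo "rendered with a blank line above where the set tag used to be"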
@@ -26,18 +25,17 @@ deletecounter=0
 newcounter=0
 updatecounter=0
 
-gh_status=$(curl -s -o /dev/null -w "%{http_code}" http://github.com)
-
-{%- if ISAIRGAP is sameas true -%}
+{% if ISAIRGAP is sameas true %}
+
 
 clone_dir="/nsm/repo/rules/strelka"
 repo_name="signature-base"
-mkdir -p $output_dir/$repo_name
+mkdir -p /opt/so/saltstack/default/salt/strelka/rules/signature-base
 
 [ -f $clone_dir/LICENSE ] && cp $clone_dir/$repo_name/LICENSE $output_dir/$repo_name
 
 # Copy over rules
-for i in $(find $clone_dir -name "*.yar*"); do
+for i in $(find $clone_dir/yara -name "*.yar*"); do
 rule_name=$(echo $i | awk -F '/' '{print $NF}')
 repo_sum=$(sha256sum $i | awk '{print $1}')
 
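In the airgap branch the rule copy now walks only the yara/ subdirectory of the local signature-base checkout, and the destination directory is spelled out in full. The per-rule SHA-256 in the loop suggests a compare-before-copy scheme; a minimal sketch of that idea follows (the dest variable, the copy calls, and the counter updates are illustrative assumptions, since those lines fall outside this hunk).

    # Sketch: copy an upstream rule only when it is new or its checksum changed.
    for i in $(find "$clone_dir/yara" -name "*.yar*"); do
        rule_name=$(echo "$i" | awk -F '/' '{print $NF}')
        repo_sum=$(sha256sum "$i" | awk '{print $1}')
        dest="$output_dir/$repo_name/$rule_name"
        if [ ! -f "$dest" ]; then
            cp "$i" "$dest" && newcounter=$((newcounter+1))
        elif [ "$(sha256sum "$dest" | awk '{print $1}')" != "$repo_sum" ]; then
            cp "$i" "$dest" && updatecounter=$((updatecounter+1))
        fi
    done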
@@ -62,7 +60,7 @@ for i in $(find $clone_dir -name "*.yar*"); do
 fi
 fi;
 done
 
 # Check to see if we have any old rules that need to be removed
 for i in $(find $output_dir/$repo_name -name "*.yar*" | awk -F '/' '{print $NF}'); do
 is_repo_rule=$(find $clone_dir -name "$i" | wc -l)
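The cleanup pass that follows treats any deployed rule with no counterpart anywhere in the checkout as deprecated. The deletion itself is outside the lines shown here; a rough sketch of the likely shape (the rm and the counter update are assumptions):

    # Sketch: remove deployed rules that no longer exist in the source checkout.
    for i in $(find "$output_dir/$repo_name" -name "*.yar*" | awk -F '/' '{print $NF}'); do
        is_repo_rule=$(find "$clone_dir" -name "$i" | wc -l)
        if [ "$is_repo_rule" -eq 0 ]; then
            rm -f "$output_dir/$repo_name/$i"
            deletecounter=$((deletecounter+1))
        fi
    done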
@@ -87,9 +85,9 @@ echo "Done!"
 echo "$deletecounter rules removed because they were deprecated or don't exist in the source repo."
 fi
 
-{%- else -%}
+{% else %}
 
-
+gh_status=$(curl -s -o /dev/null -w "%{http_code}" http://github.com)
 clone_dir="/tmp"
 if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then
 
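The github.com reachability probe now appears only in this non-airgap branch, which fits its purpose: an airgapped manager reads from the local /nsm/repo mirror and has no reason to call out. Standalone, the probe pattern looks like this (the echo messages are illustrative, not lines from the script):

    # curl emits only the HTTP status code; anything other than 200/301 means
    # GitHub is unreachable and the existing rules should be left alone.
    gh_status=$(curl -s -o /dev/null -w "%{http_code}" http://github.com)
    if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then
        echo "github.com reachable (HTTP $gh_status); refreshing Strelka rules"
    else
        echo "github.com unreachable (HTTP $gh_status); keeping existing rules"
    fi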
@@ -132,7 +130,7 @@ if [ "$gh_status" == "200" ] || [ "$gh_status" == "301" ]; then
 fi
 fi;
 done
 
 # Check to see if we have any old rules that need to be removed
 for i in $(find $output_dir/$repo_name -name "*.yar*" | awk -F '/' '{print $NF}'); do
 is_repo_rule=$(find $clone_dir/$repo_name -name "$i" | wc -l)
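A practical way to check edits like these is to render the template through Salt's Jinja pipeline and read the resulting shell script; with no pillar data the airgap default of 'False' applies, so the non-airgap branch is what renders. A rough example using the standard slsutil.renderer execution function (the file path is a placeholder, not taken from this page):

    # Render the Jinja-templated script locally and inspect the generated shell code.
    # <path-to-this-template> is a placeholder -- substitute the real path in the repo.
    salt-call --local slsutil.renderer path=<path-to-this-template> default_renderer=jinja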