Merge pull request #29 from Yamato-Security/add-actions-for-rule

feat: add actions to create security_rules.json
This commit is contained in:
Zach Mathis (田中ザック)
2025-04-21 21:20:44 +09:00
committed by GitHub
4 changed files with 70 additions and 255 deletions

View File

@@ -14,39 +14,6 @@ jobs:
steps:
- uses: actions/checkout@v4
# - name: auditpol /list /subcategory:* /r
# run: auditpol /list /subcategory:* /r
#
# - name: auditpol /get /category:*
# run: auditpol /get /category:*
#
# - name: Get-WinEvent -ListLog * | Select-Object LogName, MaximumSizeInBytes
# run: Get-WinEvent -ListLog * | Select-Object LogName, MaximumSizeInBytes
#
# - name: Get-WinEvent -ListProvider *
# run: (Get-WinEvent -ListProvider Microsoft-Windows-Security-Auditing).Events | ForEach-Object { [PSCustomObject]@{EventID=$_.Id; Description=($_.Description -replace "`r`n", " ") -replace "\..*", ""} }
#
# - name: Checkout self repository
# uses: actions/checkout@v4
#
# - name: Load audit settings(json)
# run: |
# $startTime = Get-Date
# $audit_settings = Get-Content -Path ./config/security_rules.json -Raw | ConvertFrom-Json
# $audit_settings
# $endTime = Get-Date
# $duration = $endTime - $startTime
# Write-Output "Duration: $duration"
#
# - name: Load audit settings(csv)
# run: |
# $startTime = Get-Date
# $audit_settings = Import-Csv ./config/eid_subcategory_mapping.csv
# $audit_settings
# $endTime = Get-Date
# $duration = $endTime - $startTime
# Write-Output "Duration: $duration"
- name: Run WELA.ps1 audit-settings
run: |
./WELA.ps1 audit-settings
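
The steps deleted above only timed how long the two bundled config files take to load (security_rules.json via ConvertFrom-Json, eid_subcategory_mapping.csv via Import-Csv); the workflow now simply runs ./WELA.ps1 audit-settings. For reference, a minimal sketch of the same JSON load-and-time check in Rust, using serde_json as the extractor below does; the path and the timing are illustrative only, not part of this repository:

use serde_json::Value;
use std::{fs, time::Instant};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Mirrors the removed "Load audit settings(json)" step: read security_rules.json,
    // parse it, and report how long the load took (path and timing are illustrative).
    let start = Instant::now();
    let raw = fs::read_to_string("./config/security_rules.json")?;
    let rules: Value = serde_json::from_str(&raw)?;
    let count = rules.as_array().map_or(0, |a| a.len());
    println!("Loaded {count} rule entries in {:?}", start.elapsed());
    Ok(())
}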

View File

@@ -1,7 +1,9 @@
name: create-rule-meta.json
name: create security_rules.json
on:
push:
branches: [ "main" ]
branches: [ "*" ]
schedule:
- cron: '0 20 * * *'
workflow_dispatch:
jobs:
@@ -10,6 +12,15 @@ jobs:
steps:
- name: Checkout self repository
uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
path: WELA
- name: Checkout wela-extractor
uses: actions/checkout@v4
with:
repository: Yamato-Security/WELA-RulesGenerator
path: wela-extractor
- name: Checkout hayabusa-rules
uses: actions/checkout@v4
@@ -18,4 +29,60 @@ jobs:
path: hayabusa-rules
- name: Run
run: cd wela-extractor && cargo run --release -- ../hayabusa-rules ../config/eid_subcategory_mapping.csv ../config/security_rules.json
run: cd wela-extractor && cargo run --release -- ../hayabusa-rules ../WELA/config/eid_subcategory_mapping.csv ../WELA/config/security_rules.json
- name: Create Text
id: create-text
run: |
pushd WELA
echo "action_date=$(date '+%Y-%m-%d %H:%M:%S')" >> $GITHUB_ENV
echo "change_exist=true" >> $GITHUB_ENV
git_new=$(git diff --name-status --diff-filter=AC)
git_mod=$(git diff --name-status --diff-filter=MR)
git_del=$(git diff --name-status --diff-filter=D)
is_rule_changed=$(git status)
if [ "${is_rule_changed}" =~ nothing\sto\scommit ]; then
echo "change_exist=false" >> $GITHUB_ENV
else
echo "<details><summary>New files</summary>" >> ../changed_rule.logs
echo "${git_new}" >> ../changed_rule.logs
echo "</details>" >> ../changed_rule.logs
echo "<details><summary>Modified files</summary>" >> ../changed_rule.logs
echo "${git_mod}" >> ../changed_rule.logs
echo "</details>" >> ../changed_rule.logs
echo "<details><summary>Deleted files</summary>" >> ../changed_rule.logs
echo "${git_del}" >> ../changed_rule.logs
echo "</details>" >> ../changed_rule.logs
fi
popd
- name: Create Pull Request
if: env.change_exist == 'true'
id: cpr
uses: peter-evans/create-pull-request@v4
with:
path: WELA
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Sigma Rule Update (${{ env.action_date }})
branch: rules/auto-sigma-update
delete-branch: true
title: '[Auto] Sigma Update report(${{ env.action_date }})' ### If a PR with the same title already exists, this action updates it instead of creating a new one, so the date is added to the title to force a new PR each run.
branch-suffix: timestamp ### Avoids branch-name duplication: if a PR tied to the same branch already exists it is updated rather than created, and the next step would then be skipped by its if condition.
body: |
${{ env.action_date }} Update report
- name: Enable Pull Request Automerge
if: steps.cpr.outputs.pull-request-operation == 'created' # This only runs if there were sigma rules updates and a new PR was created.
uses: peter-evans/enable-pull-request-automerge@v2
with:
token: ${{ secrets.GITHUB_TOKEN }}
pull-request-number: ${{ steps.cpr.outputs.pull-request-number }}
merge-method: squash
- name: upload change log
if: env.change_exist == 'true'
uses: actions/upload-artifact@v4
with:
name: changed_rule_log
path: ${{ github.workspace }}/changed_rule.logs
retention-days: 30
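
For reference, every element the extractor appends to security_rules.json follows the shape built by the json! call in the (removed) main.rs further down. A hypothetical entry, with all values chosen as placeholders for illustration:

use serde_json::json;

fn main() {
    // Hypothetical security_rules.json entry: the keys match the json! call in the
    // removed main.rs; the values are placeholders, not taken from any real rule.
    let entry = json!({
        "id": "00000000-0000-0000-0000-000000000000",
        "title": "Example rule title",
        "channel": ["sec"],   // "sec" = Security, "pwsh" = PowerShell, otherwise the raw channel name
        "level": "high",
        "event_ids": ["4624"],
        "subcategory_guids": ["{00000000-0000-0000-0000-000000000000}"]
    });
    println!("{}", serde_json::to_string_pretty(&entry).unwrap());
}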

View File

@@ -1,10 +0,0 @@
[package]
name = "wela-extractor"
version = "0.1.0"
edition = "2024"
[dependencies]
csv = "1.3.*"
yaml-rust2 = "0.10.*"
walkdir = "2.*"
serde_json = "*"

View File

@@ -1,209 +0,0 @@
use csv::ReaderBuilder;
use serde_json::{Value, json};
use std::collections::HashSet;
use std::error::Error;
use std::fmt::{Display, Formatter};
use std::fs::write;
use std::{env, fs};
use walkdir::WalkDir;
use yaml_rust2::{Yaml, YamlLoader};
enum Channel {
Security,
PowerShell,
Other(String),
}
impl Display for Channel {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
match self {
Channel::Security => write!(f, "sec"),
Channel::PowerShell => write!(f, "pwsh"),
Channel::Other(name) => write!(f, "{}", name),
}
}
}
fn list_yml_files(dir: &str) -> Vec<String> {
let mut yml_files = Vec::new();
for entry in WalkDir::new(dir).into_iter().filter_map(|e| e.ok()) {
let path = entry.path();
if path.is_file() && path.extension().and_then(|ext| ext.to_str()) == Some("yml") {
if let Some(path_str) = path.to_str() {
yml_files.push(path_str.to_string());
}
}
}
yml_files
}
fn extract_event_ids(yaml: &Yaml, event_ids: &mut HashSet<String>) {
match yaml {
Yaml::Hash(hash) => {
for (key, value) in hash {
if key.as_str() == Some("EventID") {
match value {
Yaml::Array(ids) => {
for id in ids {
if let Some(id) = id.as_i64() {
event_ids.insert(id.to_string());
} else if let Some(id) = id.as_str() {
event_ids.insert(id.to_string());
}
}
}
Yaml::String(id) => {
event_ids.insert(id.clone());
}
Yaml::Integer(id) => {
event_ids.insert(id.to_string());
}
_ => {}
}
} else {
extract_event_ids(value, event_ids);
}
}
}
Yaml::Array(array) => {
for item in array {
extract_event_ids(item, event_ids);
}
}
_ => {}
}
}
fn contains_builtin_channel(yaml: &Yaml) -> Option<Vec<Channel>> {
fn check_channel(value: &Yaml) -> Option<Channel> {
match value.as_str() {
Some("Security") => Some(Channel::Security),
Some("Microsoft-Windows-PowerShell/Operational")
| Some("PowerShellCore/Operational")
| Some("Windows PowerShell") => Some(Channel::PowerShell),
val => Some(Channel::Other(val?.to_string())),
}
}
match yaml {
Yaml::Hash(hash) => {
for (key, value) in hash {
if key.as_str() == Some("Channel") {
match value {
Yaml::Array(array) => {
let mut channels = Vec::new();
for item in array {
if let Some(channel) = check_channel(item) {
channels.push(channel);
}
}
if !channels.is_empty() {
return Some(channels);
}
}
Yaml::String(_) => {
if let Some(channel) = check_channel(value) {
return Some(vec![channel]);
}
}
_ => {}
}
} else if let Some(channel) = contains_builtin_channel(value) {
return Some(channel);
}
}
}
Yaml::Array(array) => {
for item in array {
if let Some(channel) = contains_builtin_channel(item) {
return Some(channel);
}
}
}
_ => {}
}
None
}
fn parse_yaml(doc: Yaml, eid_subcategory_pair: &Vec<(String, String)>) -> Option<Value> {
let sysmon_tag = doc["tags"].as_vec().map_or(false, |tags| tags.iter().any(|tag| tag.as_str() == Some("sysmon")));
if sysmon_tag {
return None;
}
if let Some(ch) = contains_builtin_channel(&doc["detection"]) {
let uuid = doc["id"].as_str().unwrap_or("");
let title = doc["title"].as_str().unwrap_or("");
let level = doc["level"].as_str().unwrap_or("");
let mut event_ids = HashSet::new();
let mut subcategories = HashSet::new();
extract_event_ids(&doc, &mut event_ids);
for event_id in &event_ids {
for (eid, subcategory) in eid_subcategory_pair {
if eid == event_id {
subcategories.insert(subcategory.clone());
}
}
}
let event_ids: Vec<String> = event_ids.into_iter().collect();
let subcategories: Vec<String> = subcategories.into_iter().collect();
return Some(json!({
"id": uuid,
"title": title,
"channel": ch.iter().map(|c| c.to_string()).collect::<Vec<String>>(),
"level": level,
"event_ids": event_ids,
"subcategory_guids": subcategories
}));
}
None
}
fn load_event_id_guid_pairs(file_path: &str) -> Result<Vec<(String, String)>, Box<dyn Error>> {
let mut rdr = ReaderBuilder::new()
.has_headers(true)
.from_path(file_path)?;
let mut pairs = Vec::new();
for result in rdr.records() {
let record = result?;
let event_id = record.get(0).unwrap_or("").to_string();
let guid = record.get(3).unwrap_or("").to_string();
if !event_id.is_empty() && !guid.is_empty() {
pairs.push((event_id, guid));
}
}
Ok(pairs)
}
fn main() -> Result<(), Box<dyn Error>> {
let args: Vec<String> = env::args().collect();
if args.len() != 4 {
eprintln!("Usage: {} <file_path> <dir>", args[0]);
std::process::exit(1);
}
let dir = &args[1];
let yml_files = list_yml_files(dir);
let mut results = Vec::new();
let file_path = &args[2];
let eid_subcategory_pair = load_event_id_guid_pairs(file_path)?;
let out = &args[3];
for file in yml_files {
let contents = fs::read_to_string(&file).expect("Unable to read file");
let docs = YamlLoader::load_from_str(&contents).expect("Unable to parse YAML");
for doc in docs {
if let Some(res) = parse_yaml(doc, &eid_subcategory_pair) {
results.push(res);
}
}
}
let json_output = serde_json::to_string_pretty(&results)?;
println!("{}", json_output);
write(out, json_output)?;
Ok(())
}
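
Although this file moves out of the repository (the workflow above now checks out Yamato-Security/WELA-RulesGenerator and builds the extractor there), its behaviour is easiest to see with a small test against parse_yaml. A sketch that assumes it sits in the same file as the code above; the inline rule and the EventID-to-GUID pair are invented for illustration:

#[cfg(test)]
mod tests {
    use super::*;
    use yaml_rust2::YamlLoader;

    #[test]
    fn keeps_a_builtin_channel_rule() {
        // Invented one-rule input: a Security-channel rule with a single EventID.
        let rule = r#"
title: Example rule
id: 00000000-0000-0000-0000-000000000000
level: high
detection:
  selection:
    Channel: Security
    EventID: 4624
  condition: selection
"#;
        let doc = YamlLoader::load_from_str(rule).unwrap().remove(0);
        // Placeholder EventID -> subcategory GUID pair (columns 0 and 3 of the mapping CSV).
        let pairs = vec![("4624".to_string(), "{00000000-0000-0000-0000-000000000000}".to_string())];
        let entry = parse_yaml(doc, &pairs).expect("rule should not be filtered out");
        assert_eq!(entry["channel"], serde_json::json!(["sec"]));
        assert_eq!(entry["event_ids"], serde_json::json!(["4624"]));
    }
}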