diff --git a/src/main.rs b/src/main.rs
index 204fc302..9cdce7ab 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -7,9 +7,8 @@ extern crate serde_derive;
 extern crate static_vcruntime;
 
 use bytesize::ByteSize;
-use chrono::{DateTime, Datelike, Local, TimeZone};
+use chrono::{DateTime, Datelike, Local};
 use evtx::{EvtxParser, ParserSettings};
-use git2::Repository;
 use hashbrown::{HashMap, HashSet};
 use hayabusa::detections::configs::CURRENT_EXE_PATH;
 use hayabusa::detections::configs::{load_pivot_keywords, TargetEventTime, TARGET_EXTENSIONS};
@@ -22,23 +21,19 @@ use hayabusa::detections::print::{
 };
 use hayabusa::detections::rule::{get_detection_keys, RuleNode};
 use hayabusa::omikuji::Omikuji;
-use hayabusa::options::level_tuning::LevelTuning;
-use hayabusa::yaml::ParseYaml;
+use hayabusa::options::{level_tuning::LevelTuning, update_rules::UpdateRules};
 use hayabusa::{afterfact::after_fact, detections::utils};
 use hayabusa::{detections::configs, timeline::timelines::Timeline};
 use hayabusa::{detections::utils::write_color_buffer, filter};
 use hhmmss::Hhmmss;
 use pbr::ProgressBar;
 use serde_json::Value;
-use std::cmp::Ordering;
 use std::ffi::{OsStr, OsString};
 use std::fmt::Display;
 use std::fmt::Write as _;
-use std::fs::create_dir;
 use std::io::{BufWriter, Write};
 use std::path::Path;
 use std::sync::Arc;
-use std::time::SystemTime;
 use std::{
     env,
     fs::{self, File},
@@ -119,7 +114,7 @@ impl App {
         }
 
         if configs::CONFIG.read().unwrap().args.update_rules {
-            match self.update_rules() {
+            match UpdateRules::update_rules() {
                 Ok(output) => {
                     if output != "You currently have the latest rules." {
                         write_color_buffer(
@@ -749,225 +744,6 @@ impl App {
         }
     }
 
-    /// update rules(hayabusa-rules subrepository)
-    fn update_rules(&self) -> Result<String, git2::Error> {
-        let mut result;
-        let mut prev_modified_time: SystemTime = SystemTime::UNIX_EPOCH;
-        let mut prev_modified_rules: HashSet<String> = HashSet::default();
-        let hayabusa_repo = Repository::open(CURRENT_EXE_PATH.as_path());
-        let rules_path = CURRENT_EXE_PATH.join("rules");
-        let hayabusa_rule_repo = Repository::open(&rules_path);
-        if hayabusa_repo.is_err() && hayabusa_rule_repo.is_err() {
-            write_color_buffer(
-                &BufferWriter::stdout(ColorChoice::Always),
-                None,
-                "Attempting to git clone the hayabusa-rules repository into the rules folder.",
-                true,
-            )
-            .ok();
-            // execution git clone of hayabusa-rules repository when failed open hayabusa repository.
-            result = self.clone_rules();
-        } else if hayabusa_rule_repo.is_ok() {
-            // case of exist hayabusa-rules repository
-            self._repo_main_reset_hard(hayabusa_rule_repo.as_ref().unwrap())?;
-            // case of failed fetching origin/main, git clone is not executed so network error has occurred possibly.
-            prev_modified_rules =
-                self.get_updated_rules(rules_path.to_str().unwrap(), &prev_modified_time);
-            prev_modified_time = fs::metadata(&rules_path).unwrap().modified().unwrap();
-            result = self.pull_repository(&hayabusa_rule_repo.unwrap());
-        } else {
-            // case of no exist hayabusa-rules repository in rules.
-            // execute update because submodule information exists if hayabusa repository exists submodule information.
-
-            prev_modified_time = fs::metadata(&rules_path).unwrap().modified().unwrap();
-            if !&rules_path.exists() {
-                create_dir(&rules_path).ok();
-            }
-            let hayabusa_repo = hayabusa_repo.unwrap();
-            let submodules = hayabusa_repo.submodules()?;
-            let mut is_success_submodule_update = true;
-            // submodule rules erase path is hard coding to avoid unintentional remove folder.
-            fs::remove_dir_all(CURRENT_EXE_PATH.join(".git/.submodule/rules")).ok();
-            for mut submodule in submodules {
-                submodule.update(true, None)?;
-                let submodule_repo = submodule.open()?;
-                if let Err(e) = self.pull_repository(&submodule_repo) {
-                    AlertMessage::alert(&format!("Failed submodule update. {}", e)).ok();
-                    is_success_submodule_update = false;
-                }
-            }
-            if is_success_submodule_update {
-                result = Ok("Successed submodule update".to_string());
-            } else {
-                result = Err(git2::Error::from_str(&String::default()));
-            }
-        }
-        if result.is_ok() {
-            let updated_modified_rules =
-                self.get_updated_rules(rules_path.to_str().unwrap(), &prev_modified_time);
-            result =
-                self.print_diff_modified_rule_dates(prev_modified_rules, updated_modified_rules);
-        }
-        result
-    }
-
-    /// hard reset in main branch
-    fn _repo_main_reset_hard(&self, input_repo: &Repository) -> Result<(), git2::Error> {
-        let branch = input_repo
-            .find_branch("main", git2::BranchType::Local)
-            .unwrap();
-        let local_head = branch.get().target().unwrap();
-        let object = input_repo.find_object(local_head, None).unwrap();
-        match input_repo.reset(&object, git2::ResetType::Hard, None) {
-            Ok(()) => Ok(()),
-            _ => Err(git2::Error::from_str("Failed reset main branch in rules")),
-        }
-    }
-
-    /// Pull(fetch and fast-forward merge) repository to input_repo.
-    fn pull_repository(&self, input_repo: &Repository) -> Result<String, git2::Error> {
-        match input_repo
-            .find_remote("origin")?
-            .fetch(&["main"], None, None)
-            .map_err(|e| {
-                AlertMessage::alert(&format!("Failed git fetch to rules folder. {}", e)).ok();
-            }) {
-            Ok(it) => it,
-            Err(_err) => return Err(git2::Error::from_str(&String::default())),
-        };
-        let fetch_head = input_repo.find_reference("FETCH_HEAD")?;
-        let fetch_commit = input_repo.reference_to_annotated_commit(&fetch_head)?;
-        let analysis = input_repo.merge_analysis(&[&fetch_commit])?;
-        if analysis.0.is_up_to_date() {
-            Ok("Already up to date".to_string())
-        } else if analysis.0.is_fast_forward() {
-            let mut reference = input_repo.find_reference("refs/heads/main")?;
-            reference.set_target(fetch_commit.id(), "Fast-Forward")?;
-            input_repo.set_head("refs/heads/main")?;
-            input_repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
-            Ok("Finished fast forward merge.".to_string())
-        } else if analysis.0.is_normal() {
-            AlertMessage::alert(
-                "update-rules option is git Fast-Forward merge only. please check your rules folder."
-                ,
-            ).ok();
-            Err(git2::Error::from_str(&String::default()))
-        } else {
-            Err(git2::Error::from_str(&String::default()))
-        }
-    }
-
-    /// Function that git clones the hayabusa-rules repository into the rules folder
-    fn clone_rules(&self) -> Result<String, git2::Error> {
-        match Repository::clone(
-            "https://github.com/Yamato-Security/hayabusa-rules.git",
-            CURRENT_EXE_PATH.join("rules"),
-        ) {
-            Ok(_repo) => {
-                println!("Finished cloning the hayabusa-rules repository.");
-                Ok("Finished clone".to_string())
-            }
-            Err(e) => {
-                AlertMessage::alert(
-                    &format!(
-                        "Failed to git clone into the rules folder. Please rename your rules folder name. {}",
-                        e
-                    ),
-                )
-                .ok();
-                Err(git2::Error::from_str(&String::default()))
-            }
-        }
-    }
-
-    /// Create rules folder files Hashset. Format is "[rule title in yaml]|[filepath]|[filemodified date]|[rule type in yaml]"
-    fn get_updated_rules(
-        &self,
-        rule_folder_path: &str,
-        target_date: &SystemTime,
-    ) -> HashSet<String> {
-        let mut rulefile_loader = ParseYaml::new();
-        // level in read_dir is hard code to check all rules.
-        rulefile_loader
-            .read_dir(
-                rule_folder_path,
-                "INFORMATIONAL",
-                &filter::RuleExclude::default(),
-            )
-            .ok();
-
-        let hash_set_keys: HashSet<String> = rulefile_loader
-            .files
-            .into_iter()
-            .filter_map(|(filepath, yaml)| {
-                let file_modified_date = fs::metadata(&filepath).unwrap().modified().unwrap();
-
-                if file_modified_date.cmp(target_date).is_gt() {
-                    let yaml_date = yaml["date"].as_str().unwrap_or("-");
-                    return Option::Some(format!(
-                        "{}|{}|{}|{}",
-                        yaml["title"].as_str().unwrap_or(&String::default()),
-                        yaml["modified"].as_str().unwrap_or(yaml_date),
-                        &filepath,
-                        yaml["ruletype"].as_str().unwrap_or("Other")
-                    ));
-                }
-                Option::None
-            })
-            .collect();
-        hash_set_keys
-    }
-
-    /// print updated rule files.
-    fn print_diff_modified_rule_dates(
-        &self,
-        prev_sets: HashSet<String>,
-        updated_sets: HashSet<String>,
-    ) -> Result<String, git2::Error> {
-        let diff = updated_sets.difference(&prev_sets);
-        let mut update_count_by_rule_type: HashMap<String, i128> = HashMap::new();
-        let mut latest_update_date = Local.timestamp(0, 0);
-        for diff_key in diff {
-            let tmp: Vec<&str> = diff_key.split('|').collect();
-            let file_modified_date = fs::metadata(&tmp[2]).unwrap().modified().unwrap();
-
-            let dt_local: DateTime<Local> = file_modified_date.into();
-
-            if latest_update_date.cmp(&dt_local) == Ordering::Less {
-                latest_update_date = dt_local;
-            }
-            *update_count_by_rule_type
-                .entry(tmp[3].to_string())
-                .or_insert(0b0) += 1;
-            write_color_buffer(
-                &BufferWriter::stdout(ColorChoice::Always),
-                None,
-                &format!(
-                    "[Updated] {} (Modified: {} | Path: {})",
-                    tmp[0], tmp[1], tmp[2]
-                ),
-                true,
-            )
-            .ok();
-        }
-        println!();
-        for (key, value) in &update_count_by_rule_type {
-            println!("Updated {} rules: {}", key, value);
-        }
-        if !&update_count_by_rule_type.is_empty() {
-            Ok("Rule updated".to_string())
-        } else {
-            write_color_buffer(
-                &BufferWriter::stdout(ColorChoice::Always),
-                None,
-                "You currently have the latest rules.",
-                true,
-            )
-            .ok();
-            Ok("You currently have the latest rules.".to_string())
-        }
-    }
-
     /// check architecture
     fn is_matched_architecture_and_binary(&self) -> bool {
         if cfg!(target_os = "windows") {
@@ -988,7 +764,6 @@ impl App {
 #[cfg(test)]
 mod tests {
     use crate::App;
-    use std::time::SystemTime;
 
     #[test]
     fn test_collect_evtxfiles() {
@@ -1005,20 +780,4 @@ mod tests {
             assert_eq!(is_contains, &true);
         })
     }
-
-    #[test]
-    fn test_get_updated_rules() {
-        let app = App::new();
-
-        let prev_modified_time: SystemTime = SystemTime::UNIX_EPOCH;
-
-        let prev_modified_rules =
-            app.get_updated_rules("test_files/rules/level_yaml", &prev_modified_time);
-        assert_eq!(prev_modified_rules.len(), 5);
-
-        let target_time: SystemTime = SystemTime::now();
-        let prev_modified_rules2 =
-            app.get_updated_rules("test_files/rules/level_yaml", &target_time);
-        assert_eq!(prev_modified_rules2.len(), 0);
-    }
 }
diff --git a/src/options/mod.rs b/src/options/mod.rs
index 1f3c32b6..6841494b 100644
--- a/src/options/mod.rs
+++ b/src/options/mod.rs
@@ -1 +1,2 @@
 pub mod level_tuning;
+pub mod update_rules;
diff --git a/src/options/update_rules.rs b/src/options/update_rules.rs
new file mode 100644
index 00000000..764a2a8a
--- /dev/null
+++ b/src/options/update_rules.rs
@@ -0,0 +1,256 @@
+use crate::detections::print::AlertMessage;
+use crate::detections::utils::write_color_buffer;
+use crate::filter;
+use crate::yaml::ParseYaml;
+use chrono::{DateTime, Local, TimeZone};
+use git2::Repository;
+use std::fs::{self};
+use std::path::Path;
+
+use hashbrown::{HashMap, HashSet};
+use std::cmp::Ordering;
+
+use std::time::SystemTime;
+
+use std::fs::create_dir;
+
+use termcolor::{BufferWriter, ColorChoice};
+
+pub struct UpdateRules {}
+
+impl UpdateRules {
+    /// update rules(hayabusa-rules subrepository)
+    pub fn update_rules() -> Result<String, git2::Error> {
+        let mut result;
+        let mut prev_modified_time: SystemTime = SystemTime::UNIX_EPOCH;
+        let mut prev_modified_rules: HashSet<String> = HashSet::default();
+        let hayabusa_repo = Repository::open(Path::new("."));
+        let hayabusa_rule_repo = Repository::open(Path::new("rules"));
+        if hayabusa_repo.is_err() && hayabusa_rule_repo.is_err() {
+            write_color_buffer(
+                &BufferWriter::stdout(ColorChoice::Always),
+                None,
+                "Attempting to git clone the hayabusa-rules repository into the rules folder.",
+                true,
+            )
+            .ok();
+            // execution git clone of hayabusa-rules repository when failed open hayabusa repository.
+            result = UpdateRules::clone_rules();
+        } else if hayabusa_rule_repo.is_ok() {
+            // case of exist hayabusa-rules repository
+            UpdateRules::_repo_main_reset_hard(hayabusa_rule_repo.as_ref().unwrap())?;
+            // case of failed fetching origin/main, git clone is not executed so network error has occurred possibly.
+            prev_modified_rules = UpdateRules::get_updated_rules("rules", &prev_modified_time);
+            prev_modified_time = fs::metadata("rules").unwrap().modified().unwrap();
+            result = UpdateRules::pull_repository(&hayabusa_rule_repo.unwrap());
+        } else {
+            // case of no exist hayabusa-rules repository in rules.
+            // execute update because submodule information exists if hayabusa repository exists submodule information.
+
+            prev_modified_time = fs::metadata("rules").unwrap().modified().unwrap();
+            let rules_path = Path::new("rules");
+            if !rules_path.exists() {
+                create_dir(rules_path).ok();
+            }
+            let hayabusa_repo = hayabusa_repo.unwrap();
+            let submodules = hayabusa_repo.submodules()?;
+            let mut is_success_submodule_update = true;
+            // submodule rules erase path is hard coding to avoid unintentional remove folder.
+            fs::remove_dir_all(".git/.submodule/rules").ok();
+            for mut submodule in submodules {
+                submodule.update(true, None)?;
+                let submodule_repo = submodule.open()?;
+                if let Err(e) = UpdateRules::pull_repository(&submodule_repo) {
+                    AlertMessage::alert(&format!("Failed submodule update. {}", e)).ok();
+                    is_success_submodule_update = false;
+                }
+            }
+            if is_success_submodule_update {
+                result = Ok("Successed submodule update".to_string());
+            } else {
+                result = Err(git2::Error::from_str(&String::default()));
+            }
+        }
+        if result.is_ok() {
+            let updated_modified_rules =
+                UpdateRules::get_updated_rules("rules", &prev_modified_time);
+            result = UpdateRules::print_diff_modified_rule_dates(
+                prev_modified_rules,
+                updated_modified_rules,
+            );
+        }
+        result
+    }
+
+    /// hard reset in main branch
+    fn _repo_main_reset_hard(input_repo: &Repository) -> Result<(), git2::Error> {
+        let branch = input_repo
+            .find_branch("main", git2::BranchType::Local)
+            .unwrap();
+        let local_head = branch.get().target().unwrap();
+        let object = input_repo.find_object(local_head, None).unwrap();
+        match input_repo.reset(&object, git2::ResetType::Hard, None) {
+            Ok(()) => Ok(()),
+            _ => Err(git2::Error::from_str("Failed reset main branch in rules")),
+        }
+    }
+
+    /// Pull(fetch and fast-forward merge) repository to input_repo.
+    fn pull_repository(input_repo: &Repository) -> Result<String, git2::Error> {
+        match input_repo
+            .find_remote("origin")?
+            .fetch(&["main"], None, None)
+            .map_err(|e| {
+                AlertMessage::alert(&format!("Failed git fetch to rules folder. {}", e)).ok();
+            }) {
+            Ok(it) => it,
+            Err(_err) => return Err(git2::Error::from_str(&String::default())),
+        };
+        let fetch_head = input_repo.find_reference("FETCH_HEAD")?;
+        let fetch_commit = input_repo.reference_to_annotated_commit(&fetch_head)?;
+        let analysis = input_repo.merge_analysis(&[&fetch_commit])?;
+        if analysis.0.is_up_to_date() {
+            Ok("Already up to date".to_string())
+        } else if analysis.0.is_fast_forward() {
+            let mut reference = input_repo.find_reference("refs/heads/main")?;
+            reference.set_target(fetch_commit.id(), "Fast-Forward")?;
+            input_repo.set_head("refs/heads/main")?;
+            input_repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
+            Ok("Finished fast forward merge.".to_string())
+        } else if analysis.0.is_normal() {
+            AlertMessage::alert(
+                "update-rules option is git Fast-Forward merge only. please check your rules folder."
+                ,
+            ).ok();
+            Err(git2::Error::from_str(&String::default()))
+        } else {
+            Err(git2::Error::from_str(&String::default()))
+        }
+    }
+
+    /// Function that git clones the hayabusa-rules repository into the rules folder
+    fn clone_rules() -> Result<String, git2::Error> {
+        match Repository::clone(
+            "https://github.com/Yamato-Security/hayabusa-rules.git",
+            "rules",
+        ) {
+            Ok(_repo) => {
+                println!("Finished cloning the hayabusa-rules repository.");
+                Ok("Finished clone".to_string())
+            }
+            Err(e) => {
+                AlertMessage::alert(
+                    &format!(
+                        "Failed to git clone into the rules folder. Please rename your rules folder name. {}",
+                        e
+                    ),
+                )
+                .ok();
+                Err(git2::Error::from_str(&String::default()))
+            }
+        }
+    }
+
+    /// Create rules folder files Hashset. Format is "[rule title in yaml]|[filepath]|[filemodified date]|[rule type in yaml]"
+    fn get_updated_rules(rule_folder_path: &str, target_date: &SystemTime) -> HashSet<String> {
+        let mut rulefile_loader = ParseYaml::new();
+        // level in read_dir is hard code to check all rules.
+        rulefile_loader
+            .read_dir(
+                rule_folder_path,
+                "INFORMATIONAL",
+                &filter::RuleExclude::default(),
+            )
+            .ok();
+
+        let hash_set_keys: HashSet<String> = rulefile_loader
+            .files
+            .into_iter()
+            .filter_map(|(filepath, yaml)| {
+                let file_modified_date = fs::metadata(&filepath).unwrap().modified().unwrap();
+
+                if file_modified_date.cmp(target_date).is_gt() {
+                    let yaml_date = yaml["date"].as_str().unwrap_or("-");
+                    return Option::Some(format!(
+                        "{}|{}|{}|{}",
+                        yaml["title"].as_str().unwrap_or(&String::default()),
+                        yaml["modified"].as_str().unwrap_or(yaml_date),
+                        &filepath,
+                        yaml["ruletype"].as_str().unwrap_or("Other")
+                    ));
+                }
+                Option::None
+            })
+            .collect();
+        hash_set_keys
+    }
+
+    /// print updated rule files.
+    fn print_diff_modified_rule_dates(
+        prev_sets: HashSet<String>,
+        updated_sets: HashSet<String>,
+    ) -> Result<String, git2::Error> {
+        let diff = updated_sets.difference(&prev_sets);
+        let mut update_count_by_rule_type: HashMap<String, i128> = HashMap::new();
+        let mut latest_update_date = Local.timestamp(0, 0);
+        for diff_key in diff {
+            let tmp: Vec<&str> = diff_key.split('|').collect();
+            let file_modified_date = fs::metadata(&tmp[2]).unwrap().modified().unwrap();
+
+            let dt_local: DateTime<Local> = file_modified_date.into();
+
+            if latest_update_date.cmp(&dt_local) == Ordering::Less {
+                latest_update_date = dt_local;
+            }
+            *update_count_by_rule_type
+                .entry(tmp[3].to_string())
+                .or_insert(0b0) += 1;
+            write_color_buffer(
+                &BufferWriter::stdout(ColorChoice::Always),
+                None,
+                &format!(
+                    "[Updated] {} (Modified: {} | Path: {})",
+                    tmp[0], tmp[1], tmp[2]
+                ),
+                true,
+            )
+            .ok();
+        }
+        println!();
+        for (key, value) in &update_count_by_rule_type {
+            println!("Updated {} rules: {}", key, value);
+        }
+        if !&update_count_by_rule_type.is_empty() {
+            Ok("Rule updated".to_string())
+        } else {
+            write_color_buffer(
+                &BufferWriter::stdout(ColorChoice::Always),
+                None,
+                "You currently have the latest rules.",
+                true,
+            )
+            .ok();
+            Ok("You currently have the latest rules.".to_string())
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::options::update_rules::UpdateRules;
+    use std::time::SystemTime;
+
+    #[test]
+    fn test_get_updated_rules() {
+        let prev_modified_time: SystemTime = SystemTime::UNIX_EPOCH;
+
+        let prev_modified_rules =
+            UpdateRules::get_updated_rules("test_files/rules/level_yaml", &prev_modified_time);
+        assert_eq!(prev_modified_rules.len(), 5);
+
+        let target_time: SystemTime = SystemTime::now();
+        let prev_modified_rules2 =
+            UpdateRules::get_updated_rules("test_files/rules/level_yaml", &target_time);
+        assert_eq!(prev_modified_rules2.len(), 0);
+    }
+}
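
For reference, a minimal sketch of the relocated call site, mirroring the src/main.rs hunk above. It assumes code living inside the hayabusa binary crate; the wrapper name run_rule_update and the println!/eprintln! handling are illustrative only, not the binary's actual output path.

use hayabusa::options::update_rules::UpdateRules;

// Hypothetical wrapper, shown only to illustrate the new API surface.
fn run_rule_update() {
    // UpdateRules::update_rules() clones or fast-forwards the hayabusa-rules
    // repository under ./rules and returns a status string, or a git2::Error.
    match UpdateRules::update_rules() {
        Ok(output) => {
            if output != "You currently have the latest rules." {
                println!("{}", output);
            }
        }
        Err(e) => eprintln!("Failed to update rules: {}", e),
    }
}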