Merge branch 'main' into feature/output_errorlog#301
.gitmodules (vendored, new file, +3)
@@ -0,0 +1,3 @@
+[submodule "rules"]
+path = rules
+url = git@github.com:Yamato-Security/hayabusa-rules.git
@@ -5,7 +5,7 @@ DustInDark(@hitenkoku): Core developer, project management, sigma count implemen
 Garigariganzy (@garigariganzy31): Developer, event ID statistics implementation, etc...
 ItiB (@itiB_S144) : Core developer, sigmac hayabusa backend, rule creation, etc...
 James Takai / hachiyone(@hach1yon): Current lead developer, tokio multi-threading, sigma aggregation logic, sigmac backend, rule creation, etc…
-Kazuminn (@k2warugaki): Developer
+Kazuminn (@k47_um1n): Developer
 Yusuke Matsui (@apt773): AD hacking working group leader, rule testing, documentation, research, support, etc...
 Zach Mathis (@yamatosecurity, Yamato Security Founder): Project leader, tool and concept design, rule creation and tuning, etc…

@@ -14,9 +14,9 @@ Hayabusa would not have been possible without first creating RustyBlue, so we wo

 Zach Mathis (@yamatosecurity, Yamato Security Founder): Project Leader
 Nishikawa Akira (@nishikawaakira): Lead Developer
-kazuminn (@k2warugaki): Core Developer
+kazuminn (@k47_um1n): Core Developer
 itiB (@itiB_S144): Core Developer
-James Takai / hachiyone (@hach1yon): Developer
+James Takai / hachiyone (@hach1yon): Core Developer
 DustInDark (@hitenkoku): Core Developer
 garigariganzy (@garigariganzy31): Developer
 7itoh (@yNitocrypto22): Developer
rules (submodule, +1)
+Submodule rules added at 631db51204
@@ -42,13 +42,10 @@ pub fn after_fact() {
 .ok();
 process::exit(1);
 };
+
 let mut displayflag = false;
-let mut target: Box<dyn io::Write> = if let Some(csv_path) = configs::CONFIG
-.read()
-.unwrap()
-.args
-.value_of("csv-timeline")
-{
+let mut target: Box<dyn io::Write> =
+if let Some(csv_path) = configs::CONFIG.read().unwrap().args.value_of("output") {
 // ファイル出力する場合
 match File::create(csv_path) {
 Ok(file) => Box::new(BufWriter::new(file)),
@@ -81,8 +78,13 @@ fn emit_csv<W: std::io::Write>(writer: &mut W, displayflag: bool) -> io::Result<
 } else {
 wtr = csv::WriterBuilder::new().from_writer(writer);
 }
+
 let messages = print::MESSAGES.lock().unwrap();
-let mut detect_count = 0;
+// levelの区分が"Critical","High","Medium","Low","Informational","Undefined"の6つであるため
+let mut total_detect_counts_by_level: Vec<u128> = vec![0; 6];
+let mut unique_detect_counts_by_level: Vec<u128> = vec![0; 6];
+let mut detected_rule_files: Vec<String> = Vec::new();
+
 for (time, detect_infos) in messages.iter() {
 for detect_info in detect_infos {
 if displayflag {
@@ -107,17 +109,61 @@ fn emit_csv<W: std::io::Write>(writer: &mut W, displayflag: bool) -> io::Result<
 details: &detect_info.detail,
 })?;
 }
+let level_suffix = *configs::LEVELMAP
+.get(&detect_info.level.to_uppercase())
+.unwrap_or(&0) as usize;
+if !detected_rule_files.contains(&detect_info.rulepath) {
+detected_rule_files.push(detect_info.rulepath.clone());
+unique_detect_counts_by_level[level_suffix] += 1;
+}
+total_detect_counts_by_level[level_suffix] += 1;
 }
-detect_count += detect_infos.len();
 }
 println!("");

 wtr.flush()?;
 println!("");
-println!("Total events: {:?}", detect_count);
+_print_unique_results(
+total_detect_counts_by_level,
+"Total".to_string(),
+"detections".to_string(),
+);
+_print_unique_results(
+unique_detect_counts_by_level,
+"Unique".to_string(),
+"rules".to_string(),
+);
 Ok(())
 }

+/// 与えられたユニークな検知数と全体の検知数の情報(レベル別と総計)を元に結果文を標準出力に表示する関数
+fn _print_unique_results(mut counts_by_level: Vec<u128>, head_word: String, tail_word: String) {
+let levels = Vec::from([
+"Critical",
+"High",
+"Medium",
+"Low",
+"Informational",
+"Undefined",
+]);
+
+// configsの登録順番と表示をさせたいlevelの順番が逆であるため
+counts_by_level.reverse();
+
+// 全体の集計(levelの記載がないためformatの第二引数は空の文字列)
+println!(
+"{} {}:{}",
+head_word,
+tail_word,
+counts_by_level.iter().sum::<u128>()
+);
+for (i, level_name) in levels.iter().enumerate() {
+println!(
+"{} {} {}:{}",
+head_word, level_name, tail_word, counts_by_level[i]
+);
+}
+}
 fn format_time(time: &DateTime<Utc>) -> String {
 if configs::CONFIG.read().unwrap().args.is_present("utc") {
 format_rfc(time)
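Editor's note: judging from the `_print_unique_results` function added above, the summary output is one total line followed by one line per level (Critical down to Undefined), with the counts reversed because the stored LEVELMAP order is the opposite of the display order. A minimal standalone sketch of that logic (my own illustration, not part of this commit; the counts are hypothetical and ordered Undefined→Critical):

    fn print_unique_results_sketch(mut counts_by_level: Vec<u128>, head_word: &str, tail_word: &str) {
        let levels = ["Critical", "High", "Medium", "Low", "Informational", "Undefined"];
        // Stored order is the reverse of the display order, so flip it first.
        counts_by_level.reverse();
        // Summary line, e.g. "Total detections:7".
        println!("{} {}:{}", head_word, tail_word, counts_by_level.iter().sum::<u128>());
        // One line per level, e.g. "Total Critical detections:0".
        for (i, level_name) in levels.iter().enumerate() {
            println!("{} {} {}:{}", head_word, level_name, tail_word, counts_by_level[i]);
        }
    }

    fn main() {
        // Hypothetical counts: [Undefined, Informational, Low, Medium, High, Critical].
        print_unique_results_sketch(vec![1, 2, 0, 3, 1, 0], "Total", "detections");
    }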
@@ -52,22 +52,22 @@ fn build_app<'a>() -> ArgMatches<'a> {
 return ArgMatches::default();
 }

-let usages = "-f --filepath=[FILEPATH] 'File path to one .evtx file'
---csv-timeline=[CSV_TIMELINE] 'Save the timeline in CSV format'
+let usages = "-d --directory=[DIRECTORY] 'Directory of multiple .evtx files'
+-f --filepath=[FILEPATH] 'File path to one .evtx file'
+-r --rules=[RULEDIRECTORY] 'Rule file directory (default: ./rules)'
+-o --output=[CSV_TIMELINE] 'Save the timeline in CSV format. Example: results.csv'
+-v --verbose 'Output verbose information'
+-D --enable-deprecated-rules 'Enable sigma rules marked as deprecated'
+-n --enable-noisy-rules 'Enable rules marked as noisy'
+-m --min-level=[LEVEL] 'Minimum level for rules (default: informational)'
+--start-timeline=[STARTTIMELINE] 'Start time of the event to load from event file. Example: '2018/11/28 12:00:00 +09:00''
+--end-timeline=[ENDTIMELINE] 'End time of the event to load from event file. Example: '2018/11/28 12:00:00 +09:00''
 --rfc-2822 'Output date and time in RFC 2822 format. Example: Mon, 07 Aug 2006 12:34:56 -0600'
 --rfc-3339 'Output date and time in RFC 3339 format. Example: 2006-08-07T12:34:56.485214 -06:00'
---verbose 'Output verbose information to target event file path and rule file'
---starttimeline=[STARTTIMELINE] 'Start time of the event to load from event file. Example: '2018/11/28 12:00:00 +09:00''
---endtimeline=[ENDTIMELINE]'End time of the event to load from event file. Example: '2018/11/28 12:00:00 +09:00''
--q 'Quiet mode. Do not display the launch banner'
--r --rules=[RULEDIRECTORY] 'Rule file directory (default: ./rules)'
--m --min-level=[LEVEL] 'Minimum level for rules (default: informational)'
 -u --utc 'Output time in UTC format (default: local time)'
--d --directory=[DIRECTORY] 'Directory of multiple .evtx files'
+-t --thread-number=[NUMBER] 'Thread number (default: optimal number for performance)'
 -s --statistics 'Prints statistics of event IDs'
--n --show-noisyalerts 'do not exclude noisy rules'
--t --threadnum=[NUM] 'Thread number (default: optimal number for performance)'
---show-deprecated 'do not exclude rules with YAML's status deprecated'
+-q --quiet 'Quiet mode. Do not display the launch banner'
 --contributors 'Prints the list of contributors'";
 App::new(&program)
 .about("Hayabusa: Aiming to be the world's greatest Windows event log analysis tool!")
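Editor's note: with the renamed options above (`--csv-timeline` → `-o`/`--output`, `--starttimeline`/`--endtimeline` → `--start-timeline`/`--end-timeline`, `-t --threadnum` → `-t --thread-number`, bare `-q` → `-q --quiet`, and `-n`/`-D` now enabling noisy/deprecated rules), a typical run would look roughly like the following (assuming the binary is named hayabusa; the paths are hypothetical):

    hayabusa -d ./evtx_files -o results.csv -m medium --start-timeline '2018/11/28 12:00:00 +09:00'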
@@ -131,8 +131,8 @@ pub struct TargetEventTime {

 impl TargetEventTime {
 pub fn new() -> Self {
-let start_time = if let Some(s_time) = CONFIG.read().unwrap().args.value_of("starttimeline")
-{
+let start_time =
+if let Some(s_time) = CONFIG.read().unwrap().args.value_of("start-timeline") {
 match DateTime::parse_from_str(s_time, "%Y-%m-%d %H:%M:%S %z") // 2014-11-28 21:00:09 +09:00
 .or_else(|_| DateTime::parse_from_str(s_time, "%Y/%m/%d %H:%M:%S %z")) // 2014/11/28 21:00:09 +09:00
 {
@@ -149,7 +149,7 @@ impl TargetEventTime {
 } else {
 None
 };
-let end_time = if let Some(e_time) = CONFIG.read().unwrap().args.value_of("endtimeline") {
+let end_time = if let Some(e_time) = CONFIG.read().unwrap().args.value_of("end-timeline") {
 match DateTime::parse_from_str(e_time, "%Y-%m-%d %H:%M:%S %z") // 2014-11-28 21:00:09 +09:00
 .or_else(|_| DateTime::parse_from_str(e_time, "%Y/%m/%d %H:%M:%S %z")) // 2014/11/28 21:00:09 +09:00
 {
@@ -1,6 +1,5 @@
 extern crate csv;

-use crate::detections::configs;
 use crate::detections::print::AlertMessage;
 use crate::detections::print::ERROR_LOG_PATH;
 use crate::detections::print::MESSAGES;
@@ -140,7 +139,7 @@ impl Detection {
 return self;
 }

-pub fn add_aggcondtion_msg(&self) {
+pub fn add_aggcondition_msg(&self) {
 for rule in &self.rules {
 if !rule.has_agg_condition() {
 continue;
@@ -153,46 +152,11 @@ impl Detection {
 }
 }

-pub fn print_unique_results(&self) {
-let rules = &self.rules;
-let levellabel = Vec::from([
-"Critical",
-"High",
-"Medium",
-"Low",
-"Informational",
-"Undefined",
-]);
-// levclcounts is [(Undefined), (Informational), (Low),(Medium),(High),(Critical)]
-let mut levelcounts = Vec::from([0, 0, 0, 0, 0, 0]);
-for rule in rules.into_iter() {
-if rule.check_exist_countdata() {
-let suffix = configs::LEVELMAP
-.get(
-&rule.yaml["level"]
-.as_str()
-.unwrap_or("")
-.to_owned()
-.to_uppercase(),
-)
-.unwrap_or(&0);
-levelcounts[*suffix as usize] += 1;
-}
-}
-let mut total_unique = 0;
-levelcounts.reverse();
-for (i, value) in levelcounts.iter().enumerate() {
-println!("{} alerts: {}", levellabel[i], value);
-total_unique += value;
-}
-println!("Unique alerts detected: {}", total_unique);
-}
-
 // 複数のイベントレコードに対して、ルールを1個実行します。
 fn execute_rule(mut rule: RuleNode, records: Arc<Vec<EvtxRecordInfo>>) -> RuleNode {
 let agg_condition = rule.has_agg_condition();
 for record_info in records.as_ref() {
-let result = rule.select(&record_info.evtx_filepath, &record_info);
+let result = rule.select(&record_info);
 if !result {
 continue;
 }
@@ -227,34 +191,63 @@ impl Detection {
 fn insert_agg_message(rule: &RuleNode, agg_result: AggResult) {
 let output = Detection::create_count_output(rule, &agg_result);
 MESSAGES.lock().unwrap().insert_message(
-agg_result.filepath,
-rule.rulepath.to_string(),
+"-".to_owned(),
+rule.rulepath.to_owned(),
 agg_result.start_timedate,
-rule.yaml["level"].as_str().unwrap_or("").to_string(),
-"-".to_string(),
-"-".to_string(),
-rule.yaml["title"].as_str().unwrap_or("").to_string(),
-output.to_string(),
+rule.yaml["level"].as_str().unwrap_or("").to_owned(),
+"-".to_owned(),
+"-".to_owned(),
+rule.yaml["title"].as_str().unwrap_or("").to_owned(),
+output.to_owned(),
 )
 }

 ///aggregation conditionのcount部分の検知出力文の文字列を返す関数
 fn create_count_output(rule: &RuleNode, agg_result: &AggResult) -> String {
-let mut ret: String = "count(".to_owned();
-let key: Vec<&str> = agg_result.key.split("_").collect();
-if key.len() >= 1 {
-ret.push_str(key[0]);
-}
-ret.push_str(&") ");
-if key.len() >= 2 {
-ret.push_str("by ");
-ret.push_str(key[1]);
+// 条件式部分の出力
+let mut ret: String = "[condition] ".to_owned();
+let agg_condition_raw_str: Vec<&str> = rule.yaml["detection"]["condition"]
+.as_str()
+.unwrap()
+.split("|")
+.collect();
+// この関数が呼び出されている段階で既にaggregation conditionは存在する前提なのでunwrap前の確認は行わない
+let agg_condition = rule.get_agg_condition().unwrap();
+let exist_timeframe = rule.yaml["detection"]["timeframe"]
+.as_str()
+.unwrap_or("")
+.to_string()
+!= "";
+// この関数が呼び出されている段階で既にaggregation conditionは存在する前提なのでagg_conditionの配列の長さは2となる
+ret.push_str(agg_condition_raw_str[1].trim());
+if exist_timeframe {
+ret.push_str(" in timeframe");
 }
+
+ret.push_str(&format!(" [result] count:{}", agg_result.data));
+if agg_condition._field_name.is_some() {
 ret.push_str(&format!(
-"{} in {}.",
-agg_result.condition_op_num,
-rule.yaml["timeframe"].as_str().unwrap_or(""),
+" {}:{}",
+agg_condition._field_name.as_ref().unwrap(),
+agg_result.field_values.join("/")
 ));
+}
+
+if agg_condition._by_field_name.is_some() {
+ret.push_str(&format!(
+" {}:{}",
+agg_condition._by_field_name.as_ref().unwrap(),
+agg_result.key
+));
+}
+
+if exist_timeframe {
+ret.push_str(&format!(
+" timeframe:{}",
+rule.yaml["detection"]["timeframe"].as_str().unwrap()
+));
+}
+
 return ret;
 }
 pub fn print_rule_load_info(
@@ -274,10 +267,196 @@ impl Detection {
 }
 }

-#[test]
-fn test_parse_rule_files() {
+#[cfg(test)]
+mod tests {
+
+use crate::detections::detection::Detection;
+use crate::detections::rule::create_rule;
+use crate::detections::rule::AggResult;
+use crate::filter;
+use chrono::{TimeZone, Utc};
+use yaml_rust::YamlLoader;
+
+#[test]
+fn test_parse_rule_files() {
 let level = "informational";
 let opt_rule_path = Some("./test_files/rules/level_yaml");
-let cole = Detection::parse_rule_files(level.to_owned(), opt_rule_path, &filter::exclude_ids());
+let cole =
+Detection::parse_rule_files(level.to_owned(), opt_rule_path, &filter::exclude_ids());
 assert_eq!(5, cole.len());
+}
+
+#[test]
+fn test_output_aggregation_output_with_output() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult =
+AggResult::new(2, "_".to_string(), vec![], default_time, ">= 1".to_string());
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+EventID: 7040
+selection3:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 and selection3 | count() >= 1
+output: testdata
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output = "[condition] count() >= 1 [result] count:2";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
+
+#[test]
+fn test_output_aggregation_output_no_filed_by() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult =
+AggResult::new(2, "_".to_string(), vec![], default_time, ">= 1".to_string());
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+EventID: 7040
+selection3:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 and selection3 | count() >= 1
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output = "[condition] count() >= 1 [result] count:2";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
+
+#[test]
+fn test_output_aggregation_output_with_timeframe() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult =
+AggResult::new(2, "_".to_string(), vec![], default_time, ">= 1".to_string());
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+EventID: 7040
+selection3:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 and selection3 | count() >= 1
+timeframe: 15m
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output =
+"[condition] count() >= 1 in timeframe [result] count:2 timeframe:15m";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
+
+#[test]
+fn test_output_aggregation_output_with_field() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult = AggResult::new(
+2,
+"_".to_string(),
+vec!["7040".to_owned(), "9999".to_owned()],
+default_time,
+">= 1".to_string(),
+);
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 | count(EventID) >= 1
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output = "[condition] count(EventID) >= 1 [result] count:2 EventID:7040/9999";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
+
+#[test]
+fn test_output_aggregation_output_with_field_by() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult = AggResult::new(
+2,
+"lsass.exe".to_string(),
+vec!["0000".to_owned(), "1111".to_owned()],
+default_time,
+">= 1".to_string(),
+);
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 | count(EventID) by process >= 1
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output = "[condition] count(EventID) by process >= 1 [result] count:2 EventID:0000/1111 process:lsass.exe";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
+#[test]
+fn test_output_aggregation_output_with_by() {
+let default_time = Utc.ymd(1977, 1, 1).and_hms(0, 0, 0);
+let agg_result: AggResult = AggResult::new(
+2,
+"lsass.exe".to_string(),
+vec![],
+default_time,
+">= 1".to_string(),
+);
+let rule_str = r#"
+enabled: true
+detection:
+selection1:
+Channel: 'System'
+selection2:
+param1: 'Windows Event Log'
+condition: selection1 and selection2 | count() by process >= 1
+"#;
+let mut rule_yaml = YamlLoader::load_from_str(rule_str).unwrap().into_iter();
+let test = rule_yaml.next().unwrap();
+let mut rule_node = create_rule("testpath".to_string(), test);
+rule_node.init().ok();
+let expected_output =
+"[condition] count() by process >= 1 [result] count:2 process:lsass.exe";
+assert_eq!(
+Detection::create_count_output(&rule_node, &agg_result),
+expected_output
+);
+}
 }
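Editor's note: the new tests above pin down the message format produced by `create_count_output`: it now echoes the aggregation condition and the observed values in the shape `[condition] <condition after the pipe> [result] count:<n>`, optionally followed by `<field>:<values joined by '/'>`, `<by-field>:<key>`, and `timeframe:<t>` — for example `[condition] count(EventID) by process >= 1 [result] count:2 EventID:0000/1111 process:lsass.exe` — replacing the old `count(...) by ... <op> in <timeframe>.` wording.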
@@ -538,10 +538,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(
-rule_node.select(&"testpath".to_owned(), &recinfo),
-expect_select
-);
+assert_eq!(rule_node.select(&recinfo), expect_select);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -584,7 +581,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -628,7 +625,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
File diff suppressed because it is too large
@@ -707,7 +707,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "failed to parse json record.");
@@ -737,7 +737,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -767,7 +767,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "failed to parse json record.");
@@ -798,7 +798,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -829,7 +829,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -859,7 +859,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -889,7 +889,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -920,7 +920,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -951,7 +951,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -982,7 +982,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1013,7 +1013,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1044,7 +1044,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1074,7 +1074,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1108,7 +1108,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1142,7 +1142,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1175,7 +1175,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1217,7 +1217,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1259,7 +1259,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1301,7 +1301,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1343,7 +1343,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1385,7 +1385,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1427,7 +1427,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_rec) => {
 assert!(false, "Failed to parse json record.");
@@ -1457,7 +1457,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1487,7 +1487,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1517,7 +1517,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1605,7 +1605,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1635,7 +1635,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1667,7 +1667,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+assert_eq!(rule_node.select(&recinfo), true);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -1699,7 +1699,7 @@ mod tests {
 Ok(record) => {
 let keys = detections::rule::get_detection_keys(&rule_node);
 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+assert_eq!(rule_node.select(&recinfo), false);
 }
 Err(_) => {
 assert!(false, "Failed to parse json record.");
@@ -15,7 +15,7 @@ use self::aggregation_parser::AggregationParseInfo;

 mod condition_parser;
 mod count;
-use self::count::TimeFrameInfo;
+use self::count::{AggRecordTimeInfo, TimeFrameInfo};

 use super::detection::EvtxRecordInfo;

@@ -28,7 +28,7 @@ pub struct RuleNode {
 pub rulepath: String,
 pub yaml: Yaml,
 detection: DetectionNode,
-countdata: HashMap<String, HashMap<String, Vec<DateTime<Utc>>>>,
+countdata: HashMap<String, Vec<AggRecordTimeInfo>>,
 }

 impl Debug for RuleNode {
@@ -66,10 +66,10 @@ impl RuleNode {
 }
 }

-pub fn select(&mut self, filepath: &String, event_record: &EvtxRecordInfo) -> bool {
+pub fn select(&mut self, event_record: &EvtxRecordInfo) -> bool {
 let result = self.detection.select(event_record);
-if result {
-count::count(self, filepath, &event_record.record);
+if result && self.has_agg_condition() {
+count::count(self, &event_record.record);
 }
 return result;
 }
@@ -83,14 +83,23 @@ impl RuleNode {
 if !self.has_agg_condition() {
 return ret;
 }
-for filepath in self.countdata.keys() {
-ret.append(&mut count::aggregation_condition_select(&self, &filepath));
-}
+ret.append(&mut count::aggregation_condition_select(&self));
 return ret;
 }
 pub fn check_exist_countdata(&self) -> bool {
 self.countdata.len() > 0
 }
+/// ルール内のAggregationParseInfo(Aggregation Condition)を取得する関数
+pub fn get_agg_condition(&self) -> Option<&AggregationParseInfo> {
+match self.detection.aggregation_condition.as_ref() {
+None => {
+return None;
+}
+Some(agg_parse_info) => {
+return Some(agg_parse_info);
+}
+}
+}
 }

 // RuleNodeのdetectionに定義されているキーの一覧を取得する。
@@ -293,12 +302,12 @@ impl DetectionNode {
 #[derive(Debug)]
 /// countなどのaggregationの結果を出力する構造体
 pub struct AggResult {
-/// evtx file path
-pub filepath: String,
 /// countなどの値
 pub data: i32,
-/// (countの括弧内の記載)_(count byで指定された条件)で設定されたキー
+/// count byで指定された条件のレコード内での値
 pub key: String,
+/// countの括弧内指定された項目の検知されたレコード内での値の配列。括弧内で指定がなかった場合は長さ0の配列となる
+pub field_values: Vec<String>,
 ///検知したブロックの最初のレコードの時間
 pub start_timedate: DateTime<Utc>,
 ///条件式の情報
@@ -307,16 +316,16 @@ pub struct AggResult {

 impl AggResult {
 pub fn new(
-filepath: String,
 data: i32,
 key: String,
+field_values: Vec<String>,
 start_timedate: DateTime<Utc>,
 condition_op_num: String,
 ) -> AggResult {
 return AggResult {
-filepath: filepath,
 data: data,
 key: key,
+field_values: field_values,
 start_timedate: start_timedate,
 condition_op_num: condition_op_num,
 };
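Editor's note: `AggResult::new` loses the leading evtx file path argument and gains `field_values`; the argument order now mirrors the struct above (data, key, field_values, start_timedate, condition_op_num). The call below restates one of the new detection.rs tests with the arguments annotated (illustration only):

    // data: count value; key: value of the `count() by` field;
    // field_values: values seen inside count(...); then the block start time
    // and the comparison part of the condition.
    let agg_result: AggResult = AggResult::new(
        2,
        "lsass.exe".to_string(),
        vec!["0000".to_owned(), "1111".to_owned()],
        Utc.ymd(1977, 1, 1).and_hms(0, 0, 0),
        ">= 1".to_string(),
    );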
@@ -361,7 +370,7 @@ mod tests {
|
|||||||
Ok(record) => {
|
Ok(record) => {
|
||||||
let keys = detections::rule::get_detection_keys(&rule_node);
|
let keys = detections::rule::get_detection_keys(&rule_node);
|
||||||
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
||||||
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
|
assert_eq!(rule_node.select(&recinfo), true);
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
assert!(false, "Failed to parse json record.");
|
assert!(false, "Failed to parse json record.");
|
||||||
@@ -391,7 +400,7 @@ mod tests {
|
|||||||
Ok(record) => {
|
Ok(record) => {
|
||||||
let keys = detections::rule::get_detection_keys(&rule_node);
|
let keys = detections::rule::get_detection_keys(&rule_node);
|
||||||
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
||||||
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
|
assert_eq!(rule_node.select(&recinfo), false);
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
assert!(false, "Failed to parse json record.");
|
assert!(false, "Failed to parse json record.");
|
||||||
@@ -421,7 +430,7 @@ mod tests {
|
|||||||
Ok(record) => {
|
Ok(record) => {
|
||||||
let keys = detections::rule::get_detection_keys(&rule_node);
|
let keys = detections::rule::get_detection_keys(&rule_node);
|
||||||
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
||||||
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
|
assert_eq!(rule_node.select(&recinfo), false);
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
assert!(false, "Failed to parse json record.");
|
assert!(false, "Failed to parse json record.");
|
||||||
@@ -504,7 +513,7 @@ mod tests {
|
|||||||
Ok(record) => {
|
Ok(record) => {
|
||||||
let keys = detections::rule::get_detection_keys(&rule_node);
|
let keys = detections::rule::get_detection_keys(&rule_node);
|
||||||
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
|
||||||
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
|
assert_eq!(rule_node.select(&recinfo), true);
|
||||||
}
|
}
|
||||||
Err(_) => {
|
Err(_) => {
|
||||||
assert!(false, "Failed to parse json record.");
|
assert!(false, "Failed to parse json record.");
|
||||||
@@ -563,7 +572,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+                assert_eq!(rule_node.select(&recinfo), false);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -629,7 +638,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -673,7 +682,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -718,7 +727,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+                assert_eq!(rule_node.select(&recinfo), false);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -782,7 +791,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -846,7 +855,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+                assert_eq!(rule_node.select(&recinfo), false);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -892,7 +901,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_rec) => {
                 assert!(false, "Failed to parse json record.");
@@ -950,17 +959,11 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                let result = rule_node.select(&"testpath".to_string(), &recinfo);
+                let result = rule_node.select(&recinfo);
                 assert_eq!(rule_node.detection.aggregation_condition.is_some(), true);
                 assert_eq!(result, true);
                 assert_eq!(
-                    *&rule_node
-                        .countdata
-                        .get("testpath")
-                        .unwrap()
-                        .get(key)
-                        .unwrap()
-                        .len() as i32,
+                    *&rule_node.countdata.get(key).unwrap().len() as i32,
                     expect_count
                 );
             }
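The last hunk above also flattens the per-run count data: the old code kept a map keyed by file path whose values were maps keyed by the aggregation key, while the new code keys the count buckets by the aggregation key alone, since the path already travels with each record info. A sketch of the two shapes with standard HashMaps (the key strings and the exact value type are assumptions):

use std::collections::HashMap;

fn main() {
    // Before (assumed shape): file path -> aggregation key -> recorded entries.
    let mut nested: HashMap<String, HashMap<String, Vec<String>>> = HashMap::new();
    nested
        .entry("testpath".to_owned())
        .or_default()
        .entry("System_4625".to_owned())
        .or_default()
        .push("record-1".to_owned());
    let before = nested.get("testpath").unwrap().get("System_4625").unwrap().len();

    // After (assumed shape): aggregation key -> recorded entries.
    let mut flat: HashMap<String, Vec<String>> = HashMap::new();
    flat.entry("System_4625".to_owned())
        .or_default()
        .push("record-1".to_owned());
    let after = flat.get("System_4625").unwrap().len();

    assert_eq!(before, after); // same count, one less level of lookup
}
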
@@ -432,7 +432,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -465,7 +465,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+                assert_eq!(rule_node.select(&recinfo), false);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -497,7 +497,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -529,7 +529,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
+                assert_eq!(rule_node.select(&recinfo), true);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");
@@ -561,7 +561,7 @@ mod tests {
             Ok(record) => {
                 let keys = detections::rule::get_detection_keys(&rule_node);
                 let recinfo = utils::create_rec_info(record, "testpath".to_owned(), &keys);
-                assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
+                assert_eq!(rule_node.select(&recinfo), false);
             }
             Err(_) => {
                 assert!(false, "Failed to parse json record.");

@@ -184,7 +184,7 @@ pub fn get_thread_num() -> usize {
     let conf = configs::CONFIG.read().unwrap();
     let threadnum = &conf
         .args
-        .value_of("threadnum")
+        .value_of("thread-number")
         .unwrap_or(def_thread_num_str.as_str());
     return threadnum.parse::<usize>().unwrap().clone();
 }
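With this rename, the id passed to value_of() must be the new long name, thread-number. A hedged sketch of how such an option is typically declared and read with clap 2.x (the declaration below is an assumption for illustration, not the project's actual CLI definition):

use clap::{App, Arg};

fn main() {
    let matches = App::new("example")
        .arg(
            Arg::with_name("thread-number") // id renamed from "threadnum"
                .long("thread-number")
                .takes_value(true),
        )
        .get_matches_from(vec!["example", "--thread-number", "8"]);

    let def_thread_num_str = "4".to_string(); // placeholder default
    let threadnum = matches
        .value_of("thread-number")
        .unwrap_or(def_thread_num_str.as_str());
    assert_eq!(threadnum.parse::<usize>().unwrap(), 8);
}
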
@@ -18,7 +18,7 @@ pub fn exclude_ids() -> RuleExclude {
         .read()
         .unwrap()
         .args
-        .is_present("show-noisyalerts")
+        .is_present("enable-noisy-rules")
     {
         ids += "\n"; // 改行を入れないとexclude-rulesの一番最後の行とnoisy-rules.txtの一番最初の行が一行にまとめられてしまう。
         match fs::read("config/noisy-rules.txt") {
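The comment in this hunk notes that a newline must be appended before concatenating noisy-rules.txt, otherwise the last ID of the exclude list and the first ID of the noisy list end up on one line. A small sketch of that failure mode (the file contents below are made up):

fn main() {
    // Pretend these are the raw contents of the two config files,
    // where the first file does not end with a trailing newline.
    let exclude_rules = "11111111-aaaa-bbbb-cccc-000000000001";
    let noisy_rules = "22222222-dddd-eeee-ffff-000000000002";

    // Without the separator the two IDs merge into a single bogus line.
    let broken = format!("{}{}", exclude_rules, noisy_rules);
    assert_eq!(broken.lines().count(), 1);

    // Appending "\n" first keeps every rule ID on its own line.
    let fixed = format!("{}\n{}", exclude_rules, noisy_rules);
    assert_eq!(fixed.lines().count(), 2);
}
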
@@ -53,7 +53,7 @@ impl App {

     fn exec(&mut self) {
         let analysis_start_time: DateTime<Local> = Local::now();
-        if !configs::CONFIG.read().unwrap().args.is_present("q") {
+        if !configs::CONFIG.read().unwrap().args.is_present("quiet") {
             self.output_logo();
             println!("");
             self.output_eggs(&format!(
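Here the argument id changes from q to quiet; with clap the short flag can stay -q while is_present() is called with the long name as the id. A hedged clap 2.x sketch of that pattern (not the project's actual CLI definition):

use clap::{App, Arg};

fn main() {
    let build = || {
        App::new("example").arg(
            Arg::with_name("quiet") // id renamed from "q"
                .short("q")
                .long("quiet")
                .help("Quiet mode: do not display the logo"),
        )
    };

    // The short flag is unchanged; only the id used by is_present() differs.
    let with_flag = build().get_matches_from(vec!["example", "-q"]);
    assert!(with_flag.is_present("quiet"));

    let without_flag = build().get_matches_from(vec!["example"]);
    assert!(!without_flag.is_present("quiet")); // the logo would be printed here
}
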
@@ -183,8 +183,6 @@ impl App {
                 pb.inc();
             }
             after_fact();
-            detection.print_unique_results();
-            AlertMessage::output_error_log_exist();
         }

         // Windowsイベントログファイルを1ファイル分解析する。
@@ -259,7 +257,7 @@ impl App {
                 detection = detection.start(&self.rt, records_per_detect);
             }

-            detection.add_aggcondtion_msg();
+            detection.add_aggcondition_msg();
             tl.tm_stats_dsp_msg();

             return detection;

@@ -11,7 +11,7 @@ pub struct Timeline {
 impl Timeline {
     pub fn new() -> Timeline {
         let totalcnt = 0;
-        let filepath = "".to_owned();
+        let filepath = String::default();
         let starttm = "".to_string();
         let endtm = "".to_string();
         let statslst = HashMap::new();
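"".to_owned() and String::default() build the same empty String; the change (here and in the yaml test hunk further down) is purely about stating the intent more directly. A quick check:

fn main() {
    let a: String = "".to_owned();
    let b: String = String::default();
    assert_eq!(a, b);       // both are the empty string
    assert_eq!(b.len(), 0); // String::default() allocates nothing
}
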
@@ -156,7 +156,7 @@ impl ParseYaml {
         .read()
         .unwrap()
         .args
-        .is_present("show-deprecated")
+        .is_present("enable-deprecated-rules")
     {
         let rule_status = &yaml_doc["status"].as_str();
         if rule_status.is_some() && rule_status.unwrap() == "deprecated" {
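The renamed enable-deprecated-rules switch gates whether rules whose status is deprecated are loaded at all. A hedged sketch of that filter using the yaml-rust crate (the surrounding loader code and the sample rule text are assumptions, not the project's actual implementation):

use yaml_rust::YamlLoader;

fn main() {
    let enable_deprecated_rules = false; // would come from the CLI flag

    let docs = YamlLoader::load_from_str("title: Old Rule\nstatus: deprecated\n").unwrap();
    let yaml_doc = &docs[0];

    // Skip the rule when deprecated rules are not explicitly enabled.
    if !enable_deprecated_rules {
        let rule_status = yaml_doc["status"].as_str();
        if rule_status.is_some() && rule_status.unwrap() == "deprecated" {
            println!("skipping deprecated rule: {:?}", yaml_doc["title"].as_str());
            return;
        }
    }
    println!("loading rule: {:?}", yaml_doc["title"].as_str());
}
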
@@ -191,7 +191,7 @@ mod tests {
     };
     let _ = &yaml.read_dir(
         "test_files/rules/yaml/".to_string(),
-        &"".to_owned(),
+        &String::default(),
         &exclude_ids,
     );
     assert_ne!(yaml.files.len(), 0);