Merge branch 'main' into feature/start_finish_time

This commit is contained in:
itiB
2021-12-06 23:07:08 +09:00
1197 changed files with 3668 additions and 724 deletions

View File

@@ -21,6 +21,17 @@ pub struct CsvFormat<'a> {
filepath: &'a str,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct DisplayFormat<'a> {
time: &'a str,
computername: &'a str,
eventid: &'a str,
level: &'a str,
alert: &'a str,
details: &'a str,
}
pub fn after_fact() {
let fn_emit_csv_err = |err: Box<dyn Error>| {
AlertMessage::alert(
@@ -30,7 +41,7 @@ pub fn after_fact() {
.ok();
process::exit(1);
};
let mut displayflag = false;
let mut target: Box<dyn io::Write> = if let Some(csv_path) = configs::CONFIG
.read()
.unwrap()
@@ -50,31 +61,51 @@ pub fn after_fact() {
}
}
} else {
displayflag = true;
// Output to standard output
Box::new(io::stdout())
};
if let Err(err) = emit_csv(&mut target) {
fn_emit_csv_err(err);
if let Err(err) = emit_csv(&mut target, displayflag) {
fn_emit_csv_err(Box::new(err));
}
}
fn emit_csv<W: std::io::Write>(writer: &mut W) -> Result<(), Box<dyn Error>> {
let mut wtr = csv::WriterBuilder::new().from_writer(writer);
fn emit_csv<W: std::io::Write>(writer: &mut W, displayflag: bool) -> io::Result<()> {
let mut wtr;
if displayflag {
wtr = csv::WriterBuilder::new()
.delimiter(b'|')
.from_writer(writer);
} else {
wtr = csv::WriterBuilder::new().from_writer(writer);
}
let messages = print::MESSAGES.lock().unwrap();
let mut detect_count = 0;
for (time, detect_infos) in messages.iter() {
for detect_info in detect_infos {
wtr.serialize(CsvFormat {
time: &format_time(time),
filepath: &detect_info.filepath,
rulepath: &detect_info.rulepath,
level: &detect_info.level,
computername: &detect_info.computername,
eventid: &detect_info.eventid,
alert: &detect_info.alert,
details: &detect_info.detail,
})?;
if displayflag {
wtr.serialize(DisplayFormat {
time: &format!("{} ", &format_time(time)),
level: &format!(" {} ", &detect_info.level),
computername: &format!(" {} ", &detect_info.computername),
eventid: &format!(" {} ", &detect_info.eventid),
alert: &format!(" {} ", &detect_info.alert),
details: &format!(" {}", &detect_info.detail),
})?;
} else {
// Format for CSV output
wtr.serialize(CsvFormat {
time: &format_time(time),
filepath: &detect_info.filepath,
rulepath: &detect_info.rulepath,
level: &detect_info.level,
computername: &detect_info.computername,
eventid: &detect_info.eventid,
alert: &detect_info.alert,
details: &detect_info.detail,
})?;
}
}
detect_count += detect_infos.len();
}
@@ -82,7 +113,7 @@ fn emit_csv<W: std::io::Write>(writer: &mut W) -> Result<(), Box<dyn Error>> {
wtr.flush()?;
println!("");
println!("Events Detected:{:?}", detect_count);
println!("Total events detected: {:?}", detect_count);
Ok(())
}
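For context, a minimal standalone sketch (not the project code) of the two output modes introduced by displayflag: the only difference is the csv::WriterBuilder configuration, with b'|' producing the pipe-separated display rows and the default producing ordinary CSV. The build_writer helper and the sample record values below are illustrative assumptions, relying only on the csv crate already used above.
use std::io;

// Hypothetical helper mirroring the displayflag branch above: '|' for display output, ',' for CSV.
fn build_writer<W: io::Write>(writer: W, displayflag: bool) -> csv::Writer<W> {
    if displayflag {
        csv::WriterBuilder::new().delimiter(b'|').from_writer(writer)
    } else {
        csv::WriterBuilder::new().from_writer(writer)
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut wtr = build_writer(io::stdout(), true);
    // With delimiter b'|' this prints "Time|Computername|Eventid|Level|Alert|Details" style rows.
    wtr.write_record(&["Time", "Computername", "Eventid", "Level", "Alert", "Details"])?;
    wtr.write_record(&["2021-12-06 23:07:08", "PC01", "4625", "high", "Suspicious Logon", "-"])?; // illustrative values
    wtr.flush()?;
    Ok(())
}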
@@ -107,81 +138,165 @@ where
}
}
#[test]
fn test_emit_csv() {
#[cfg(test)]
mod tests {
use crate::afterfact::emit_csv;
use crate::detections::print;
use chrono::{Local, TimeZone, Utc};
use serde_json::Value;
use std::fs::File;
use std::fs::{read_to_string, remove_file};
let testfilepath: &str = "test.evtx";
let testrulepath: &str = "test-rule.yml";
let test_title = "test_title";
let test_level = "high";
let test_computername = "testcomputer";
let test_eventid = "1111";
let output = "pokepoke";
{
let mut messages = print::MESSAGES.lock().unwrap();
use std::io;
let val = r##"
{
"Event": {
"EventData": {
"CommandRLine": "hoge"
},
"System": {
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event: Value = serde_json::from_str(val).unwrap();
messages.insert(
testfilepath.to_string(),
testrulepath.to_string(),
&event,
test_level.to_string(),
test_computername.to_string(),
test_eventid.to_string(),
test_title.to_string(),
output.to_string(),
);
#[test]
fn test_emit_csv() {
// Because parallel test execution cannot guarantee the read order, the contents of the static variable cannot be guaranteed, so this test runs sequentially.
test_emit_csv_output();
test_emit_csv_output();
}
let expect_time = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect = "Time,Computername,Eventid,Level,Alert,Details,Rulepath,Filepath\n".to_string()
+ &expect_tz
.clone()
.format("%Y-%m-%d %H:%M:%S%.3f %:z")
.to_string()
+ ","
+ test_computername
+ ","
+ test_eventid
+ ","
+ test_level
+ ","
+ test_title
+ ","
+ output
+ ","
+ testrulepath
+ ","
+ &testfilepath.to_string()
+ "\n";
let mut file: Box<dyn io::Write> =
Box::new(File::create("./test_emit_csv.csv".to_string()).unwrap());
assert!(emit_csv(&mut file).is_ok());
match read_to_string("./test_emit_csv.csv") {
Err(_) => panic!("Failed to open file"),
Ok(s) => {
assert_eq!(s, expect);
fn test_emit_csv_output() {
let testfilepath: &str = "test.evtx";
let testrulepath: &str = "test-rule.yml";
let test_title = "test_title";
let test_level = "high";
let test_computername = "testcomputer";
let test_eventid = "1111";
let output = "pokepoke";
{
let mut messages = print::MESSAGES.lock().unwrap();
messages.clear();
let val = r##"
{
"Event": {
"EventData": {
"CommandRLine": "hoge"
},
"System": {
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event: Value = serde_json::from_str(val).unwrap();
messages.insert(
testfilepath.to_string(),
testrulepath.to_string(),
&event,
test_level.to_string(),
test_computername.to_string(),
test_eventid.to_string(),
test_title.to_string(),
output.to_string(),
);
}
};
assert!(remove_file("./test_emit_csv.csv").is_ok());
let expect_time = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect = "Time,Computername,Eventid,Level,Alert,Details,Rulepath,Filepath\n"
.to_string()
+ &expect_tz
.clone()
.format("%Y-%m-%d %H:%M:%S%.3f %:z")
.to_string()
+ ","
+ test_computername
+ ","
+ test_eventid
+ ","
+ test_level
+ ","
+ test_title
+ ","
+ output
+ ","
+ testrulepath
+ ","
+ &testfilepath.to_string()
+ "\n";
let mut file: Box<dyn io::Write> =
Box::new(File::create("./test_emit_csv.csv".to_string()).unwrap());
assert!(emit_csv(&mut file, false).is_ok());
match read_to_string("./test_emit_csv.csv") {
Err(_) => panic!("Failed to open file."),
Ok(s) => {
assert_eq!(s, expect);
}
};
assert!(remove_file("./test_emit_csv.csv").is_ok());
check_emit_csv_display();
}
fn check_emit_csv_display() {
let testfilepath: &str = "test2.evtx";
let testrulepath: &str = "test-rule2.yml";
let test_title = "test_title2";
let test_level = "medium";
let test_computername = "testcomputer2";
let test_eventid = "2222";
let output = "displaytest";
{
let mut messages = print::MESSAGES.lock().unwrap();
messages.clear();
let val = r##"
{
"Event": {
"EventData": {
"CommandRLine": "hoge"
},
"System": {
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event: Value = serde_json::from_str(val).unwrap();
messages.insert(
testfilepath.to_string(),
testrulepath.to_string(),
&event,
test_level.to_string(),
test_computername.to_string(),
test_eventid.to_string(),
test_title.to_string(),
output.to_string(),
);
messages.debug();
}
let expect_time = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect = "Time|Computername|Eventid|Level|Alert|Details\n".to_string()
+ &expect_tz
.clone()
.format("%Y-%m-%d %H:%M:%S%.3f %:z")
.to_string()
+ " | "
+ test_computername
+ " | "
+ test_eventid
+ " | "
+ test_level
+ " | "
+ test_title
+ " | "
+ output
+ "\n";
let mut file: Box<dyn io::Write> =
Box::new(File::create("./test_emit_csv_display.txt".to_string()).unwrap());
assert!(emit_csv(&mut file, true).is_ok());
match read_to_string("./test_emit_csv_display.txt") {
Err(_) => panic!("Failed to open file."),
Ok(s) => {
assert_eq!(s, expect);
}
};
assert!(remove_file("./test_emit_csv_display.txt").is_ok());
}
}

View File

@@ -1,13 +1,13 @@
use crate::detections::utils;
use clap::{App, AppSettings, ArgMatches};
use lazy_static::lazy_static;
use std::collections::HashMap;
use std::collections::{HashMap, HashSet};
use std::sync::RwLock;
lazy_static! {
pub static ref CONFIG: RwLock<ConfigReader> = RwLock::new(ConfigReader::new());
pub static ref LEVELMAP: HashMap<String, u8> = {
pub static ref LEVELMAP: HashMap<String, u128> = {
let mut levelmap = HashMap::new();
levelmap.insert("INFO".to_owned(), 1);
levelmap.insert("INFORMATIONAL".to_owned(), 1);
levelmap.insert("LOW".to_owned(), 2);
levelmap.insert("MEDIUM".to_owned(), 3);
levelmap.insert("HIGH".to_owned(), 4);
@@ -21,6 +21,7 @@ pub struct ConfigReader {
pub args: ArgMatches<'static>,
pub event_key_alias_config: EventKeyAliasConfig,
pub event_timeline_config: EventInfoConfig,
pub target_eventids: TargetEventIds,
}
impl ConfigReader {
@@ -29,6 +30,7 @@ impl ConfigReader {
args: build_app(),
event_key_alias_config: load_eventkey_alias("config/eventkey_alias.txt"),
event_timeline_config: load_eventcode_info("config/timeline_event_info.txt"),
target_eventids: load_target_ids("config/target_eventids.txt"),
}
}
}
@@ -47,25 +49,25 @@ fn build_app<'a>() -> ArgMatches<'a> {
return ArgMatches::default();
}
let usages = "-f --filepath=[FILEPATH] 'Event file path'
--csv-timeline=[CSV_TIMELINE] 'Csv output timeline'
let usages = "-f --filepath=[FILEPATH] 'File path to one .evtx file'
--csv-timeline=[CSV_TIMELINE] 'Save the timeline in CSV format'
--rfc-2822 'Output date and time in RFC 2822 format. Example: Mon, 07 Aug 2006 12:34:56 -0600'
--rfc-3339 'Output date and time in RFC 3339 format. Example: 2006-08-07T12:34:56.485214 -06:00'
--verbose 'Output check information to target event file path and rule file.'
--verbose 'Output verbose information to target event file path and rule file'
--start-time=[STARTTIME]
--end-time=[ENDTIME]
-q 'Quiet Output Logo'
-r --rules=[RULEDIRECTORY] 'using target of rule file directory'
-L --level=[LEVEL] 'Specified execute rule level(default: LOW)'
-u --utc 'Output time in UTC format(default: local time)'
-d --directory=[DIRECTORY] 'Event log files directory'
-s --statistics 'Prints statistics for event logs'
-t --threadnum=[NUM] 'Thread number'
-q 'Quiet mode. Do not display the launch banner'
-r --rules=[RULEDIRECTORY] 'Rule file directory (default: ./rules)'
-L --level=[LEVEL] 'Minimum level for rules (default: INFORMATIONAL)'
-u --utc 'Output time in UTC format (default: local time)'
-d --directory=[DIRECTORY] 'Directory of multiple .evtx files'
-s --statistics 'Prints statistics of event IDs'
-t --threadnum=[NUM] 'Thread number (default: optimal number for performance)'
--contributors 'Prints the list of contributors'";
App::new(&program)
.about("hayabusa. Aiming to be the world's greatest Windows event log analysis tool!")
.about("Hayabusa: Aiming to be the world's greatest Windows event log analysis tool!")
.version("1.0.0")
.author("Author name Yamato-Security(https://github.com/Yamato-Security/hayabusa)")
.author("Yamato-Security(https://github.com/Yamato-Security/hayabusa)")
.setting(AppSettings::VersionlessSubcommands)
.usage(usages)
.args_from_usage(usages)
@@ -82,6 +84,40 @@ fn is_test_mode() -> bool {
return false;
}
#[derive(Debug, Clone)]
pub struct TargetEventIds {
ids: HashSet<String>,
}
impl TargetEventIds {
pub fn new() -> TargetEventIds {
return TargetEventIds {
ids: HashSet::new(),
};
}
pub fn is_target(&self, id: &String) -> bool {
// If the set is empty, all EventIds are targeted.
if self.ids.is_empty() {
return true;
}
return self.ids.contains(id);
}
}
fn load_target_ids(path: &str) -> TargetEventIds {
let mut ret = TargetEventIds::new();
let lines = utils::read_txt(path).unwrap(); // Treat a missing file as an error
for line in lines {
if line.is_empty() {
continue;
}
ret.ids.insert(line);
}
return ret;
}
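For reference, a minimal standalone sketch of the target_eventids.txt filter semantics shown above: an empty set disables filtering, otherwise only listed IDs pass. The is_target free function here is a hypothetical stand-in for the TargetEventIds method, and the example IDs are made up.
use std::collections::HashSet;

// Standalone sketch of the filter semantics: empty set means "no filter",
// otherwise only listed EventIDs are kept.
fn is_target(ids: &HashSet<String>, id: &str) -> bool {
    ids.is_empty() || ids.contains(id)
}

fn main() {
    let empty: HashSet<String> = HashSet::new();
    assert!(is_target(&empty, "4624")); // no entries -> every EventID is targeted

    let ids: HashSet<String> = ["4624", "4625"].iter().map(|s| s.to_string()).collect();
    assert!(is_target(&ids, "4625"));
    assert!(!is_target(&ids, "1102")); // not listed -> filtered out
}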
#[derive(Debug, Clone)]
pub struct EventKeyAliasConfig {
key_to_eventkey: HashMap<String, String>,

View File

@@ -5,6 +5,7 @@ use serde_json::Value;
use std::collections::HashMap;
use tokio::{runtime::Runtime, spawn, task::JoinHandle};
use crate::detections::configs;
use crate::detections::print::AlertMessage;
use crate::detections::print::MESSAGES;
use crate::detections::rule;
@@ -54,11 +55,6 @@ impl Detection {
// Parse the rule files
let mut rulefile_loader = ParseYaml::new();
let result_readdir = rulefile_loader.read_dir(rulespath.unwrap_or(DIRPATH_RULES), &level);
Detection::print_rule_load_info(
rulefile_loader.rulecounter,
rulefile_loader.parseerror_count,
rulefile_loader.ignore_count,
);
if result_readdir.is_err() {
AlertMessage::alert(
&mut std::io::stderr().lock(),
@@ -67,7 +63,7 @@ impl Detection {
.ok();
return vec![];
}
let mut parseerror_count = rulefile_loader.errorrule_count;
let return_if_success = |mut rule: RuleNode| {
let err_msgs_result = rule.init();
if err_msgs_result.is_ok() {
@@ -77,24 +73,30 @@ impl Detection {
// Output an error if parsing the rule file failed
err_msgs_result.err().iter().for_each(|err_msgs| {
let errmsg_body =
format!("Failed to parse Rule file. (FilePath : {})", rule.rulepath);
format!("Failed to parse rule file. (FilePath : {})", rule.rulepath);
AlertMessage::warn(&mut std::io::stdout().lock(), errmsg_body).ok();
err_msgs.iter().for_each(|err_msg| {
AlertMessage::warn(&mut std::io::stdout().lock(), err_msg.to_string()).ok();
});
parseerror_count += 1;
println!(""); // 一行開けるためのprintln
});
return Option::None;
};
// parse rule files
return rulefile_loader
let ret = rulefile_loader
.files
.into_iter()
.map(|rule_file_tuple| rule::create_rule(rule_file_tuple.0, rule_file_tuple.1))
.filter_map(return_if_success)
.collect();
Detection::print_rule_load_info(
&rulefile_loader.rulecounter,
&parseerror_count,
&rulefile_loader.ignorerule_count,
);
return ret;
}
// Executes multiple rules, one at a time, against multiple event records.
@@ -141,6 +143,41 @@ impl Detection {
}
}
pub fn print_unique_results(&self) {
let rules = &self.rules;
let levellabel = Vec::from([
"Critical",
"High",
"Medium",
"Low",
"Informational",
"Undefined",
]);
// levelcounts is [Undefined, Informational, Low, Medium, High, Critical]
let mut levelcounts = Vec::from([0, 0, 0, 0, 0, 0]);
for rule in rules.into_iter() {
if rule.check_exist_countdata() {
let suffix = configs::LEVELMAP
.get(
&rule.yaml["level"]
.as_str()
.unwrap_or("")
.to_owned()
.to_uppercase(),
)
.unwrap_or(&0);
levelcounts[*suffix as usize] += 1;
}
}
let mut total_unique = 0;
levelcounts.reverse();
for (i, value) in levelcounts.iter().enumerate() {
println!("{} alerts: {}", levellabel[i], value);
total_unique += value;
}
println!("Unique events detected: {}", total_unique);
}
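A small worked sketch of the counting logic above, assuming the LEVELMAP numbering implied by this diff (INFORMATIONAL = 1 up to CRITICAL = 5, the CRITICAL entry being truncated from the hunk, and 0 for undefined levels): after reverse(), index 0 of levelcounts lines up with the "Critical" label. The counts below are made-up values.
fn main() {
    let levellabel = ["Critical", "High", "Medium", "Low", "Informational", "Undefined"];
    // Indexed by level number: [Undefined, Informational, Low, Medium, High, Critical]
    let mut levelcounts = vec![0u32, 2, 0, 1, 3, 1];
    levelcounts.reverse();
    let mut total_unique = 0;
    for (label, count) in levellabel.iter().zip(levelcounts.iter()) {
        println!("{} alerts: {}", label, count);
        total_unique += count;
    }
    println!("Unique events detected: {}", total_unique);
}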
// Executes one rule against multiple event records.
fn execute_rule(mut rule: RuleNode, records: Arc<Vec<EvtxRecordInfo>>) -> RuleNode {
let records = &*records;
@@ -169,7 +206,9 @@ impl Detection {
record_info.record["Event"]["System"]["Computer"]
.to_string()
.replace("\"", ""),
get_serde_number_to_string(&record_info.record["Event"]["System"]["EventID"]),
get_serde_number_to_string(&record_info.record["Event"]["System"]["EventID"])
.unwrap_or("-".to_owned())
.to_string(),
rule.yaml["title"].as_str().unwrap_or("").to_string(),
rule.yaml["output"].as_str().unwrap_or("").to_string(),
);
@@ -210,25 +249,25 @@ impl Detection {
return ret;
}
pub fn print_rule_load_info(
rc: HashMap<String, u128>,
parseerror_count: u128,
ignore_count: u128,
rc: &HashMap<String, u128>,
parseerror_count: &u128,
ignore_count: &u128,
) {
let mut total = parseerror_count + ignore_count;
rc.into_iter().for_each(|(key, value)| {
println!("{} Rules: {}", key, value);
println!("{} rules: {}", key, value);
total += value;
});
println!("Ignored Rule Count: {}", ignore_count);
println!("Rule Parse Errors Count: {}", parseerror_count);
println!("Total Detection Rules: {}", total);
println!("Ignored rules: {}", ignore_count);
println!("Rule parsing errors: {}", parseerror_count);
println!("Total detection rules: {}", total);
println!("");
}
}
#[test]
fn test_parse_rule_files() {
let level = "INFO";
let level = "informational";
let opt_rule_path = Some("./test_files/rules/level_yaml");
let cole = Detection::parse_rule_files(level.to_owned(), opt_rule_path);
assert_eq!(5, cole.len());

View File

@@ -30,6 +30,7 @@ pub struct AlertMessage {}
lazy_static! {
pub static ref MESSAGES: Mutex<Message> = Mutex::new(Message::new());
pub static ref ALIASREGEX: Regex = Regex::new(r"%[a-zA-Z0-9-_]+%").unwrap();
}
impl Message {
@@ -101,8 +102,7 @@ impl Message {
fn parse_message(&mut self, event_record: &Value, output: String) -> String {
let mut return_message: String = output;
let mut hash_map: HashMap<String, String> = HashMap::new();
let re = Regex::new(r"%[a-zA-Z0-9-_]+%").unwrap();
for caps in re.captures_iter(&return_message) {
for caps in ALIASREGEX.captures_iter(&return_message) {
let full_target_str = &caps[0];
let target_length = full_target_str.chars().count() - 2; // subtract 2 for the two surrounding '%' characters
let target_str = full_target_str
@@ -118,16 +118,20 @@ impl Message {
.get_event_key(target_str.to_string())
{
let split: Vec<&str> = array_str.split(".").collect();
let mut is_exist_event_key = false;
let mut tmp_event_record: &Value = event_record.into();
for s in split {
if let Some(record) = tmp_event_record.get(s) {
is_exist_event_key = true;
tmp_event_record = record;
}
}
hash_map.insert(
full_target_str.to_string(),
get_serde_number_to_string(tmp_event_record),
);
if is_exist_event_key {
let hash_value = get_serde_number_to_string(tmp_event_record);
if hash_value.is_some() {
hash_map.insert(full_target_str.to_string(), hash_value.unwrap());
}
}
}
}
@@ -161,7 +165,7 @@ impl Message {
detect_count += detect_infos.len();
}
println!("");
println!("Events Detected:{:?}", detect_count);
println!("Total Events Detected:{:?}", detect_count);
}
pub fn iter(&self) -> &BTreeMap<DateTime<Utc>, Vec<DetectInfo>> {
@@ -188,6 +192,11 @@ impl Message {
return Option::Some(datetime.unwrap());
}
}
/// Clears the map inside Message. Added to guarantee idempotence when testing.
pub fn clear(&mut self) {
self.map.clear();
}
}
impl AlertMessage {
@@ -327,4 +336,87 @@ mod tests {
let mut stdout = stdout.lock();
AlertMessage::alert(&mut stdout, input.to_string()).expect("[WARN] TESTWarn!");
}
#[test]
/// Checks that the message is parsed with information from the target record for keys specified in output (already defined in eventkey_alias.txt)
fn test_parse_message() {
let mut message = Message::new();
let json_str = r##"
{
"Event": {
"EventData": {
"CommandLine": "parsetest1"
},
"System": {
"Computer": "testcomputer1",
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
let expected = "commandline:parsetest1 computername:testcomputer1";
assert_eq!(
message.parse_message(
&event_record,
"commandline:%CommandLine% computername:%ComputerName%".to_owned()
),
expected,
);
}
#[test]
/// Output test for when a key specified in output is not defined in eventkey_alias.txt
fn test_parse_message_not_exist_key_in_output() {
let mut message = Message::new();
let json_str = r##"
{
"Event": {
"EventData": {
"CommandLine": "parsetest2"
},
"System": {
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
let expected = "NoExistKey:%TESTNoExistKey%";
assert_eq!(
message.parse_message(&event_record, "NoExistKey:%TESTNoExistKey%".to_owned()),
expected,
);
}
#[test]
/// Output test for when a key specified in output (defined in eventkey_alias.txt) has no matching information in the target record
fn test_parse_message_not_exist_value_in_record() {
let mut message = Message::new();
let json_str = r##"
{
"Event": {
"EventData": {
"CommandLine": "parsetest3"
},
"System": {
"TimeCreated_attributes": {
"SystemTime": "1996-02-27T01:05:01Z"
}
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
let expected = "commandline:parsetest3 computername:%ComputerName%";
assert_eq!(
message.parse_message(
&event_record,
"commandline:%CommandLine% computername:%ComputerName%".to_owned()
),
expected,
);
}
}

View File

@@ -1,5 +1,23 @@
use lazy_static::lazy_static;
use regex::Regex;
lazy_static! {
// Define the list of regular expressions used for lexical analysis here.
// This follows the tokendefs of SigmaConditionTokenizer in tools/sigma/parser/condition.py in the Sigma GitHub repository.
pub static ref AGGREGATION_REGEXMAP: Vec<Regex> = vec![
Regex::new(r"^count\( *\w* *\)").unwrap(), // countの式
Regex::new(r"^ ").unwrap(),
Regex::new(r"^by").unwrap(),
Regex::new(r"^==").unwrap(),
Regex::new(r"^<=").unwrap(),
Regex::new(r"^>=").unwrap(),
Regex::new(r"^<").unwrap(),
Regex::new(r"^>").unwrap(),
Regex::new(r"^\w+").unwrap(),
];
pub static ref RE_PIPE: Regex = Regex::new(r"\|.*").unwrap();
}
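For reference, a standalone sketch (not the project's tokenizer) of the prefix-anchored lexing loop these patterns support, as compile_body does further down: find the first pattern matching at the head of the remaining string, emit the match, and strip it. The sample condition string is made up.
use regex::Regex;

// Toy tokenizer: repeatedly match at the start of the remaining string and cut the match off.
fn main() {
    let patterns: Vec<Regex> = vec![
        Regex::new(r"^count\( *\w* *\)").unwrap(),
        Regex::new(r"^ ").unwrap(),
        Regex::new(r"^by").unwrap(),
        Regex::new(r"^==").unwrap(),
        Regex::new(r"^<=").unwrap(),
        Regex::new(r"^>=").unwrap(),
        Regex::new(r"^<").unwrap(),
        Regex::new(r"^>").unwrap(),
        Regex::new(r"^\w+").unwrap(),
    ];

    let mut rest = "count( EventID ) by computer >= 3".to_string();
    while !rest.is_empty() {
        let captured = patterns.iter().find_map(|re| re.captures(rest.as_str()));
        let matched = match captured {
            Some(caps) => caps.get(0).unwrap().as_str().to_string(),
            None => break, // an unusable character was found
        };
        if matched != " " {
            println!("token: {:?}", matched);
        }
        rest = rest.replacen(&matched, "", 1);
    }
}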
#[derive(Debug)]
pub struct AggregationParseInfo {
pub _field_name: Option<String>, // the string inside the parentheses of count()
@@ -24,35 +42,18 @@ pub enum AggregationConditionToken {
/// Parses what Sigma rules call the AggregationCondition.
/// The AggregationCondition is the part of the expression in condition that comes after the pipe.
#[derive(Debug)]
pub struct AggegationConditionCompiler {
regex_patterns: Vec<Regex>,
}
pub struct AggegationConditionCompiler {}
impl AggegationConditionCompiler {
pub fn new() -> Self {
// Define the list of regular expressions used for lexical analysis here.
// This follows the tokendefs of SigmaConditionTokenizer in tools/sigma/parser/condition.py in the Sigma GitHub repository.
let mut regex_patterns = vec![];
regex_patterns.push(Regex::new(r"^count\( *\w* *\)").unwrap()); // count expression
regex_patterns.push(Regex::new(r"^ ").unwrap());
regex_patterns.push(Regex::new(r"^by").unwrap());
regex_patterns.push(Regex::new(r"^==").unwrap());
regex_patterns.push(Regex::new(r"^<=").unwrap());
regex_patterns.push(Regex::new(r"^>=").unwrap());
regex_patterns.push(Regex::new(r"^<").unwrap());
regex_patterns.push(Regex::new(r"^>").unwrap());
regex_patterns.push(Regex::new(r"^\w+").unwrap());
return AggegationConditionCompiler {
regex_patterns: regex_patterns,
};
AggegationConditionCompiler {}
}
pub fn compile(&self, condition_str: String) -> Result<Option<AggregationParseInfo>, String> {
let result = self.compile_body(condition_str);
if let Result::Err(msg) = result {
return Result::Err(format!(
"aggregation condition parse error has occurred. {}",
"An aggregation condition parse error has occurred. {}",
msg
));
} else {
@@ -65,8 +66,7 @@ impl AggegationConditionCompiler {
condition_str: String,
) -> Result<Option<AggregationParseInfo>, String> {
// Extract only the pipe portion
let re_pipe = Regex::new(r"\|.*").unwrap();
let captured = re_pipe.captures(&condition_str);
let captured = self::RE_PIPE.captures(&condition_str);
if captured.is_none() {
// No pipe, so we are done
return Result::Ok(Option::None);
@@ -94,7 +94,7 @@ impl AggegationConditionCompiler {
let mut tokens = Vec::new();
while cur_condition_str.len() != 0 {
let captured = self.regex_patterns.iter().find_map(|regex| {
let captured = self::AGGREGATION_REGEXMAP.iter().find_map(|regex| {
return regex.captures(cur_condition_str.as_str());
});
if captured.is_none() {
@@ -137,7 +137,7 @@ impl AggegationConditionCompiler {
) -> Result<Option<AggregationParseInfo>, String> {
if tokens.is_empty() {
// Having only a pipe is invalid, so return an error
return Result::Err("There are no strings after pipe(|).".to_string());
return Result::Err("There are no strings after the pipe(|).".to_string());
}
let mut token_ite = tokens.into_iter();
@@ -150,14 +150,14 @@ impl AggegationConditionCompiler {
}
} else {
// There are many possible patterns, which makes this tricky, but note that only the count keyword can be used.
return Result::Err("aggregation condition can use count only.".to_string());
return Result::Err("The aggregation condition can only use count.".to_string());
}
let token = token_ite.next();
if token.is_none() {
// It is an error if the operator is missing
return Result::Err(
"count keyword needs compare operator and number like '> 3'".to_string(),
"The count keyword needs a compare operator and number like '> 3'".to_string(),
);
}
@@ -168,14 +168,18 @@ impl AggegationConditionCompiler {
let after_by = token_ite.next();
if after_by.is_none() {
// It is an error if nothing follows BY
return Result::Err("by keyword needs field name like 'by EventID'".to_string());
return Result::Err(
"The by keyword needs a field name like 'by EventID'".to_string(),
);
}
if let AggregationConditionToken::KEYWORD(keyword) = after_by.unwrap() {
by_field_name = Option::Some(keyword);
token_ite.next()
} else {
return Result::Err("by keyword needs field name like 'by EventID'".to_string());
return Result::Err(
"The by keyword needs a field name like 'by EventID'".to_string(),
);
}
} else {
Option::Some(token)
@@ -185,14 +189,14 @@ impl AggegationConditionCompiler {
if token.is_none() {
// It is an error if the operator is missing
return Result::Err(
"count keyword needs compare operator and number like '> 3'".to_string(),
"The count keyword needs a compare operator and number like '> 3'".to_string(),
);
}
let cmp_token = token.unwrap();
if !self.is_cmp_op(&cmp_token) {
return Result::Err(
"count keyword needs compare operator and number like '> 3'".to_string(),
"The count keyword needs a compare operator and number like '> 3'".to_string(),
);
}
@@ -201,17 +205,17 @@ impl AggegationConditionCompiler {
let number: Result<i32, _> = number.parse();
if number.is_err() {
// There is no number after the comparison operator.
return Result::Err("compare operator needs a number like '> 3'.".to_string());
return Result::Err("The compare operator needs a number like '> 3'.".to_string());
} else {
number.unwrap()
}
} else {
// There is no number after the comparison operator.
return Result::Err("compare operator needs a number like '> 3'.".to_string());
return Result::Err("The compare operator needs a number like '> 3'.".to_string());
};
if token_ite.next().is_some() {
return Result::Err("unnecessary word was found.".to_string());
return Result::Err("An unnecessary word was found.".to_string());
}
let info = AggregationParseInfo {
@@ -379,7 +383,7 @@ mod tests {
assert_eq!(true, result.is_err());
assert_eq!(
"aggregation condition parse error has occurred. There are no strings after pipe(|)."
"An aggregation condition parse error has occurred. There are no strings after the pipe(|)."
.to_string(),
result.unwrap_err()
);
@@ -393,7 +397,7 @@ mod tests {
assert_eq!(true, result.is_err());
assert_eq!(
"aggregation condition parse error has occurred. An unusable character was found."
"An aggregation condition parse error has occurred. An unusable character was found."
.to_string(),
result.unwrap_err()
);
@@ -407,7 +411,7 @@ mod tests {
compiler.compile("select1 or select2 | by count( hogehoge) by snsn > 3".to_string());
assert_eq!(true, result.is_err());
assert_eq!("aggregation condition parse error has occurred. aggregation condition can use count only.".to_string(),result.unwrap_err());
assert_eq!("An aggregation condition parse error has occurred. The aggregation condition can only use count.".to_string(),result.unwrap_err());
}
#[test]
@@ -417,7 +421,7 @@ mod tests {
let result = compiler.compile("select1 or select2 | count( hogehoge) 3".to_string());
assert_eq!(true, result.is_err());
assert_eq!("aggregation condition parse error has occurred. count keyword needs compare operator and number like '> 3'".to_string(),result.unwrap_err());
assert_eq!("An aggregation condition parse error has occurred. The count keyword needs a compare operator and number like '> 3'".to_string(),result.unwrap_err());
}
#[test]
@@ -427,7 +431,7 @@ mod tests {
let result = compiler.compile("select1 or select2 | count( hogehoge) by".to_string());
assert_eq!(true, result.is_err());
assert_eq!("aggregation condition parse error has occurred. by keyword needs field name like 'by EventID'".to_string(),result.unwrap_err());
assert_eq!("An aggregation condition parse error has occurred. The by keyword needs a field name like 'by EventID'".to_string(),result.unwrap_err());
}
#[test]
@@ -438,7 +442,7 @@ mod tests {
compiler.compile("select1 or select2 | count( hogehoge ) by hoe >".to_string());
assert_eq!(true, result.is_err());
assert_eq!("aggregation condition parse error has occurred. compare operator needs a number like '> 3'.".to_string(),result.unwrap_err());
assert_eq!("An aggregation condition parse error has occurred. The compare operator needs a number like '> 3'.".to_string(),result.unwrap_err());
}
#[test]
@@ -450,7 +454,7 @@ mod tests {
assert_eq!(true, result.is_err());
assert_eq!(
"aggregation condition parse error has occurred. unnecessary word was found."
"An aggregation condition parse error has occurred. An unnecessary word was found."
.to_string(),
result.unwrap_err()
);

View File

@@ -1,3 +1,4 @@
use lazy_static::lazy_static;
use regex::Regex;
use self::selectionnodes::{
@@ -6,6 +7,16 @@ use self::selectionnodes::{
use super::selectionnodes;
use std::{collections::HashMap, sync::Arc};
lazy_static! {
pub static ref CONDITION_REGEXMAP: Vec<Regex> = vec![
Regex::new(r"^\(").unwrap(),
Regex::new(r"^\)").unwrap(),
Regex::new(r"^ ").unwrap(),
Regex::new(r"^\w+").unwrap(),
];
pub static ref RE_PIPE: Regex = Regex::new(r"\|.*").unwrap();
}
#[derive(Debug, Clone)]
/// Tokens produced by lexical analysis
pub enum ConditionToken {
@@ -92,25 +103,12 @@ impl ConditionToken {
}
#[derive(Debug)]
pub struct ConditionCompiler {
regex_patterns: Vec<Regex>,
}
pub struct ConditionCompiler {}
// Class that reads the condition expression.
impl ConditionCompiler {
pub fn new() -> Self {
// Define the list of regular expressions used for lexical analysis here.
let mut regex_patterns = vec![];
regex_patterns.push(Regex::new(r"^\(").unwrap());
regex_patterns.push(Regex::new(r"^\)").unwrap());
regex_patterns.push(Regex::new(r"^ ").unwrap());
// For ^\w+, see SigmaConditionTokenizer in sigma/tools/sigma/parser/condition.py in the Sigma source.
// Specifically based on (SigmaConditionToken.TOKEN_ID, re.compile("[\\w*]+")), in that source.
regex_patterns.push(Regex::new(r"^\w+").unwrap());
return ConditionCompiler {
regex_patterns: regex_patterns,
};
ConditionCompiler {}
}
pub fn compile_condition(
@@ -119,8 +117,7 @@ impl ConditionCompiler {
name_2_node: &HashMap<String, Arc<Box<dyn SelectionNode + Send + Sync>>>,
) -> Result<Box<dyn SelectionNode + Send + Sync>, String> {
// The pipe portion is not handled here
let re_pipe = Regex::new(r"\|.*").unwrap();
let captured = re_pipe.captures(&condition_str);
let captured = self::RE_PIPE.captures(&condition_str);
let condition_str = if captured.is_some() {
let captured = captured.unwrap().get(0).unwrap().as_str().to_string();
condition_str.replacen(&captured, "", 1)
@@ -130,7 +127,7 @@ impl ConditionCompiler {
let result = self.compile_condition_body(condition_str, name_2_node);
if let Result::Err(msg) = result {
return Result::Err(format!("condition parse error has occured. {}", msg));
return Result::Err(format!("A condition parse error has occured. {}", msg));
} else {
return result;
}
@@ -192,7 +189,7 @@ impl ConditionCompiler {
let mut tokens = Vec::new();
while cur_condition_str.len() != 0 {
let captured = self.regex_patterns.iter().find_map(|regex| {
let captured = self::CONDITION_REGEXMAP.iter().find_map(|regex| {
return regex.captures(cur_condition_str.as_str());
});
if captured.is_none() {
@@ -269,7 +266,7 @@ impl ConditionCompiler {
}
// This means that no matching right parenthesis was found by the end
if left_cnt != right_cnt {
return Result::Err("expected ')'. but not found.".to_string());
return Result::Err("')' was expected but not found.".to_string());
}
// Call recursively here.
@@ -284,7 +281,7 @@ impl ConditionCompiler {
};
});
if is_right_left {
return Result::Err("expected '('. but not found.".to_string());
return Result::Err("'(' was expected but not found.".to_string());
}
return Result::Ok(ret);
@@ -294,7 +291,7 @@ impl ConditionCompiler {
fn parse_and_or_operator(&self, tokens: Vec<ConditionToken>) -> Result<ConditionToken, String> {
if tokens.len() == 0 {
// Must not be called with a length of 0
return Result::Err("unknown error.".to_string());
return Result::Err("Unknown error.".to_string());
}
// First, group the tokens connected by AND or OR, such as "selection1" and "not selection2" in an expression like "selection1 and not selection2".
@@ -302,7 +299,7 @@ impl ConditionCompiler {
// 先頭又は末尾がAND/ORなのはだめ
if self.is_logical(&tokens[0]) || self.is_logical(&tokens[tokens.len() - 1]) {
return Result::Err("illegal Logical Operator(and, or) was found.".to_string());
return Result::Err("An illegal logical operator(and, or) was found.".to_string());
}
// OperandContainer and LogicalOperator (And and Or) tokens alternate, so push each into its own list
@@ -311,7 +308,9 @@ impl ConditionCompiler {
for (i, token) in tokens.into_iter().enumerate() {
if (i % 2 == 1) != self.is_logical(&token) {
// Odd indexes should hold a LogicalOperator and even indexes an OperandContainer
return Result::Err("The use of logical operator(and, or) was wrong.".to_string());
return Result::Err(
"The use of a logical operator(and, or) was wrong.".to_string(),
);
}
if i % 2 == 0 {
@@ -354,21 +353,21 @@ impl ConditionCompiler {
// As noted above, there should never be three or more entries.
if sub_tokens.len() >= 3 {
return Result::Err(
"unknown error. maybe it's because there are multiple name of selection node."
"Unknown error. Maybe it is because there are multiple names of selection nodes."
.to_string(),
);
}
// Zero should be impossible here
if sub_tokens.len() == 0 {
return Result::Err("unknown error.".to_string());
return Result::Err("Unknown error.".to_string());
}
// If there is only one token, it cannot be NOT.
if sub_tokens.len() == 1 {
let operand_subtoken = sub_tokens.into_iter().next().unwrap();
if let ConditionToken::Not = operand_subtoken {
return Result::Err("illegal not was found.".to_string());
return Result::Err("An illegal not was found.".to_string());
}
return Result::Ok(operand_subtoken);
@@ -380,14 +379,14 @@ impl ConditionCompiler {
let second_token = sub_tokens_ite.next().unwrap();
if let ConditionToken::Not = first_token {
if let ConditionToken::Not = second_token {
return Result::Err("not is continuous.".to_string());
return Result::Err("Not is continuous.".to_string());
} else {
let not_container = ConditionToken::NotContainer(vec![second_token]);
return Result::Ok(not_container);
}
} else {
return Result::Err(
"unknown error. maybe it's because there are multiple name of selection node."
"Unknown error. Maybe it is because there are multiple names of selection nodes."
.to_string(),
);
}
@@ -450,7 +449,7 @@ impl ConditionCompiler {
// Convert to a NotSelectionNode
if let ConditionToken::NotContainer(sub_tokens) = token {
if sub_tokens.len() > 1 {
return Result::Err("unknown error".to_string());
return Result::Err("Unknown error".to_string());
}
let select_sub_node =
@@ -459,7 +458,7 @@ impl ConditionCompiler {
return Result::Ok(Box::new(select_not_node));
}
return Result::Err("unknown error".to_string());
return Result::Err("Unknown error".to_string());
}
/// True if the ConditionToken is an And or Or token
@@ -549,7 +548,7 @@ mod tests {
);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -595,7 +594,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -642,7 +641,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1204,7 +1203,7 @@ mod tests {
assert_eq!(
rule_node.init(),
Err(vec![
"There are no condition node under detection.".to_string()
"There is no condition node under detection.".to_string()
])
);
}
@@ -1226,7 +1225,9 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. An unusable character was found.".to_string()],
vec![
"A condition parse error has occured. An unusable character was found.".to_string(),
],
);
}
@@ -1247,7 +1248,9 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. expected ')'. but not found.".to_string()],
vec![
"A condition parse error has occured. ')' was expected but not found.".to_string(),
],
);
}
@@ -1268,7 +1271,9 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. expected '('. but not found.".to_string()],
vec![
"A condition parse error has occured. '(' was expected but not found.".to_string(),
],
);
}
@@ -1289,7 +1294,9 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. expected ')'. but not found.".to_string()],
vec![
"A condition parse error has occured. ')' was expected but not found.".to_string(),
],
);
}
@@ -1308,7 +1315,7 @@ mod tests {
output: 'Service name : %param1%\nMessage : Event Log Service Stopped\nResults: Selective event log manipulation may follow this event.'
"#;
check_rule_parse_error(rule_str,vec!["condition parse error has occured. unknown error. maybe it\'s because there are multiple name of selection node.".to_string()]);
check_rule_parse_error(rule_str,vec!["A condition parse error has occured. Unknown error. Maybe it is because there are multiple names of selection nodes.".to_string()]);
}
#[test]
@@ -1329,7 +1336,7 @@ mod tests {
check_rule_parse_error(
rule_str,
vec![
"condition parse error has occured. illegal Logical Operator(and, or) was found."
"A condition parse error has occured. An illegal logical operator(and, or) was found."
.to_string(),
],
);
@@ -1353,7 +1360,7 @@ mod tests {
check_rule_parse_error(
rule_str,
vec![
"condition parse error has occured. illegal Logical Operator(and, or) was found."
"A condition parse error has occured. An illegal logical operator(and, or) was found."
.to_string(),
],
);
@@ -1374,7 +1381,7 @@ mod tests {
output: 'Service name : %param1%\nMessage : Event Log Service Stopped\nResults: Selective event log manipulation may follow this event.'
"#;
check_rule_parse_error(rule_str,vec!["condition parse error has occured. The use of logical operator(and, or) was wrong.".to_string()]);
check_rule_parse_error(rule_str,vec!["A condition parse error has occured. The use of a logical operator(and, or) was wrong.".to_string()]);
}
#[test]
@@ -1394,7 +1401,7 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. illegal not was found.".to_string()],
vec!["A condition parse error has occured. An illegal not was found.".to_string()],
);
}
@@ -1415,7 +1422,7 @@ mod tests {
check_rule_parse_error(
rule_str,
vec!["condition parse error has occured. not is continuous.".to_string()],
vec!["A condition parse error has occured. Not is continuous.".to_string()],
);
}
}

View File

@@ -175,7 +175,7 @@ impl TimeFrameInfo {
} else {
AlertMessage::alert(
&mut std::io::stderr().lock(),
format!("timeframe is invalid.input value:{}", value),
format!("Timeframe is invalid. Input value:{}", value),
)
.ok();
}
@@ -207,7 +207,7 @@ pub fn get_sec_timeframe(timeframe: &Option<TimeFrameInfo>) -> Option<i64> {
Err(err) => {
AlertMessage::alert(
&mut std::io::stderr().lock(),
format!("timeframe num is invalid. timeframe.{}", err),
format!("Timeframe number is invalid. timeframe.{}", err),
)
.ok();
return Option::None;
@@ -755,7 +755,7 @@ mod tests {
let mut rule_node = create_rule("testpath".to_string(), test);
let error_checker = rule_node.init();
if error_checker.is_err() {
assert!(false, "failed to init rulenode");
assert!(false, "Failed to init rulenode");
}
for record_str in records_str {
match serde_json::from_str(record_str) {
@@ -769,7 +769,7 @@ mod tests {
assert_eq!(result, &true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}

View File

@@ -50,7 +50,7 @@ impl LeafMatcher for MinlengthMatcher {
let min_length = select_value.as_i64();
if min_length.is_none() {
let errmsg = format!(
"min_length value should be Integer. [key:{}]",
"min_length value should be an integer. [key:{}]",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -99,7 +99,7 @@ impl LeafMatcher for RegexesFileMatcher {
};
if value.is_none() {
let errmsg = format!(
"regexes value should be String. [key:{}]",
"regexes value should be a string. [key:{}]",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -158,7 +158,7 @@ impl LeafMatcher for AllowlistFileMatcher {
};
if value.is_none() {
let errmsg = format!(
"allowlist value should be String. [key:{}]",
"allowlist value should be a string. [key:{}]",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -253,7 +253,7 @@ impl LeafMatcher for DefaultMatcher {
};
if yaml_value.is_none() {
let errmsg = format!(
"unknown error occured. [key:{}]",
"An unknown error occured. [key:{}]",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -275,7 +275,7 @@ impl LeafMatcher for DefaultMatcher {
};
if pipe_element.is_none() {
let errmsg = format!(
"unknown pipe element was specified. key:{}",
"An unknown pipe element was specified. key:{}",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -286,7 +286,7 @@ impl LeafMatcher for DefaultMatcher {
if self.pipes.len() >= 2 {
// Multiple pipes are not currently supported
let errmsg = format!(
"multiple pipe element can't be used. key:{}",
"Multiple pipe elements cannot be used. key:{}",
utils::concat_selection_key(key_list)
);
return Result::Err(vec![errmsg]);
@@ -309,7 +309,7 @@ impl LeafMatcher for DefaultMatcher {
let re_result = Regex::new(&pattern);
if re_result.is_err() {
let errmsg = format!(
"cannot parse regex. [regex:{}, key:{}]",
"Cannot parse regex. [regex:{}, key:{}]",
pattern,
utils::concat_selection_key(key_list)
);
@@ -779,7 +779,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -846,7 +846,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -880,7 +880,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -913,7 +913,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -946,7 +946,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -980,7 +980,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1014,7 +1014,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1048,7 +1048,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1082,7 +1082,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1116,7 +1116,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1149,7 +1149,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1186,7 +1186,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1223,7 +1223,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1259,7 +1259,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1304,7 +1304,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1349,7 +1349,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1394,7 +1394,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1439,7 +1439,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1484,7 +1484,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1529,7 +1529,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1562,7 +1562,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1595,7 +1595,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1628,7 +1628,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1721,7 +1721,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1756,7 +1756,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1791,7 +1791,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -1826,7 +1826,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}

View File

@@ -102,6 +102,9 @@ impl RuleNode {
}
return ret;
}
pub fn check_exist_countdata(&self) -> bool {
self.countdata.len() > 0
}
}
/// Node representing the detection section of a rule file
@@ -141,7 +144,7 @@ impl DetectionNode {
let mut keys = self.name_to_selection.keys().clone();
if keys.len() >= 2 {
return Result::Err(vec![
"There are no condition node under detection.".to_string()
"There is no condition node under detection.".to_string()
]);
}
@@ -188,7 +191,7 @@ impl DetectionNode {
fn parse_name_to_selection(&mut self, detection_yaml: &Yaml) -> Result<(), Vec<String>> {
let detection_hash = detection_yaml.as_hash();
if detection_hash.is_none() {
return Result::Err(vec!["not found detection node".to_string()]);
return Result::Err(vec!["Detection node was not found.".to_string()]);
}
// Parse the selections.
@@ -226,7 +229,7 @@ impl DetectionNode {
// It is an error if there is no selection node
if self.name_to_selection.len() == 0 {
return Result::Err(vec![
"There are no selection node under detection.".to_string()
"There is no selection node under detection.".to_string()
]);
}
@@ -357,7 +360,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -390,7 +393,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -423,7 +426,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -509,7 +512,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -571,7 +574,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -640,7 +643,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -687,7 +690,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -735,7 +738,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -802,7 +805,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -869,7 +872,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -918,7 +921,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -940,7 +943,7 @@ mod tests {
assert_eq!(
rule_node.init(),
Err(vec![
"unknown pipe element was specified. key:detection -> selection -> Channel|failed"
"An unknown pipe element was specified. key:detection -> selection -> Channel|failed"
.to_string()
])
);
@@ -959,7 +962,7 @@ mod tests {
assert_eq!(
rule_node.init(),
Err(vec!["not found detection node".to_string()])
Err(vec!["Detection node was not found.".to_string()])
);
}
@@ -994,7 +997,7 @@ mod tests {
);
}
Err(_rec) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}

View File

@@ -392,7 +392,7 @@ impl SelectionNode for LeafSelectionNode {
if self.select_value.is_badvalue() {
return Result::Err(vec![format!(
"Cannot parse yaml file. key:{}",
"Cannot parse yml file. key:{}",
utils::concat_selection_key(&match_key_list)
)]);
}
@@ -446,7 +446,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -482,7 +482,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -517,7 +517,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -552,7 +552,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), true);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}
@@ -587,7 +587,7 @@ mod tests {
assert_eq!(rule_node.select(&"testpath".to_owned(), &recinfo), false);
}
Err(_) => {
assert!(false, "failed to parse json record.");
assert!(false, "Failed to parse json record.");
}
}
}

View File

@@ -49,7 +49,7 @@ pub fn check_allowlist(target: &str, regexes: &Vec<Regex>) -> bool {
pub fn read_txt(filename: &str) -> Result<Vec<String>, String> {
let f = File::open(filename);
if f.is_err() {
let errmsg = format!("cannot open file. [file:{}]", filename);
let errmsg = format!("Cannot open file. [file:{}]", filename);
return Result::Err(errmsg);
}
let reader = BufReader::new(f.unwrap());
@@ -62,7 +62,7 @@ pub fn read_txt(filename: &str) -> Result<Vec<String>, String> {
}
pub fn read_csv(filename: &str) -> Result<Vec<Vec<String>>, String> {
let mut f = File::open(filename).expect("file not found!!!");
let mut f = File::open(filename).expect("File not found!!!");
let mut contents: String = String::new();
let mut ret = vec![];
let read_res = f.read_to_string(&mut contents);
@@ -85,16 +85,23 @@ pub fn read_csv(filename: &str) -> Result<Vec<Vec<String>>, String> {
return Result::Ok(ret);
}
pub fn is_target_event_id(s: &String) -> bool {
return configs::CONFIG.read().unwrap().target_eventids.is_target(s);
}
pub fn get_event_id_key() -> String {
return "Event.System.EventID".to_string();
}
/// Checks the type of the serde_json::Value and returns it as a string.
pub fn get_serde_number_to_string(value: &serde_json::Value) -> String {
pub fn get_serde_number_to_string(value: &serde_json::Value) -> Option<String> {
if value.is_string() {
return value.as_str().unwrap_or("").to_string();
return Option::Some(value.as_str().unwrap_or("").to_string());
} else if value.is_object() {
// Object types are not expected record values, so return None.
return Option::None;
} else {
return value.to_string();
return Option::Some(value.to_string());
}
}
@@ -159,6 +166,7 @@ pub fn create_tokio_runtime() -> Runtime {
mod tests {
use crate::detections::utils;
use regex::Regex;
use serde_json::Value;
#[test]
fn test_check_regex() {
@@ -187,4 +195,62 @@ mod tests {
let commandline = "\"C:\\Program Files\\Google\\Update\\GoogleUpdate2.exe\"";
assert!(false == utils::check_allowlist(commandline, &allowlist));
}
#[test]
/// Test confirming that a numeric serde_json::Value is returned as a string
fn test_get_serde_number_to_string() {
let json_str = r##"
{
"Event": {
"System": {
"EventID": 11111
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
assert_eq!(
utils::get_serde_number_to_string(&event_record["Event"]["System"]["EventID"]).unwrap(),
"11111".to_owned()
);
}
#[test]
/// Test confirming that a string-typed serde_json::Value is returned as a string
fn test_get_serde_number_serde_string_to_string() {
let json_str = r##"
{
"Event": {
"EventData": {
"ComputerName": "HayabusaComputer1"
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
assert_eq!(
utils::get_serde_number_to_string(&event_record["Event"]["EventData"]["ComputerName"])
.unwrap(),
"HayabusaComputer1".to_owned()
);
}
#[test]
/// Test confirming that None is returned when an object-typed serde_json::Value is passed by mistake
fn test_get_serde_number_serde_object_ret_none() {
let json_str = r##"
{
"Event": {
"EventData": {
"ComputerName": "HayabusaComputer1"
}
}
}
"##;
let event_record: Value = serde_json::from_str(json_str).unwrap();
assert!(utils::get_serde_number_to_string(&event_record["Event"]["EventData"]).is_none());
}
}

View File

@@ -10,6 +10,8 @@ use hayabusa::omikuji::Omikuji;
use hayabusa::{afterfact::after_fact, detections::utils};
use hayabusa::{detections::configs, timeline::timeline::Timeline};
use hhmmss::Hhmmss;
use pbr::ProgressBar;
use serde_json::Value;
use std::{
fs::{self, File},
path::PathBuf,
@@ -36,7 +38,7 @@ fn main() {
if !filepath.ends_with(".evtx") {
AlertMessage::alert(
&mut std::io::stderr().lock(),
"--filepath is only accepted evtx file.".to_owned(),
"--filepath only accepts .evtx files.".to_owned(),
)
.ok();
return;
@@ -47,7 +49,7 @@ fn main() {
if evtx_files.len() == 0 {
AlertMessage::alert(
&mut std::io::stderr().lock(),
"No exist evtx file.".to_owned(),
"No .evtx files were found.".to_owned(),
)
.ok();
return;
@@ -116,12 +118,12 @@ fn analysis_files(evtx_files: Vec<PathBuf>) {
.unwrap()
.args
.value_of("level")
.unwrap_or("INFO")
.unwrap_or("informational")
.to_uppercase();
// TODO: move this to config.rs
// ./target/debug/hayabusa -f ./test_files/evtx/test1.evtx --start-time 2014-11-28T12:00:09Z
let start_time= if let Some(s_time) = configs::CONFIG
let start_time = if let Some(s_time) = configs::CONFIG
.read()
.unwrap()
.args
@@ -144,7 +146,7 @@ fn analysis_files(evtx_files: Vec<PathBuf>) {
.args
.value_of("end-time")
{
match s_time.parse::<DateTime<Utc>>() {
match e_time.parse::<DateTime<Utc>>() {
Ok(dt)=> Some(dt),
Err(err) => {
AlertMessage::alert(&mut std::io::stderr().lock(), format!("start-time field: {}", err)).ok();
@@ -161,15 +163,17 @@ fn analysis_files(evtx_files: Vec<PathBuf>) {
level,
configs::CONFIG.read().unwrap().args.value_of("rules"),
);
let mut pb = ProgressBar::new(evtx_files.len() as u64);
let mut detection = detection::Detection::new(rule_files);
for evtx_file in evtx_files {
if configs::CONFIG.read().unwrap().args.is_present("verbose") {
println!("check target evtx FilePath: {:?}", &evtx_file);
println!("Checking target evtx FilePath: {:?}", &evtx_file);
}
detection = analysis_file(evtx_file, detection);
pb.inc();
}
after_fact();
detection.print_unique_results();
}
// Windowsイベントログファイルを1ファイル分解析する。
@@ -210,6 +214,21 @@ fn analysis_file(
}
let data = record_result.unwrap().data;
// Filter using target_eventids.txt.
let eventid = utils::get_event_value(&utils::get_event_id_key(), &data);
if eventid.is_some() {
let is_target = match eventid.unwrap() {
Value::String(s) => utils::is_target_event_id(s),
Value::Number(n) => utils::is_target_event_id(&n.to_string()),
_ => true, // if the EventId cannot be obtained from the record, do not filter it
};
if !is_target {
continue;
}
}
// Convert to the EvtxRecordInfo struct
let data_string = data.to_string();
let record_info = EvtxRecordInfo::new((&filepath_disp).to_string(), data, data_string);
records_per_detect.push(record_info);

View File

@@ -15,8 +15,8 @@ use yaml_rust::YamlLoader;
pub struct ParseYaml {
pub files: Vec<(String, yaml_rust::Yaml)>,
pub rulecounter: HashMap<String, u128>,
pub ignore_count: u128,
pub parseerror_count: u128,
pub ignorerule_count: u128,
pub errorrule_count: u128,
}
impl ParseYaml {
@@ -24,8 +24,8 @@ impl ParseYaml {
ParseYaml {
files: Vec::new(),
rulecounter: HashMap::new(),
ignore_count: 0,
parseerror_count: 0,
ignorerule_count: 0,
errorrule_count: 0,
}
}
@@ -73,7 +73,7 @@ impl ParseYaml {
read_content.unwrap_err()
),
)?;
self.parseerror_count += 1;
self.errorrule_count += 1;
return io::Result::Ok(ret);
}
@@ -83,12 +83,12 @@ impl ParseYaml {
AlertMessage::warn(
&mut std::io::stdout().lock(),
format!(
"fail to parse as yaml: {}\n{} ",
"Failed to parse yml: {}\n{} ",
entry.path().display(),
yaml_contents.unwrap_err()
),
)?;
self.parseerror_count += 1;
self.errorrule_count += 1;
return io::Result::Ok(ret);
}
@@ -105,35 +105,27 @@ impl ParseYaml {
.filter_map(|(filepath, yaml_doc)| {
// Ignore rules whose ignore flag is set to ON.
if yaml_doc["ignore"].as_bool().unwrap_or(false) {
self.ignore_count += 1;
self.ignorerule_count += 1;
return Option::None;
}
self.rulecounter.insert(
yaml_doc["rulesection"]
.as_str()
.unwrap_or("other")
.to_string(),
yaml_doc["ruletype"].as_str().unwrap_or("Other").to_string(),
self.rulecounter
.get(
&yaml_doc["rulesection"]
.as_str()
.unwrap_or("other")
.to_string(),
)
.get(&yaml_doc["ruletype"].as_str().unwrap_or("Other").to_string())
.unwrap_or(&0)
+ 1,
);
if configs::CONFIG.read().unwrap().args.is_present("verbose") {
println!("Loaded yml FilePath: {}", filepath);
println!("Loaded yml file path: {}", filepath);
}
// Ignore rules below the specified level
let doc_level = &yaml_doc["level"]
.as_str()
.unwrap_or("LOW")
.unwrap_or("informational")
.to_string()
.to_uppercase();
let doc_level_num = configs::LEVELMAP.get(doc_level).unwrap_or(&2);
let args_level_num = configs::LEVELMAP.get(level).unwrap_or(&2);
let doc_level_num = configs::LEVELMAP.get(doc_level).unwrap_or(&1);
let args_level_num = configs::LEVELMAP.get(level).unwrap_or(&1);
if doc_level_num < args_level_num {
return Option::None;
}
@@ -187,19 +179,19 @@ mod tests {
}
#[test]
/// if no "level" argument value is specified, the default level (LOW) is applied
/// if no "level" argument value is specified, the default level (informational) is applied
fn test_default_level_read_yaml() {
let mut yaml = yaml::ParseYaml::new();
let path = Path::new("test_files/rules/level_yaml");
yaml.read_dir(path.to_path_buf(), &"").unwrap();
assert_eq!(yaml.files.len(), 4);
assert_eq!(yaml.files.len(), 5);
}
#[test]
fn test_info_level_read_yaml() {
let mut yaml = yaml::ParseYaml::new();
let path = Path::new("test_files/rules/level_yaml");
yaml.read_dir(path.to_path_buf(), &"INFO").unwrap();
yaml.read_dir(path.to_path_buf(), &"informational").unwrap();
assert_eq!(yaml.files.len(), 5);
}
#[test]