Merge branch 'main' into feature/emit_csv2
@@ -13,7 +13,7 @@ ServiceName,Event.EventData.ServiceName
 ImagePath,Event.EventData.ImagePath
 ContextInfo,Event.EventData.ContextInfo
 Path,Event.EventData.Path
-ScriptBlockText,Event.EventData.ScriptBlockText#Name
+ScriptBlockText,Event.EventData.ScriptBlockText
 MemberName,Event.EventData.SubjectUserName
 MemberSid,Event.EventData.SubjectUserSid
 TargetSid,Event.EventData.TargetSid
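
Each row in this mapping file pairs an output field name with a dotted path into the serialized event record JSON; the fix above drops the stray "#Name" suffix so ScriptBlockText points at the real Event.EventData.ScriptBlockText key. How the mapping is consumed is not part of this commit, but as a rough sketch (the resolve_path helper below is hypothetical, not project code), resolving such a dotted path against a serde_json::Value could look like this:

use serde_json::Value;

// Hypothetical helper: walk a dotted path such as
// "Event.EventData.ScriptBlockText" through a serde_json::Value.
fn resolve_path<'a>(record: &'a Value, dotted_path: &str) -> Option<&'a Value> {
    dotted_path
        .split('.')
        .try_fold(record, |current, key| current.get(key))
}

fn main() {
    let record: Value = serde_json::from_str(
        r#"{"Event":{"EventData":{"ScriptBlockText":"Get-Process"}}}"#,
    )
    .unwrap();
    // Prints: Some(String("Get-Process"))
    println!("{:?}", resolve_path(&record, "Event.EventData.ScriptBlockText"));
}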

credits.txt (new file, 1 line)
@@ -0,0 +1 @@
+Zach Mathis (田中ザック) Yamato Security Yea! Project Leader

@@ -48,7 +48,7 @@ fn build_app() -> clap::App<'static, 'static> {
         .arg(Arg::from_usage("--rfc-2822 'output date and time in RFC 2822 format. Example: Mon, 07 Aug 2006 12:34:56 -0600'"))
         .arg(Arg::from_usage("-l --lang=[LANG] 'output language'"))
         .arg(Arg::from_usage("-u --utc 'output time in UTC format(default: local time)'"))
-        .arg(Arg::from_usage("-d --directory 'event log files directory'"))
+        .arg(Arg::from_usage("-d --directory=[DIRECTORY] 'event log files directory'"))
         .arg(Arg::from_usage("-s --statistics 'event statistics'"))
         .arg(Arg::from_usage("--credits 'Zachary Mathis, Akira Nishikawa'"))
 }
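
Declaring the option as -d --directory=[DIRECTORY] makes clap 2.x treat --directory as an option that carries a value instead of a bare flag, which is what lets main.rs (further down in this commit) read it back with value_of("directory"). A minimal self-contained sketch of that pattern, with a made-up binary name and argument list:

extern crate clap;

use clap::{App, Arg};

fn main() {
    // Same from_usage style as build_app(): "=[DIRECTORY]" turns the
    // switch into an option that takes a value.
    let matches = App::new("example")
        .arg(Arg::from_usage(
            "-d --directory=[DIRECTORY] 'event log files directory'",
        ))
        .get_matches_from(vec!["example", "--directory", "./logs"]);

    // Prints: directory = Some("./logs")
    println!("directory = {:?}", matches.value_of("directory"));
}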

@@ -1,12 +1,12 @@
-extern crate chrono;
 extern crate csv;
 
 use crate::detections::print::MESSAGES;
+use std::path::PathBuf;
 use crate::detections::rule;
 use crate::detections::rule::RuleNode;
 use crate::yaml::ParseYaml;
-use evtx::EvtxParser;
+use evtx::err;
+use evtx::{EvtxParser, SerializedEvtxRecord};
 use serde_json::{Error, Value};
 
 const DIRPATH_RULES: &str = "rules";
@@ -20,37 +20,92 @@ impl Detection {
         Detection {}
     }
 
-    pub fn start(&mut self, mut parser: EvtxParser<std::fs::File>) {
-        // serialize from .etvx to jsons
-        let event_records: Vec<Value> = parser
-            .records_json()
-            .filter_map(|result_record| {
-                if result_record.is_err() {
-                    eprintln!("{}", result_record.unwrap_err());
-                    return Option::None;
-                }
-
-                //// https://rust-lang-nursery.github.io/rust-cookbook/encoding/complex.html
-                let result_json: Result<Value, Error> =
-                    serde_json::from_str(&result_record.unwrap().data);
-                if result_json.is_err() {
-                    eprintln!("{}", result_json.unwrap_err());
-                    return Option::None;
-                }
-                return result_json.ok();
-            })
-            .collect();
+    pub fn start(&mut self, evtx_files: Vec<PathBuf>) {
+        if evtx_files.is_empty() {
+            return;
+        }
 
+        // parse rule files
+        let mut selection_rules = self.parse_rule_files();
+        if selection_rules.is_empty() {
+            return;
+        }
+
+        // serialize from evtx files to json
+        let evtx_records = self.serialize_evtx_to_jsons(evtx_files);
+
+        // select rule files and collect message
+        let mut message = MESSAGES.lock().unwrap();
+        selection_rules.iter_mut().for_each(|rule| {
+            evtx_records.iter().for_each(|event_record| {
+                if !rule.select(event_record) {
+                    return;
+                }
+
+                message.insert(
+                    event_record,
+                    rule.yaml["output"].as_str().unwrap_or("").to_string(),
+                )
+            });
+        });
+
+        // output message
+        message.print();
+    }
+
+    // serialize evtx files to json
+    fn serialize_evtx_to_jsons(&self, evtx_files: Vec<PathBuf>) -> Vec<Value> {
+        return evtx_files
+            .iter()
+            .filter_map(|evtx_file| {
+                // convert to evtx parser
+                match EvtxParser::from_path(evtx_file) {
+                    Ok(parser) => Option::Some(parser),
+                    Err(e) => {
+                        eprintln!("{}", e);
+                        return Option::None;
+                    }
+                }
+            })
+            .map(|mut cur| {
+                let ret: Vec<err::Result<SerializedEvtxRecord<String>>> =
+                    cur.records_json().collect();
+                return ret;
+            })
+            .flatten()
+            .filter_map(|json_record| {
+                // convert from evtx parser to evtx json string records
+                if json_record.is_ok() {
+                    return Option::Some(json_record.unwrap());
+                } else {
+                    eprintln!("{}", json_record.unwrap_err());
+                    return Option::None;
+                }
+            })
+            .filter_map(|json_record| {
+                // serialize json from json string
+                let result_json: Result<Value, Error> = serde_json::from_str(&json_record.data); //// https://rust-lang-nursery.github.io/rust-cookbook/encoding/complex.html
+                if result_json.is_err() {
+                    eprintln!("{}", result_json.unwrap_err());
+                    return Option::None;
+                } else {
+                    return result_json.ok();
+                }
+            })
+            .collect();
+    }
+
+    fn parse_rule_files(&self) -> Vec<RuleNode> {
         // load rule files
         let mut rulefile_loader = ParseYaml::new();
         let resutl_readdir = rulefile_loader.read_dir(DIRPATH_RULES);
         if resutl_readdir.is_err() {
             eprintln!("{}", resutl_readdir.unwrap_err());
-            return;
+            return vec![];
         }
 
         // parse rule files
-        let mut selection_rules: Vec<RuleNode> = rulefile_loader
+        return rulefile_loader
             .files
             .into_iter()
             .map(|rule_file| rule::parse_rule(rule_file))
@@ -78,20 +133,5 @@ impl Detection {
                 return Option::None;
             })
             .collect();
-
-        // selection rule files and collect message
-        let mut message = MESSAGES.lock().unwrap();
-        selection_rules.iter_mut().for_each(|rule| {
-            event_records.iter().for_each(|event_record| {
-                if !rule.select(event_record) {
-                    return;
-                }
-
-                message.insert(
-                    event_record,
-                    rule.yaml["output"].as_str().unwrap_or("").to_string(),
-                )
-            });
-        });
     }
 }
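
Per file, the new serialize_evtx_to_jsons() boils down to three steps: open a parser with EvtxParser::from_path, pull JSON-string records via records_json(), and deserialize each record's .data field with serde_json, logging and skipping anything that fails instead of aborting the run. A minimal single-file sketch under those same assumptions, with simplified error handling and a placeholder path:

extern crate evtx;
extern crate serde_json;

use evtx::EvtxParser;
use serde_json::Value;

fn main() {
    // Placeholder path; any .evtx file will do.
    let mut parser = EvtxParser::from_path("test_files/evtx/test1.evtx").unwrap();

    // records_json() yields Result<SerializedEvtxRecord<String>, _>;
    // each record's .data field holds the event as a JSON string.
    let records: Vec<Value> = parser
        .records_json()
        .filter_map(|record| record.ok())
        .filter_map(|record| serde_json::from_str(&record.data).ok())
        .collect();

    println!("parsed {} records", records.len());
}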

src/main.rs (90 lines changed)
@@ -1,39 +1,91 @@
 extern crate serde;
+extern crate serde_derive;
 
-use evtx::EvtxParser;
-use quick_xml::de::DeError;
-use std::{fs, path::PathBuf, process};
+use std::{fs, path::PathBuf};
 use yamato_event_analyzer::afterfact::after_fact;
 use yamato_event_analyzer::detections::configs;
 use yamato_event_analyzer::detections::detection;
 use yamato_event_analyzer::omikuji::Omikuji;
 
-fn main() -> Result<(), DeError> {
+fn main() {
     if let Some(filepath) = configs::singleton().args.value_of("filepath") {
-        parse_file(&filepath);
+        detect_files(vec![PathBuf::from(filepath)]);
+    } else if let Some(directory) = configs::singleton().args.value_of("directory") {
+        let evtx_files = collect_evtxfiles(&directory);
+        detect_files(evtx_files);
+    } else if configs::singleton().args.is_present("credits") {
+        print_credits();
     }
-
-    after_fact();
-
-    Ok(())
 }
 
-fn parse_file(filepath: &str) {
-    let fp = PathBuf::from(filepath);
-    let parser = match EvtxParser::from_path(fp) {
-        Ok(pointer) => pointer,
-        Err(e) => {
-            eprintln!("{}", e);
-            process::exit(1);
+fn collect_evtxfiles(dirpath: &str) -> Vec<PathBuf> {
+    let entries = fs::read_dir(dirpath);
+    if entries.is_err() {
+        eprintln!("{}", entries.unwrap_err());
+        return vec![];
+    }
+
+    let mut ret = vec![];
+    for e in entries.unwrap() {
+        if e.is_err() {
+            continue;
         }
-    };
 
-    let mut detection = detection::Detection::new();
-    &detection.start(parser);
+        let path = e.unwrap().path();
+        if path.is_dir() {
+            path.to_str().and_then(|path_str| {
+                let subdir_ret = collect_evtxfiles(path_str);
+                ret.extend(subdir_ret);
+                return Option::Some(());
+            });
+        } else {
+            let path_str = path.to_str().unwrap_or("");
+            if path_str.ends_with(".evtx") {
+                ret.push(path);
+            }
+        }
+    }
+
+    return ret;
 }
 
-fn output_with_omikuji(omikuji: Omikuji) {
+fn print_credits() {
+    match fs::read_to_string("./credits.txt") {
+        Ok(contents) => println!("{}", contents),
+        Err(err) => println!("{}", err),
+    }
+}
+
+fn detect_files(evtx_files: Vec<PathBuf>) {
+    let mut detection = detection::Detection::new();
+    &detection.start(evtx_files);
+
+    after_fact();
+}
+
+fn _output_with_omikuji(omikuji: Omikuji) {
     let fp = &format!("art/omikuji/{}", omikuji);
     let content = fs::read_to_string(fp).unwrap();
     println!("{}", content);
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::collect_evtxfiles;
+
+    #[test]
+    fn test_collect_evtxfiles() {
+        let files = collect_evtxfiles("test_files/evtx");
+        assert_eq!(3, files.len());
+
+        files.iter().for_each(|file| {
+            let is_contains = &vec!["test1.evtx", "test2.evtx", "testtest4.evtx"]
+                .into_iter()
+                .any(|filepath_str| {
+                    return file.file_name().unwrap().to_str().unwrap_or("") == filepath_str;
+                });
+            assert_eq!(is_contains, &true);
+        })
+    }
+}

@@ -47,7 +47,7 @@ impl ParseYaml {
             };
         }
         if entry.file_type().ok()?.is_dir() {
-            self.read_dir(entry.path());
+            let _ = self.read_dir(entry.path());
        }
         Some("")
     })
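
The only change in this hunk is binding the recursive read_dir call to `_`. Assuming read_dir returns a Result (its signature is not shown here), `let _ = ...` explicitly acknowledges and discards that value, silencing the unused-Result warning without changing behavior. A tiny standalone illustration of the pattern:

fn might_fail(flag: bool) -> Result<(), String> {
    if flag {
        Ok(())
    } else {
        Err("failed".to_string())
    }
}

fn main() {
    // Calling without using the Result triggers an `unused_must_use` warning:
    // might_fail(false);
    // Binding to `_` explicitly discards the value, so no warning:
    let _ = might_fail(false);
}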

New empty test fixtures:
test_files/evtx/sub/test.txt (new empty file)
test_files/evtx/sub/test2.evtx (new empty file)
test_files/evtx/sub/testtest4.evtx (new empty file)
test_files/evtx/test.txt (new empty file)
test_files/evtx/test1.evtx (new empty file)