implement directory option
@@ -13,7 +13,7 @@ ServiceName,Event.EventData.ServiceName
 ImagePath,Event.EventData.ImagePath
 ContextInfo,Event.EventData.ContextInfo
 Path,Event.EventData.Path
-ScriptBlockText,Event.EventData.ScriptBlockText#Name
+ScriptBlockText,Event.EventData.ScriptBlockText
 MemberName,Event.EventData.SubjectUserName
 MemberSid,Event.EventData.SubjectUserSid
 TargetSid,Event.EventData.TargetSid
@@ -47,7 +47,7 @@ fn build_app() -> clap::App<'static, 'static> {
         .arg(Arg::from_usage("--human-readable-timeline=[HUMAN_READABLE_TIMELINE] 'human readable timeline'"))
         .arg(Arg::from_usage("-l --lang=[LANG] 'output language'"))
         .arg(Arg::from_usage("-t --timezone=[TIMEZONE] 'timezone setting'"))
-        .arg(Arg::from_usage("-d --directory 'event log files directory'"))
+        .arg(Arg::from_usage("-d --directory=[DIRECTORY] 'event log files directory'"))
        .arg(Arg::from_usage("-s --statistics 'event statistics'"))
        .arg(Arg::from_usage("-u --update 'signature update'"))
        .arg(Arg::from_usage("--credits 'Zachary Mathis, Akira Nishikawa'"))
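The =[DIRECTORY] suffix changes -d/--directory from a bare switch into an option that takes a value. A minimal sketch of that behavior under clap 2.x (illustration only, not part of the commit; the app name and sample arguments are made up):

    extern crate clap;
    use clap::{App, Arg};

    fn main() {
        let app = App::new("example").arg(Arg::from_usage(
            "-d --directory=[DIRECTORY] 'event log files directory'",
        ));
        // "=[DIRECTORY]" makes the flag carry a value that can be read back by name.
        let matches = app.get_matches_from(vec!["example", "--directory", "./logs"]);
        assert_eq!(matches.value_of("directory"), Some("./logs"));
    }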
@@ -1,13 +1,15 @@
 extern crate chrono;
 extern crate csv;
 
+use std::path::PathBuf;
+
 use crate::detections::print::Message;
 use crate::detections::rule;
 use crate::detections::rule::RuleNode;
 use crate::yaml::ParseYaml;
 
-use chrono::{DateTime, FixedOffset, TimeZone, Utc};
-use evtx::EvtxParser;
+use evtx::err;
+use evtx::{EvtxParser, SerializedEvtxRecord};
 use serde_json::{Error, Value};
 
 const DIRPATH_RULES: &str = "rules";
@@ -21,37 +23,92 @@ impl Detection {
         Detection {}
     }
 
-    pub fn start(&mut self, mut parser: EvtxParser<std::fs::File>) {
-        // serialize from .etvx to jsons
-        let event_records: Vec<Value> = parser
-            .records_json()
-            .filter_map(|result_record| {
-                if result_record.is_err() {
-                    eprintln!("{}", result_record.unwrap_err());
-                    return Option::None;
-                }
-
-                //// https://rust-lang-nursery.github.io/rust-cookbook/encoding/complex.html
-                let result_json: Result<Value, Error> =
-                    serde_json::from_str(&result_record.unwrap().data);
-                if result_json.is_err() {
-                    eprintln!("{}", result_json.unwrap_err());
-                    return Option::None;
-                }
-                return result_json.ok();
-            })
-            .collect();
-
-        // load rule files
-        let mut rulefile_loader = ParseYaml::new();
-        let resutl_readdir = rulefile_loader.read_dir(DIRPATH_RULES);
-        if resutl_readdir.is_err() {
-            eprintln!("{}", resutl_readdir.unwrap_err());
-            return;
-        }
-
-        // parse rule files
-        let mut selection_rules: Vec<RuleNode> = rulefile_loader
+    pub fn start(&mut self, evtx_files: Vec<PathBuf>) {
+        if evtx_files.is_empty() {
+            return;
+        }
+
+        // parse rule files
+        let mut selection_rules = self.parse_rule_files();
+        if selection_rules.is_empty() {
+            return;
+        }
+
+        // serialize from evtx files to json
+        let evtx_records = self.serialize_evtx_to_jsons(evtx_files);
+
+        // select rule files and collect message
+        let mut message = Message::new();
+        selection_rules.iter_mut().for_each(|rule| {
+            evtx_records.iter().for_each(|event_record| {
+                if !rule.select(event_record) {
+                    return;
+                }
+
+                message.insert(
+                    event_record,
+                    rule.yaml["output"].as_str().unwrap_or("").to_string(),
+                )
+            });
+        });
+
+        // output message
+        message.print();
+    }
+
+    // serialize evtx files to json
+    fn serialize_evtx_to_jsons(&self, evtx_files: Vec<PathBuf>) -> Vec<Value> {
+        return evtx_files
+            .iter()
+            .filter_map(|evtx_file| {
+                // convert to evtx parser
+                match EvtxParser::from_path(evtx_file) {
+                    Ok(parser) => Option::Some(parser),
+                    Err(e) => {
+                        eprintln!("{}", e);
+                        return Option::None;
+                    }
+                }
+            })
+            .map(|mut cur| {
+                let ret: Vec<err::Result<SerializedEvtxRecord<String>>> =
+                    cur.records_json().collect();
+                return ret;
+            })
+            .flatten()
+            .filter_map(|json_record| {
+                // convert from evtx parser to evtx json string records
+                if json_record.is_ok() {
+                    return Option::Some(json_record.unwrap());
+                } else {
+                    eprintln!("{}", json_record.unwrap_err());
+                    return Option::None;
+                }
+            })
+            .filter_map(|json_record| {
+                // serialize json from json string
+                let result_json: Result<Value, Error> = serde_json::from_str(&json_record.data); //// https://rust-lang-nursery.github.io/rust-cookbook/encoding/complex.html
+                if result_json.is_err() {
+                    eprintln!("{}", result_json.unwrap_err());
+                    return Option::None;
+                } else {
+                    return result_json.ok();
+                }
+            })
+            .collect();
+    }
+
+    fn parse_rule_files(&self) -> Vec<RuleNode> {
+        // load rule files
+        let mut rulefile_loader = ParseYaml::new();
+        let resutl_readdir = rulefile_loader.read_dir(DIRPATH_RULES);
+        if resutl_readdir.is_err() {
+            eprintln!("{}", resutl_readdir.unwrap_err());
+            return vec![];
+        }
+
+        // parse rule files
+        return rulefile_loader
             .files
             .into_iter()
             .map(|rule_file| rule::parse_rule(rule_file))
@@ -79,23 +136,5 @@ impl Detection {
                 return Option::None;
             })
             .collect();
-
-        // selection rule files and collect message
-        let mut message = Message::new();
-        selection_rules.iter_mut().for_each(|rule| {
-            event_records.iter().for_each(|event_record| {
-                if !rule.select(event_record) {
-                    return;
-                }
-
-                message.insert(
-                    event_record,
-                    rule.yaml["output"].as_str().unwrap_or("").to_string(),
-                )
-            });
-        });
-
-        // output message
-        message.print();
     }
 }
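For reference, a condensed, self-contained sketch of the evtx-to-JSON pipeline that serialize_evtx_to_jsons() builds up above (assumptions: the evtx and serde_json crates as imported in this diff; sample.evtx is a hypothetical input file):

    use std::path::PathBuf;

    use evtx::EvtxParser;
    use serde_json::Value;

    fn main() {
        let evtx_file = PathBuf::from("sample.evtx");
        // Parse one file, iterate its records as JSON strings, and keep
        // only the records that deserialize cleanly into serde_json Values.
        let records: Vec<Value> = match EvtxParser::from_path(&evtx_file) {
            Ok(mut parser) => parser
                .records_json()
                .filter_map(|record| record.ok())
                .filter_map(|record| serde_json::from_str(&record.data).ok())
                .collect(),
            Err(e) => {
                eprintln!("{}", e);
                vec![]
            }
        };
        println!("parsed {} records", records.len());
    }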
src/main.rs:
@@ -2,9 +2,8 @@ extern crate serde;
 #[macro_use]
 extern crate serde_derive;
 
-use evtx::EvtxParser;
 use quick_xml::de::DeError;
-use std::{fs, path::PathBuf, process};
+use std::{fs, path::PathBuf};
 use yamato_event_analyzer::afterfact::after_fact;
 use yamato_event_analyzer::detections::configs;
 use yamato_event_analyzer::detections::detection;
@@ -12,7 +11,10 @@ use yamato_event_analyzer::omikuji::Omikuji;
 
 fn main() -> Result<(), DeError> {
     if let Some(filepath) = configs::singleton().args.value_of("filepath") {
-        parse_file(&filepath);
+        detect_files(vec![PathBuf::from(filepath)]);
+    } else if let Some(directory) = configs::singleton().args.value_of("directory") {
+        let evtx_files = collect_evtxfiles(&directory);
+        detect_files(evtx_files);
     }
 
     after_fact();
@@ -20,22 +22,64 @@ fn main() -> Result<(), DeError> {
     Ok(())
 }
 
-fn parse_file(filepath: &str) {
-    let fp = PathBuf::from(filepath);
-    let parser = match EvtxParser::from_path(fp) {
-        Ok(pointer) => pointer,
-        Err(e) => {
-            eprintln!("{}", e);
-            process::exit(1);
-        }
-    };
+fn collect_evtxfiles(dirpath: &str) -> Vec<PathBuf> {
+    let entries = fs::read_dir(dirpath);
+    if entries.is_err() {
+        eprintln!("{}", entries.unwrap_err());
+        return vec![];
+    }
 
+    let mut ret = vec![];
+    for e in entries.unwrap() {
+        if e.is_err() {
+            continue;
+        }
+
+        let path = e.unwrap().path();
+        if path.is_dir() {
+            path.to_str().and_then(|path_str| {
+                let subdir_ret = collect_evtxfiles(path_str);
+                ret.extend(subdir_ret);
+                return Option::Some(());
+            });
+        } else {
+            let path_str = path.to_str().unwrap_or("");
+            if path_str.ends_with(".evtx") {
+                ret.push(path);
+            }
+        }
+    }
+
+    return ret;
+}
+
+fn detect_files(evtx_files: Vec<PathBuf>) {
     let mut detection = detection::Detection::new();
-    &detection.start(parser);
+    &detection.start(evtx_files);
 }
 
-fn output_with_omikuji(omikuji: Omikuji) {
+fn _output_with_omikuji(omikuji: Omikuji) {
     let fp = &format!("art/omikuji/{}", omikuji);
     let content = fs::read_to_string(fp).unwrap();
     println!("{}", content);
 }
+
+#[cfg(test)]
+mod tests {
+    use crate::collect_evtxfiles;
+
+    #[test]
+    fn test_collect_evtxfiles() {
+        let files = collect_evtxfiles("test_files/evtx");
+        assert_eq!(3, files.len());
+
+        files.iter().for_each(|file| {
+            let is_contains = &vec!["test1.evtx", "test2.evtx", "testtest4.evtx"]
+                .into_iter()
+                .any(|filepath_str| {
+                    return file.file_name().unwrap().to_str().unwrap_or("") == filepath_str;
+                });
+            assert_eq!(is_contains, &true);
+        })
+    }
+}
New files (empty, 0 lines):
test_files/evtx/sub/test.txt
test_files/evtx/sub/test2.evtx
test_files/evtx/sub/testtest4.evtx
test_files/evtx/test.txt
test_files/evtx/test1.evtx