Merge branch 'main' of https://github.com/Yamato-Security/hayabusa into feature/463-statistics-add-channel

This commit is contained in:
garigariganzy
2022-09-22 04:11:29 +09:00
19 changed files with 781 additions and 299 deletions
+368 -30
View File
@@ -9,11 +9,14 @@ use bytesize::ByteSize;
use chrono::{DateTime, Local, TimeZone, Utc};
use comfy_table::modifiers::UTF8_ROUND_CORNERS;
use comfy_table::presets::UTF8_FULL;
use csv::QuoteStyle;
use csv::{QuoteStyle, WriterBuilder};
use dashmap::Map;
use itertools::Itertools;
use krapslog::{build_sparkline, build_time_markers};
use lazy_static::lazy_static;
use linked_hash_map::LinkedHashMap;
use std::str::FromStr;
use comfy_table::*;
use hashbrown::{HashMap, HashSet};
@@ -189,7 +192,13 @@ pub fn after_fact(all_record_cnt: usize) {
Box::new(BufWriter::new(io::stdout()))
};
let color_map = set_output_color();
if let Err(err) = emit_csv(&mut target, displayflag, color_map, all_record_cnt as u128) {
if let Err(err) = emit_csv(
&mut target,
displayflag,
color_map,
all_record_cnt as u128,
PROFILES.clone().unwrap_or_default(),
) {
fn_emit_csv_err(Box::new(err));
}
}
@@ -199,10 +208,22 @@ fn emit_csv<W: std::io::Write>(
displayflag: bool,
color_map: HashMap<String, Colors>,
all_record_cnt: u128,
profile: LinkedHashMap<String, String>,
) -> io::Result<()> {
let disp_wtr = BufferWriter::stdout(ColorChoice::Always);
let mut disp_wtr_buf = disp_wtr.buffer();
let mut wtr = csv::WriterBuilder::new().from_writer(writer);
let json_output_flag = configs::CONFIG.read().unwrap().args.json_timeline;
let jsonl_output_flag = configs::CONFIG.read().unwrap().args.jsonl_timeline;
let mut wtr = if json_output_flag || jsonl_output_flag {
WriterBuilder::new()
.delimiter(b'\n')
.double_quote(false)
.quote_style(QuoteStyle::Never)
.from_writer(writer)
} else {
WriterBuilder::new().from_writer(writer)
};
disp_wtr_buf.set_color(ColorSpec::new().set_fg(None)).ok();
@@ -231,11 +252,20 @@ fn emit_csv<W: std::io::Write>(
let mut timestamps: Vec<i64> = Vec::new();
let mut plus_header = true;
let mut detected_record_idset: HashSet<String> = HashSet::new();
for time in message::MESSAGES.clone().into_read_only().keys().sorted() {
if json_output_flag {
wtr.write_field("[")?;
}
for (processed_message_cnt, time) in message::MESSAGES
.clone()
.into_read_only()
.keys()
.sorted()
.enumerate()
{
let multi = message::MESSAGES.get(time).unwrap();
let (_, detect_infos) = multi.pair();
timestamps.push(_get_timestamp(time));
for detect_info in detect_infos {
for (info_idx, detect_info) in detect_infos.iter().enumerate() {
if !detect_info.detail.starts_with("[condition]") {
detected_record_idset.insert(format!("{}_{}", time, detect_info.eventid));
}
@@ -245,7 +275,7 @@ fn emit_csv<W: std::io::Write>(
write_color_buffer(
&disp_wtr,
get_writable_color(None),
&_get_serialized_disp_output(PROFILES.as_ref().unwrap(), true),
&_get_serialized_disp_output(&profile, true),
false,
)
.ok();
@@ -263,6 +293,27 @@ fn emit_csv<W: std::io::Write>(
false,
)
.ok();
} else if json_output_flag {
// JSON output
wtr.write_field(" {")?;
wtr.write_field(&output_json_str(
&detect_info.ext_field,
&profile,
jsonl_output_flag,
))?;
if processed_message_cnt != message::MESSAGES._len() - 1
|| info_idx != detect_infos.len() - 1
{
wtr.write_field(" },")?;
} else {
wtr.write_field(" }")?;
}
} else if jsonl_output_flag {
// JSONL output format
wtr.write_field(format!(
"{{ {} }}",
&output_json_str(&detect_info.ext_field, &profile, jsonl_output_flag)
))?;
} else {
// csv output format
if plus_header {
@@ -332,6 +383,10 @@ fn emit_csv<W: std::io::Write>(
.insert(detect_info.level.to_lowercase(), detect_counts_by_date);
}
}
if json_output_flag {
wtr.write_field("]")?;
}
if displayflag {
println!();
} else {
@@ -385,17 +440,51 @@ fn emit_csv<W: std::io::Write>(
};
write_color_buffer(
&disp_wtr,
get_writable_color(None),
get_writable_color(Some(Color::Rgb(255, 255, 0))),
"Events with hits",
false,
)
.ok();
write_color_buffer(&disp_wtr, get_writable_color(None), " / ", false).ok();
write_color_buffer(
&disp_wtr,
get_writable_color(Some(Color::Rgb(0, 255, 255))),
"Total events",
false,
)
.ok();
write_color_buffer(&disp_wtr, get_writable_color(None), ": ", false).ok();
write_color_buffer(
&disp_wtr,
get_writable_color(Some(Color::Rgb(255, 255, 0))),
&(all_record_cnt - reducted_record_cnt).to_formatted_string(&Locale::en),
false,
)
.ok();
write_color_buffer(&disp_wtr, get_writable_color(None), " / ", false).ok();
write_color_buffer(
&disp_wtr,
get_writable_color(Some(Color::Rgb(0, 255, 255))),
&all_record_cnt.to_formatted_string(&Locale::en),
false,
)
.ok();
write_color_buffer(&disp_wtr, get_writable_color(None), " (", false).ok();
write_color_buffer(
&disp_wtr,
get_writable_color(Some(Color::Rgb(0, 255, 0))),
&format!(
"Saved alerts and events / Total events analyzed: {} / {} (Data reduction: {} events ({:.2}%))",
(all_record_cnt - reducted_record_cnt).to_formatted_string(&Locale::en),
all_record_cnt.to_formatted_string(&Locale::en),
"Data reduction: {} events ({:.2}%)",
reducted_record_cnt.to_formatted_string(&Locale::en),
reducted_percent
),
true,
false,
)
.ok();
write_color_buffer(&disp_wtr, get_writable_color(None), ")", false).ok();
println!();
println!();
_print_unique_results(
@@ -435,21 +524,27 @@ fn _get_serialized_disp_output(data: &LinkedHashMap<String, String>, header: boo
let data_length = &data.len();
let mut ret: Vec<String> = vec![];
if header {
for k in data.keys() {
ret.push(k.to_owned());
for (i, k) in data.keys().enumerate() {
if i == 0 {
ret.push(_format_cellpos(k, ColPos::First))
} else if i == data_length - 1 {
ret.push(_format_cellpos(k, ColPos::Last))
} else {
ret.push(_format_cellpos(k, ColPos::Other))
}
}
} else {
for (i, (_, v)) in data.iter().enumerate() {
if i == 0 {
ret.push(_format_cellpos(v, ColPos::First))
ret.push(_format_cellpos(v, ColPos::First).replace('|', "🦅"))
} else if i == data_length - 1 {
ret.push(_format_cellpos(v, ColPos::Last))
ret.push(_format_cellpos(v, ColPos::Last).replace('|', "🦅"))
} else {
ret.push(_format_cellpos(v, ColPos::Other))
ret.push(_format_cellpos(v, ColPos::Other).replace('|', "🦅"))
}
}
}
let mut disp_serializer = csv::WriterBuilder::new()
let mut disp_serializer = WriterBuilder::new()
.double_quote(false)
.quote_style(QuoteStyle::Never)
.delimiter(b'|')
@@ -457,7 +552,10 @@ fn _get_serialized_disp_output(data: &LinkedHashMap<String, String>, header: boo
.from_writer(vec![]);
disp_serializer.write_record(ret).ok();
String::from_utf8(disp_serializer.into_inner().unwrap_or_default()).unwrap_or_default()
String::from_utf8(disp_serializer.into_inner().unwrap_or_default())
.unwrap_or_default()
.replace('|', "")
.replace('🦅', "|")
}
/// return str position in output file
@@ -731,6 +829,246 @@ fn _get_timestamp(time: &DateTime<Utc>) -> i64 {
}
}
/// Returns the value split into an array for the aliases that must be emitted as
/// JSON arrays (Details, MitreTactics, MitreTags, OtherTags); for any other alias,
/// or when there is nothing to split, returns an empty Vec (meaning "not an array").
///
/// * `target_alias_context` - the profile alias string (e.g. "%Details%") for this column.
/// * `target_data` - the rendered value to split.
fn _get_json_vec(target_alias_context: &str, target_data: &str) -> Vec<String> {
    if target_alias_context.contains("%MitreTactics%")
        || target_alias_context.contains("%OtherTags%")
        || target_alias_context.contains("%MitreTags%")
    {
        // Tag-like aliases are emitted as ": "-separated lists.
        target_data.split(": ").map(|x| x.to_string()).collect()
    } else if target_alias_context.contains("%Details%") {
        // Details entries are separated by " ¦ ".
        let ret: Vec<String> = target_data.split(" ¦ ").map(|x| x.to_string()).collect();
        // A single element with no "key: value" pair inside cannot be split into
        // an array, so signal that with an empty Vec.
        if target_data == ret[0] && !target_data.contains(": ") {
            vec![]
        } else {
            ret
        }
    } else {
        vec![]
    }
}
/// Builds one `"key": value` line matching the JSON output format.
///
/// * `key` - the column name; quoted unless `key_quote_exclude_flag` says it is
///   already quoted.
/// * `value` - the rendered value. Integers and booleans are emitted unquoted;
///   when `concat_flag` is set the value is emitted verbatim (it is already
///   valid JSON, e.g. an array); otherwise it is wrapped in quotes.
fn _create_json_output_format(
    key: &str,
    value: &str,
    key_quote_exclude_flag: bool,
    concat_flag: bool,
) -> String {
    let head = if key_quote_exclude_flag {
        key.to_string()
    } else {
        format!("\"{}\"", key)
    };
    // 4 space is json indent.
    if let Ok(i) = value.parse::<i64>() {
        format!("    {}: {}", head, i)
    } else if let Ok(b) = value.parse::<bool>() {
        format!("    {}: {}", head, b)
    } else if concat_flag {
        // Value is pre-formatted JSON; emit as-is.
        format!("    {}: {}", head, value)
    } else {
        format!("    {}: \"{}\"", head, value)
    }
}
/// Converts a value's string form so it can be embedded in JSON output without
/// producing invalid JSON (escapes backslashes and quotes, balances a leading quote).
///
/// * `input` - the value pre-split on ": ". With one element it is used as-is;
///   with `concat_flag` the whole split is rejoined; otherwise the first element
///   is treated as the key and dropped.
fn _convert_valid_json_str(input: &[&str], concat_flag: bool) -> String {
    let tmp = if input.len() == 1 {
        input[0].to_string()
    } else if concat_flag {
        input.join(": ")
    } else {
        input[1..].join(": ")
    };
    // Nothing to escape for an empty value.
    if tmp.is_empty() {
        return tmp;
    }
    let con_val = tmp.as_str();
    if con_val.starts_with('\"') {
        // NOTE(review): the original also computed an `addition_header` guarded by
        // `!con_val.starts_with('\"')` inside this branch — always empty, dead code;
        // removed with behavior unchanged.
        let addition_quote = if !con_val.ends_with('\"') && concat_flag {
            "\""
        } else if !con_val.ends_with('\"') {
            // Balance the (now escaped) leading quote with an escaped closing quote.
            "\\\""
        } else {
            ""
        };
        [
            con_val.replace('\\', "\\\\").replace('\"', "\\\"").trim(),
            addition_quote,
        ]
        .join("")
    } else {
        con_val
            .replace('\\', "\\\\")
            .replace('\"', "\\\"")
            .trim()
            .to_string()
    }
}
/// Builds the string for one detection record's JSON object.
///
/// * `ext_field` - output column name -> rendered value for this detection.
/// * `profile` - output column name -> profile alias string (e.g. "%Details%"),
///   used to decide how each value is serialized. Every key of `ext_field` must
///   exist in `profile`, otherwise the `unwrap()` below panics.
/// * `jsonl_output_flag` - when true, emit a single-line (JSONL) representation.
fn output_json_str(
    ext_field: &LinkedHashMap<String, String>,
    profile: &LinkedHashMap<String, String>,
    jsonl_output_flag: bool,
) -> String {
    let mut target: Vec<String> = vec![];
    for (k, v) in ext_field.iter() {
        let output_value_fmt = profile.get(k).unwrap();
        // Non-empty only for the array-valued aliases (Details / Mitre* / OtherTags).
        let vec_data = _get_json_vec(output_value_fmt, v);
        if vec_data.is_empty() {
            // Scalar column: escape and emit as a single "key": value pair.
            let tmp_val: Vec<&str> = v.split(": ").collect();
            let output_val =
                _convert_valid_json_str(&tmp_val, output_value_fmt.contains("%RecordInformation%"));
            target.push(_create_json_output_format(
                k,
                &output_val,
                k.starts_with('\"'),
                output_val.starts_with('\"'),
            ));
        } else if output_value_fmt.contains("%Details%") {
            // Details: re-derive "key: value" pairs from the " ¦ "-separated entries.
            let mut stocked_value = vec![];
            let mut key_index_stock = vec![];
            for detail_contents in vec_data.iter() {
                // Split on spaces and extract the tokens that can act as keys
                // (a token ending in ':' starts a new key; its value tokens follow).
                let space_split: Vec<&str> = detail_contents.split(' ').collect();
                let mut tmp_stock = vec![];
                for sp in space_split.iter() {
                    if sp.ends_with(':') && sp != &":" {
                        stocked_value.push(tmp_stock);
                        tmp_stock = vec![];
                        key_index_stock.push(sp.replace(':', "").to_owned());
                    } else {
                        tmp_stock.push(sp.to_owned());
                    }
                }
                stocked_value.push(tmp_stock);
            }
            let mut key_idx = 0;
            let mut output_value_stock = String::default();
            for (value_idx, value) in stocked_value.iter().enumerate() {
                // Pick the key for the pair currently being accumulated; the very
                // first non-empty value group is attributed to the column name `k`.
                let mut tmp = if key_idx >= key_index_stock.len() {
                    String::default()
                } else if value_idx == 0 && !value.is_empty() {
                    k.to_string()
                } else {
                    key_index_stock[key_idx].to_string()
                };
                if !output_value_stock.is_empty() {
                    output_value_stock.push_str(" | ");
                }
                output_value_stock.push_str(&value.join(" "));
                // Check whether, from the point of the previous key onward, an empty
                // value group (which acts as a separator) still remains in the list.
                let is_remain_split_stock = if key_idx == key_index_stock.len() - 2
                    && value_idx < stocked_value.len() - 1
                    && !output_value_stock.is_empty()
                {
                    let mut ret = true;
                    for remain_value in stocked_value[value_idx + 1..].iter() {
                        if remain_value.is_empty() {
                            ret = false;
                            break;
                        }
                    }
                    ret
                } else {
                    false
                };
                if (value_idx < stocked_value.len() - 1 && stocked_value[value_idx + 1].is_empty())
                    || is_remain_split_stock
                {
                    // Looking at the next element: it is absent or holds a key, so the
                    // currently stocked content is confirmed complete — flush it.
                    let output_tmp = format!("{}: {}", tmp, output_value_stock);
                    let output: Vec<&str> = output_tmp.split(": ").collect();
                    let key = _convert_valid_json_str(&[output[0]], false);
                    let fmted_val = _convert_valid_json_str(&output, false);
                    target.push(_create_json_output_format(
                        &key,
                        &fmted_val,
                        key.starts_with('\"'),
                        fmted_val.starts_with('\"'),
                    ));
                    output_value_stock.clear();
                    tmp = String::default();
                    key_idx += 1;
                }
                if value_idx == stocked_value.len() - 1 {
                    // Last group: flush whatever is still stocked.
                    let output_tmp = format!("{}: {}", tmp, output_value_stock);
                    let output: Vec<&str> = output_tmp.split(": ").collect();
                    let key = _convert_valid_json_str(&[output[0]], false);
                    let fmted_val = _convert_valid_json_str(&output, false);
                    target.push(_create_json_output_format(
                        &key,
                        &fmted_val,
                        key.starts_with('\"'),
                        fmted_val.starts_with('\"'),
                    ));
                    key_idx += 1;
                }
            }
        } else if output_value_fmt.contains("%MitreTags%")
            || output_value_fmt.contains("%MitreTactics%")
            || output_value_fmt.contains("%OtherTags%")
        {
            // Tag columns: emit the ": "-separated entries as a JSON string array.
            let tmp_val: Vec<&str> = v.split(": ").collect();
            let key = _convert_valid_json_str(&[k.as_str()], false);
            let values: Vec<&&str> = tmp_val.iter().filter(|x| x.trim() != "").collect();
            let mut value: Vec<String> = vec![];
            if values.is_empty() {
                // No tags at all: omit the column entirely.
                continue;
            }
            for (idx, tag_val) in values.iter().enumerate() {
                if idx == 0 {
                    value.push("[\n".to_string());
                }
                let insert_val = format!(" \"{}\"", tag_val.trim());
                value.push(insert_val);
                if idx != values.len() - 1 {
                    value.push(",\n".to_string());
                }
            }
            value.push("\n ]".to_string());
            let fmted_val = if jsonl_output_flag {
                // JSONL is single-line: strip the newlines used for pretty JSON.
                value.iter().map(|x| x.replace('\n', "")).join("")
            } else {
                value.join("")
            };
            target.push(_create_json_output_format(
                &key,
                &fmted_val,
                key.starts_with('\"'),
                true,
            ));
        }
    }
    if jsonl_output_flag {
        // JSONL output
        // NOTE(review): this strips ALL spaces from each emitted pair, which would
        // also remove spaces inside values — presumably the intent is to strip only
        // the leading JSON indent; confirm the original literal's width.
        target.into_iter().map(|x| x.replace(" ", "")).join(",")
    } else {
        // JSON format output
        target.join(",\n")
    }
}
#[cfg(test)]
mod tests {
use crate::afterfact::_get_serialized_disp_output;
@@ -795,7 +1133,7 @@ mod tests {
(
"%Channel%".to_owned(),
mock_ch_filter
.get("Security")
.get(&"Security".to_ascii_lowercase())
.unwrap_or(&String::default())
.to_string(),
),
@@ -820,7 +1158,7 @@ mod tests {
eventid: test_eventid.to_string(),
detail: String::default(),
record_information: Option::Some(test_recinfo.to_string()),
ext_field: output_profile,
ext_field: output_profile.clone(),
},
expect_time,
&mut profile_converter,
@@ -860,7 +1198,7 @@ mod tests {
+ test_attack
+ "\n";
let mut file: Box<dyn io::Write> = Box::new(File::create("./test_emit_csv.csv").unwrap());
assert!(emit_csv(&mut file, false, HashMap::new(), 1).is_ok());
assert!(emit_csv(&mut file, false, HashMap::new(), 1, output_profile).is_ok());
match read_to_string("./test_emit_csv.csv") {
Err(_) => panic!("Failed to open file."),
Ok(s) => {
@@ -884,28 +1222,28 @@ mod tests {
let test_timestamp = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_header = "Timestamp|Computer|Channel|EventID|Level|RecordID|RuleTitle|Details|RecordInformation\n";
let expect_header = "TimestampComputerChannelEventIDLevelRecordIDRuleTitleDetailsRecordInformation\n";
let expect_tz = test_timestamp.with_timezone(&Local);
let expect_no_header = expect_tz
.clone()
.format("%Y-%m-%d %H:%M:%S%.3f %:z")
.to_string()
+ " | "
+ " "
+ test_computername
+ " | "
+ " "
+ test_channel
+ " | "
+ " "
+ test_eventid
+ " | "
+ " "
+ test_level
+ " | "
+ " "
+ test_recid
+ " | "
+ " "
+ test_title
+ " | "
+ " "
+ output
+ " | "
+ " "
+ test_recinfo
+ "\n";
let mut data: LinkedHashMap<String, String> = LinkedHashMap::new();
+8
View File
@@ -237,6 +237,14 @@ pub struct Config {
#[clap(help_heading = Some("OTHER-ACTIONS"), long = "set-default-profile", value_name = "PROFILE")]
pub set_default_profile: Option<String>,
/// Save the timeline in JSON format (ex: -j -o results.json)
#[clap(help_heading = Some("OUTPUT"), short = 'j', long = "json", requires = "output")]
pub json_timeline: bool,
/// Save the timeline in JSONL format (ex: -J -o results.jsonl)
#[clap(help_heading = Some("OUTPUT"), short = 'J', long = "jsonl", requires = "output")]
pub jsonl_timeline: bool,
/// Do not display result summary
#[clap(help_heading = Some("DISPLAY-SETTINGS"), long = "no-summary")]
pub no_summary: bool,
+10 -7
View File
@@ -264,7 +264,10 @@ impl Detection {
"%Channel%" => {
profile_converter.insert(
"%Channel%".to_string(),
CH_CONFIG.get(ch_str).unwrap_or(ch_str).to_string(),
CH_CONFIG
.get(&ch_str.to_ascii_lowercase())
.unwrap_or(ch_str)
.to_string(),
);
}
"%Level%" => {
@@ -323,7 +326,7 @@ impl Detection {
.filter(|x| TAGS_CONFIG.values().contains(x))
.map(|y| y.to_owned())
.collect();
profile_converter.insert("%MitreTactics%".to_string(), tactics.join(" : "));
profile_converter.insert("%MitreTactics%".to_string(), tactics.join(" ¦ "));
}
"%MitreTags%" => {
let techniques: &Vec<String> = &tag_info
@@ -339,7 +342,7 @@ impl Detection {
make_ascii_titlecase(&mut replaced_tag)
})
.collect();
profile_converter.insert("%MitreTags%".to_string(), techniques.join(" : "));
profile_converter.insert("%MitreTags%".to_string(), techniques.join(" ¦ "));
}
"%OtherTags%" => {
let tags: &Vec<String> = &tag_info
@@ -352,7 +355,7 @@ impl Detection {
})
.map(|y| y.to_owned())
.collect();
profile_converter.insert("%OtherTags%".to_string(), tags.join(" : "));
profile_converter.insert("%OtherTags%".to_string(), tags.join(" ¦ "));
}
_ => {}
@@ -455,7 +458,7 @@ impl Detection {
.filter(|x| TAGS_CONFIG.values().contains(x))
.map(|y| y.to_owned())
.collect();
profile_converter.insert("%MitreTactics%".to_string(), tactics.join(" : "));
profile_converter.insert("%MitreTactics%".to_string(), tactics.join(" ¦ "));
}
"%MitreTags%" => {
let techniques: &Vec<String> = &tag_info
@@ -471,7 +474,7 @@ impl Detection {
make_ascii_titlecase(&mut replaced_tag)
})
.collect();
profile_converter.insert("%MitreTags%".to_string(), techniques.join(" : "));
profile_converter.insert("%MitreTags%".to_string(), techniques.join(" ¦ "));
}
"%OtherTags%" => {
let tags: &Vec<String> = &tag_info
@@ -484,7 +487,7 @@ impl Detection {
})
.map(|y| y.to_owned())
.collect();
profile_converter.insert("%OtherTags%".to_string(), tags.join(" : "));
profile_converter.insert("%OtherTags%".to_string(), tags.join(" ¦ "));
}
_ => {}
}
+3 -3
View File
@@ -105,10 +105,10 @@ pub fn create_output_filter_config(path: &str) -> HashMap<String, String> {
return;
}
let tag_full_str = line[0].trim();
let tag_full_str = line[0].trim().to_ascii_lowercase();
let tag_replace_str = line[1].trim();
ret.insert(tag_full_str.to_owned(), tag_replace_str.to_owned());
ret.insert(tag_full_str, tag_replace_str.to_owned());
});
ret
}
@@ -599,7 +599,7 @@ mod tests {
let actual = create_output_filter_config("test_files/config/channel_abbreviations.txt");
let actual2 = create_output_filter_config("test_files/config/channel_abbreviations.txt");
let expected: HashMap<String, String> = HashMap::from([
("Security".to_string(), "Sec".to_string()),
("Security".to_ascii_lowercase(), "Sec".to_string()),
("xxx".to_string(), "yyy".to_string()),
]);
_check_hashmap_element(&expected, actual);
+4 -9
View File
@@ -297,15 +297,10 @@ fn create_recordinfos(record: &Value) -> String {
let summary: Vec<String> = output
.iter()
.map(|(key, value)| format!("{}:{}", key, value))
.map(|(key, value)| format!("{}: {}", key, value))
.collect();
// 標準出力する時はセルがハイプ区切りになるので、パイプ区切りにしない
if configs::CONFIG.read().unwrap().args.output.is_some() {
summary.join(" | ")
} else {
summary.join(" ")
}
summary.join(" ¦ ")
}
/**
@@ -510,7 +505,7 @@ mod tests {
Ok(record) => {
let ret = utils::create_recordinfos(&record);
// Systemは除外される/属性(_attributesも除外される)/key順に並ぶ
let expected = "AccessMask:%%1369 Process:lsass.exe User:u1".to_string();
let expected = "AccessMask: %%1369 ¦ Process: lsass.exe ¦ User: u1".to_string();
assert_eq!(ret, expected);
}
Err(_) => {
@@ -544,7 +539,7 @@ mod tests {
Ok(record) => {
let ret = utils::create_recordinfos(&record);
// Systemは除外される/属性(_attributesも除外される)/key順に並ぶ
let expected = "Binary:hogehoge Data: Data:Data1 Data:DataData2 Data:DataDataData3"
let expected = "Binary: hogehoge ¦ Data: ¦ Data: Data1 ¦ Data: DataData2 ¦ Data: DataDataData3"
.to_string();
assert_eq!(ret, expected);
}