Merge pull request #670 from Yamato-Security/654-enhancement-output-to-json

Added Output to json
This commit is contained in:
DustInDark
2022-09-15 22:32:43 +09:00
committed by GitHub
7 changed files with 313 additions and 79 deletions

View File

@@ -4,7 +4,7 @@
**新機能:**
- XXX
- 解析結果をJSONに出力する機能を追加した (`-j`, `--json-timeline`)。 (#654) (@hitenkoku)
**改善:**

View File

@@ -4,7 +4,7 @@
**New Features:**
- XXX
- You can now save the timeline to JSON files with the `-j, --json-timeline` option. (#654) (@hitenkoku)
**Enhancements:**
@@ -40,6 +40,7 @@
- Customizable output of fields defined at `config/profiles.yaml` and `config/default_profile.yaml`. (#165) (@hitenkoku)
- Implemented the `null` keyword for rule detection. It is used to check if a target field exists or not. (#643) (@hitenkoku)
- Added output to JSON option (`-j` and `--json-timeline`) (#654) (@hitenkoku)
**Enhancements:**

63
Cargo.lock generated
View File

@@ -275,7 +275,7 @@ dependencies = [
"encode_unicode 0.3.6",
"libc",
"once_cell",
"terminal_size 0.1.17",
"terminal_size",
"unicode-width",
"winapi",
]
@@ -545,27 +545,6 @@ dependencies = [
"termcolor",
]
[[package]]
name = "errno"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f639046355ee4f37944e44f60642c6f3a7efa3cf6b78c78a0d989a8ce6c396a1"
dependencies = [
"errno-dragonfly",
"libc",
"winapi",
]
[[package]]
name = "errno-dragonfly"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
dependencies = [
"cc",
"libc",
]
[[package]]
name = "error-chain"
version = "0.12.4"
@@ -781,7 +760,7 @@ dependencies = [
"serde_json",
"static_vcruntime",
"termcolor",
"terminal_size 0.2.1",
"terminal_size",
"tokio",
"yaml-rust",
]
@@ -943,12 +922,6 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "io-lifetimes"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ea37f355c05dde75b84bba2d767906ad522e97cd9e2eef2be7a4ab7fb442c06"
[[package]]
name = "is_elevated"
version = "0.1.2"
@@ -1036,7 +1009,7 @@ dependencies = [
"rayon",
"regex",
"tempfile",
"terminal_size 0.1.17",
"terminal_size",
]
[[package]]
@@ -1097,12 +1070,6 @@ version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "linux-raw-sys"
version = "0.0.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4d2456c373231a208ad294c33dc5bff30051eafd954cd4caae83a712b12854d"
[[package]]
name = "lock_api"
version = "0.4.8"
@@ -1606,20 +1573,6 @@ dependencies = [
"semver 0.9.0",
]
[[package]]
name = "rustix"
version = "0.35.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "72c825b8aa8010eb9ee99b75f05e10180b9278d161583034d7574c9d617aeada"
dependencies = [
"bitflags",
"errno",
"io-lifetimes",
"libc",
"linux-raw-sys",
"windows-sys",
]
[[package]]
name = "rustversion"
version = "1.0.9"
@@ -1939,16 +1892,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "terminal_size"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8440c860cf79def6164e4a0a983bcc2305d82419177a0e0c71930d049e3ac5a1"
dependencies = [
"rustix",
"windows-sys",
]
[[package]]
name = "textwrap"
version = "0.11.0"

View File

@@ -388,6 +388,7 @@ ADVANCED:
--target-file-ext <EVTX_FILE_EXT>... evtx以外の拡張子を解析対象に追加する。 (例1: evtx_data 例2: evtx1 evtx2)
OUTPUT:
-j, --json タイムラインの出力をJSON形式で保存する (例: -j -o results.json)
-o, --output <FILE> タイムラインをCSV形式で保存する (例: results.csv)
-P, --profile <PROFILE> 利用する出力プロファイル名を指定する (minimal, standard, verbose, verbose-all-field-info, verbose-details-and-all-field-info)
@@ -408,13 +409,13 @@ FILTERING:
--timeline-start <DATE> 解析対象とするイベントログの開始時刻 (例: "2020-02-22 00:00:00 +09:00")
OTHER-ACTIONS:
--contributors コントリビュータの一覧表示
-L, --logon-summary 成功と失敗したログオン情報の要約を出力する
--contributors コントリビュータの一覧表示
-L, --logon-summary 成功と失敗したログオン情報の要約を出力する
--level-tuning [<FILE>] ルールlevelのチューニング (デフォルト: ./rules/config/level_tuning.txt)
-p, --pivot-keywords-list ピボットキーワードの一覧作成
-s, --statistics イベントIDの統計情報を表示する
--set-default-profile <PROFILE> デフォルトの出力コンフィグを設定する
-u, --update-rules rulesフォルダをhayabusa-rulesのgithubリポジトリの最新版に更新する
-p, --pivot-keywords-list ピボットキーワードの一覧作成
-s, --statistics イベントIDの統計情報を表示する
--set-default-profile <PROFILE> デフォルトの出力コンフィグを設定する
-u, --update-rules rulesフォルダをhayabusa-rulesのgithubリポジトリの最新版に更新する
TIME-FORMAT:
--European-time ヨーロッパ形式で日付と時刻を出力する (例: 22-02-2022 22:00:00.123 +02:00)
@@ -442,7 +443,13 @@ hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -P verbose
* 全てのフィールド情報も含めて1つのCSVファイルにエクスポートして、Excel、Timeline Explorer、Elastic Stack等でさらに分析することができる(注意: `verbose-details-and-all-field-info`プロファイルを使用すると、出力するファイルのサイズがとても大きくなる!):
```bash
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.csv -P `verbose-details-and-all-field-info`
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.csv -P verbose-details-and-all-field-info
```
* タイムラインをJSON形式で保存する:
```bash
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.json -j
```
* Hayabusaルールのみを実行する(デフォルトでは`-r .\rules`にあるすべてのルールが利用される):

View File

@@ -380,6 +380,7 @@ ADVANCED:
--target-file-ext <EVTX_FILE_EXT>... Specify additional target file extensions (ex: evtx_data) (ex: evtx1 evtx2)
OUTPUT:
-j, --json Save the timeline in JSON format (ex: -j -o results.json)
-o, --output <FILE> Save the timeline in CSV format (ex: results.csv)
-P, --profile <PROFILE> Specify output profile (minimal, standard, verbose, verbose-all-field-info, verbose-details-and-all-field-info)
@@ -434,7 +435,13 @@ hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -P verbose
* Export to a single CSV file for further analysis with excel, timeline explorer, elastic stack, etc... and include all field information (Warning: your file output size will become much larger with the `verbose-details-and-all-field-info` profile!):
```bash
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.csv -F
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.csv -P verbose-details-and-all-field-info
```
* Save the timeline in JSON format:
```bash
hayabusa-1.5.1-win-x64.exe -d .\hayabusa-sample-evtx -o results.json -j
```
* Only run hayabusa rules (the default is to run all the rules in `-r .\rules`):

View File

@@ -9,11 +9,14 @@ use bytesize::ByteSize;
use chrono::{DateTime, Local, TimeZone, Utc};
use comfy_table::modifiers::UTF8_ROUND_CORNERS;
use comfy_table::presets::UTF8_FULL;
use csv::QuoteStyle;
use csv::{QuoteStyle, WriterBuilder};
use dashmap::Map;
use itertools::Itertools;
use krapslog::{build_sparkline, build_time_markers};
use lazy_static::lazy_static;
use linked_hash_map::LinkedHashMap;
use std::str::FromStr;
use comfy_table::*;
use hashbrown::{HashMap, HashSet};
@@ -189,7 +192,13 @@ pub fn after_fact(all_record_cnt: usize) {
Box::new(BufWriter::new(io::stdout()))
};
let color_map = set_output_color();
if let Err(err) = emit_csv(&mut target, displayflag, color_map, all_record_cnt as u128) {
if let Err(err) = emit_csv(
&mut target,
displayflag,
color_map,
all_record_cnt as u128,
PROFILES.clone().unwrap_or_default(),
) {
fn_emit_csv_err(Box::new(err));
}
}
@@ -199,10 +208,21 @@ fn emit_csv<W: std::io::Write>(
displayflag: bool,
color_map: HashMap<String, Colors>,
all_record_cnt: u128,
profile: LinkedHashMap<String, String>,
) -> io::Result<()> {
let disp_wtr = BufferWriter::stdout(ColorChoice::Always);
let mut disp_wtr_buf = disp_wtr.buffer();
let mut wtr = csv::WriterBuilder::new().from_writer(writer);
let json_output_flag = configs::CONFIG.read().unwrap().args.json_timeline;
let mut wtr = if json_output_flag {
WriterBuilder::new()
.delimiter(b'\n')
.double_quote(false)
.quote_style(QuoteStyle::Never)
.from_writer(writer)
} else {
WriterBuilder::new().from_writer(writer)
};
disp_wtr_buf.set_color(ColorSpec::new().set_fg(None)).ok();
@@ -231,11 +251,20 @@ fn emit_csv<W: std::io::Write>(
let mut timestamps: Vec<i64> = Vec::new();
let mut plus_header = true;
let mut detected_record_idset: HashSet<String> = HashSet::new();
for time in message::MESSAGES.clone().into_read_only().keys().sorted() {
if json_output_flag {
wtr.write_field("[")?;
}
for (processed_message_cnt, time) in message::MESSAGES
.clone()
.into_read_only()
.keys()
.sorted()
.enumerate()
{
let multi = message::MESSAGES.get(time).unwrap();
let (_, detect_infos) = multi.pair();
timestamps.push(_get_timestamp(time));
for detect_info in detect_infos {
for (info_idx, detect_info) in detect_infos.iter().enumerate() {
if !detect_info.detail.starts_with("[condition]") {
detected_record_idset.insert(format!("{}_{}", time, detect_info.eventid));
}
@@ -245,7 +274,7 @@ fn emit_csv<W: std::io::Write>(
write_color_buffer(
&disp_wtr,
get_writable_color(None),
&_get_serialized_disp_output(PROFILES.as_ref().unwrap(), true),
&_get_serialized_disp_output(&profile, true),
false,
)
.ok();
@@ -263,6 +292,16 @@ fn emit_csv<W: std::io::Write>(
false,
)
.ok();
} else if json_output_flag {
wtr.write_field(" {")?;
wtr.write_field(&output_json_str(&detect_info.ext_field, &profile))?;
if processed_message_cnt != message::MESSAGES._len() - 1
|| info_idx != detect_infos.len() - 1
{
wtr.write_field(" },")?;
} else {
wtr.write_field(" }")?;
}
} else {
// csv output format
if plus_header {
@@ -332,6 +371,10 @@ fn emit_csv<W: std::io::Write>(
.insert(detect_info.level.to_lowercase(), detect_counts_by_date);
}
}
if json_output_flag {
wtr.write_field("]")?;
}
if displayflag {
println!();
} else {
@@ -489,7 +532,7 @@ fn _get_serialized_disp_output(data: &LinkedHashMap<String, String>, header: boo
}
}
}
let mut disp_serializer = csv::WriterBuilder::new()
let mut disp_serializer = WriterBuilder::new()
.double_quote(false)
.quote_style(QuoteStyle::Never)
.delimiter(b'|')
@@ -774,6 +817,235 @@ fn _get_timestamp(time: &DateTime<Utc>) -> i64 {
}
}
/// Returns the value split into pieces for the aliases that are rendered as
/// JSON arrays (Details, MitreTactics, MitreTags, OtherTags); returns an
/// empty `Vec` when the value should stay a scalar.
///
/// `target_alias_context` is the `%Alias%` format string from the output
/// profile; `target_data` is the already-formatted value for that column.
fn _get_json_vec(target_alias_context: &str, target_data: &str) -> Vec<String> {
    if target_alias_context.contains("%MitreTactics%")
        || target_alias_context.contains("%OtherTags%")
        || target_alias_context.contains("%MitreTags%")
    {
        // Tag-style aliases are stored as ": "-separated lists.
        target_data.split(": ").map(|x| x.to_string()).collect()
    } else if target_alias_context.contains("%Details%") {
        // Details entries are separated by " ¦ ".
        let ret: Vec<String> = target_data.split(" ¦ ").map(|x| x.to_string()).collect();
        // A single piece with no "key: value" shape is not an array.
        if target_data == ret[0] && !target_data.contains(": ") {
            vec![]
        } else {
            ret
        }
    } else {
        vec![]
    }
}
/// Formats one `key: value` member for the JSON output.
///
/// `key_quote_exclude_flag` suppresses quoting of the key (used when the key
/// already carries quotes); `concat_flag` marks `value` as an already-composed
/// JSON fragment that must not be re-quoted. Integers and booleans are always
/// emitted unquoted.
fn _create_json_output_format(
    key: &String,
    value: &str,
    key_quote_exclude_flag: bool,
    concat_flag: bool,
) -> String {
    let head = match key_quote_exclude_flag {
        true => key.to_string(),
        false => format!("\"{}\"", key),
    };
    // NOTE(review): the leading spaces below are the JSON indent — the
    // rendered diff shows one space, the original comment says four.
    match (i64::from_str(value), bool::from_str(value)) {
        (Ok(num), _) => format!(" {}: {}", head, num),
        (_, Ok(flag)) => format!(" {}: {}", head, flag),
        _ if concat_flag => format!(" {}: {}", head, value),
        _ => format!(" {}: \"{}\"", head, value),
    }
}
/// Escapes a raw value so it can be embedded in the JSON output without
/// producing invalid JSON (backslash/quote escaping plus trimming).
///
/// `input` is the value pre-split on ": ". With one piece it is used as-is;
/// with `concat_flag` the pieces are re-joined whole; otherwise the first
/// piece is treated as a key and dropped.
fn _convert_valid_json_str(input: &[&str], concat_flag: bool) -> String {
    let tmp = if input.len() == 1 {
        input[0].to_string()
    } else if concat_flag {
        input.join(": ")
    } else {
        input[1..].join(": ")
    };
    if tmp.is_empty() {
        return tmp;
    }
    let con_val = tmp.as_str();
    if con_val.starts_with('\"') {
        // Value already opens with a quote: only the closing quote may be
        // missing. Append it raw when the value is a composed fragment
        // (`concat_flag`), escaped otherwise.
        // (The original also built an `addition_header` here, but its
        // condition `!con_val.starts_with('"')` can never hold inside this
        // branch — that dead logic is removed.)
        let addition_quote = if !con_val.ends_with('\"') && concat_flag {
            "\""
        } else if !con_val.ends_with('\"') {
            "\\\""
        } else {
            ""
        };
        [
            con_val.replace('\\', "\\\\").replace('\"', "\\\"").trim(),
            addition_quote,
        ]
        .join("")
    } else {
        con_val
            .replace('\\', "\\\\")
            .replace('\"', "\\\"")
            .trim()
            .to_string()
    }
}
/// Builds the JSON object body (the members between `{` and `}`) for one
/// detection record.
///
/// `ext_field` maps output column name -> already-formatted value for this
/// record; `profile` maps the same column name -> the `%Alias%` format string
/// from the output profile, which decides how each value is rendered:
/// plain scalar, Details split into per-key members, or a tag string array.
fn output_json_str(
    ext_field: &LinkedHashMap<String, String>,
    profile: &LinkedHashMap<String, String>,
) -> String {
    // One formatted "key: value" JSON fragment per output member.
    let mut target: Vec<String> = vec![];
    for (k, v) in ext_field.iter() {
        // NOTE(review): assumes every ext_field key also exists in the
        // profile — `unwrap` panics otherwise; confirm against callers.
        let output_value_fmt = profile.get(k).unwrap();
        // Non-empty only for Details / MitreTactics / MitreTags / OtherTags.
        let vec_data = _get_json_vec(output_value_fmt, v);
        if vec_data.is_empty() {
            // Plain scalar member.
            let tmp_val: Vec<&str> = v.split(": ").collect();
            let output_val =
                _convert_valid_json_str(&tmp_val, output_value_fmt.contains("%RecordInformation%"));
            target.push(_create_json_output_format(
                k,
                &output_val,
                k.starts_with('\"'),
                output_val.starts_with('\"'),
            ));
        } else if output_value_fmt.contains("%Details%") {
            let mut stocked_value = vec![];
            let mut key_index_stock = vec![];
            for detail_contents in vec_data.iter() {
                // Split the fragment apart and extract the tokens that can
                // act as keys (words ending in ':').
                let space_split: Vec<&str> = detail_contents.split(' ').collect();
                let mut tmp_stock = vec![];
                for sp in space_split.iter() {
                    if sp.ends_with(':') && sp != &":" {
                        stocked_value.push(tmp_stock);
                        tmp_stock = vec![];
                        key_index_stock.push(sp.replace(':', "").to_owned());
                    } else {
                        tmp_stock.push(sp.to_owned());
                    }
                }
                stocked_value.push(tmp_stock);
            }
            let mut key_idx = 0;
            let mut output_value_stock = String::default();
            for (value_idx, value) in stocked_value.iter().enumerate() {
                let mut tmp = if key_idx >= key_index_stock.len() {
                    String::default()
                } else if value_idx == 0 && !value.is_empty() {
                    k.to_string()
                } else {
                    key_index_stock[key_idx].to_string()
                };
                if !output_value_stock.is_empty() {
                    output_value_stock.push_str(" | ");
                }
                output_value_stock.push_str(&value.join(" "));
                // While still on the second-to-last key, check whether an
                // empty value array (which acts as a key delimiter) remains
                // later in `stocked_value`.
                let is_remain_split_stock = if key_idx == key_index_stock.len() - 2
                    && value_idx < stocked_value.len() - 1
                    && !output_value_stock.is_empty()
                {
                    let mut ret = true;
                    for remain_value in stocked_value[value_idx + 1..].iter() {
                        if remain_value.is_empty() {
                            ret = false;
                            break;
                        }
                    }
                    ret
                } else {
                    false
                };
                if (value_idx < stocked_value.len() - 1 && stocked_value[value_idx + 1].is_empty())
                    || is_remain_split_stock
                {
                    // The next element either does not exist or starts a new
                    // key, so the stocked content is complete — flush it as
                    // one JSON member.
                    let output_tmp = format!("{}: {}", tmp, output_value_stock);
                    let output: Vec<&str> = output_tmp.split(": ").collect();
                    let key = _convert_valid_json_str(&[output[0]], false);
                    let fmted_val = _convert_valid_json_str(&output, false);
                    target.push(_create_json_output_format(
                        &key,
                        &fmted_val,
                        key.starts_with('\"'),
                        fmted_val.starts_with('\"'),
                    ));
                    output_value_stock.clear();
                    tmp = String::default();
                    key_idx += 1;
                }
                if value_idx == stocked_value.len() - 1 {
                    // Last element: flush whatever is still stocked.
                    let output_tmp = format!("{}: {}", tmp, output_value_stock);
                    let output: Vec<&str> = output_tmp.split(": ").collect();
                    let key = _convert_valid_json_str(&[output[0]], false);
                    let fmted_val = _convert_valid_json_str(&output, false);
                    target.push(_create_json_output_format(
                        &key,
                        &fmted_val,
                        key.starts_with('\"'),
                        fmted_val.starts_with('\"'),
                    ));
                    key_idx += 1;
                }
            }
        } else if output_value_fmt.contains("%MitreTags%")
            || output_value_fmt.contains("%MitreTactics%")
            || output_value_fmt.contains("%OtherTags%")
        {
            // Tag aliases are emitted as a JSON string array; records with
            // no tags are skipped entirely.
            let tmp_val: Vec<&str> = v.split(": ").collect();
            let key = _convert_valid_json_str(&[k.as_str()], false);
            let values: Vec<&&str> = tmp_val.iter().filter(|x| x.trim() != "").collect();
            let mut value: Vec<String> = vec![];
            if values.is_empty() {
                continue;
            }
            for (idx, tag_val) in values.iter().enumerate() {
                if idx == 0 {
                    value.push("[\n".to_string());
                }
                let insert_val = format!(" \"{}\"", tag_val.trim());
                value.push(insert_val);
                if idx != values.len() - 1 {
                    value.push(",\n".to_string());
                }
            }
            value.push("\n ]".to_string());
            let fmted_val = value.join("");
            target.push(_create_json_output_format(
                &key,
                &fmted_val,
                key.starts_with('\"'),
                // Array value is an already-composed fragment: never re-quote.
                true,
            ));
        }
    }
    // Members are joined by the caller's surrounding "{ ... }" writer.
    target.join(",\n")
}
#[cfg(test)]
mod tests {
use crate::afterfact::_get_serialized_disp_output;
@@ -863,7 +1135,7 @@ mod tests {
eventid: test_eventid.to_string(),
detail: String::default(),
record_information: Option::Some(test_recinfo.to_string()),
ext_field: output_profile,
ext_field: output_profile.clone(),
},
expect_time,
&mut profile_converter,
@@ -903,7 +1175,7 @@ mod tests {
+ test_attack
+ "\n";
let mut file: Box<dyn io::Write> = Box::new(File::create("./test_emit_csv.csv").unwrap());
assert!(emit_csv(&mut file, false, HashMap::new(), 1).is_ok());
assert!(emit_csv(&mut file, false, HashMap::new(), 1, output_profile).is_ok());
match read_to_string("./test_emit_csv.csv") {
Err(_) => panic!("Failed to open file."),
Ok(s) => {

View File

@@ -237,6 +237,10 @@ pub struct Config {
#[clap(help_heading = Some("OTHER-ACTIONS"), long = "set-default-profile", value_name = "PROFILE")]
pub set_default_profile: Option<String>,
/// Save the timeline in JSON format (ex: -j -o results.json)
#[clap(help_heading = Some("OUTPUT"), short = 'j', long = "json", requires = "output")]
pub json_timeline: bool,
/// Do not display result summary
#[clap(help_heading = Some("DISPLAY-SETTINGS"), long = "no-summary")]
pub no_summary: bool,