Merge branch 'develop' into feature/level-tuning#390
.gitignore (vendored, 2 changes)
@@ -4,4 +4,4 @@
/.vscode/
.DS_Store
test_*
.env
.env

Cargo.lock (generated, 12 changes)
@@ -108,12 +108,6 @@ dependencies = [
"byteorder",
]

[[package]]
name = "base64"
version = "0.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"

[[package]]
name = "bitflags"
version = "1.3.2"
@@ -842,9 +836,9 @@ dependencies = [

[[package]]
name = "hayabusa"
version = "1.1.0"
version = "1.2.0"
dependencies = [
"base64 0.13.0",
"base64",
"chrono",
"clap",
"colored",
@@ -1782,7 +1776,7 @@ version = "0.9.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f88643aea3c1343c804950d7bf983bd2067f5ab59db6d613a08e05572f2714ab"
dependencies = [
"base64 0.10.1",
"base64",
"bytes 0.4.12",
"cookie",
"cookie_store",

@@ -1,6 +1,6 @@
[package]
name = "hayabusa"
version = "1.1.0"
version = "1.2.0"
authors = ["Yamato Security @SecurityYamato"]
edition = "2021"

@@ -35,7 +35,7 @@ Hayabusaは、日本の[Yamato Security](https://yamatosecurity.connpass.com/)
- [Timeline Explorerでの解析:](#timeline-explorerでの解析)
- [Criticalアラートのフィルタリングとコンピュータごとのグルーピング:](#criticalアラートのフィルタリングとコンピュータごとのグルーピング)
- [タイムラインのサンプル結果](#タイムラインのサンプル結果)
- [特徴](#特徴)
- [特徴&機能](#特徴機能)
- [予定されている機能](#予定されている機能)
- [ダウンロード](#ダウンロード)
- [ソースコードからのコンパイル(任意)](#ソースコードからのコンパイル任意)
@@ -46,12 +46,12 @@ Hayabusaは、日本の[Yamato Security](https://yamatosecurity.connpass.com/)
- [Hayabusaの実行](#hayabusaの実行)
- [注意: アンチウィルス/EDRの誤検知](#注意-アンチウィルスedrの誤検知)
- [Windows](#windows)
- [Windows Terminalで利用する際の注意事項](#windows-terminalで利用する際の注意事項)
- [Linux](#linux)
- [macOS](#macos)
- [使用方法](#使用方法)
- [コマンドラインオプション](#コマンドラインオプション)
- [使用例](#使用例)
- [ピボットキーワードの作成](#ピボットキーワードの作成)
- [サンプルevtxファイルでHayabusaをテストする](#サンプルevtxファイルでhayabusaをテストする)
- [Hayabusaの出力](#hayabusaの出力)
- [プログレスバー](#プログレスバー)
@@ -115,7 +115,7 @@ CSVのタイムライン結果のサンプルは[こちら](https://github.com/Y

CSVのタイムラインをExcelやTimeline Explorerで分析する方法は[こちら](doc/CSV-AnalysisWithExcelAndTimelineExplorer-Japanese.pdf)で紹介しています。

# 特徴
# 特徴&機能

* クロスプラットフォーム対応: Windows, Linux, macOS。
* Rustで開発され、メモリセーフでハヤブサよりも高速です!
@@ -128,6 +128,7 @@ CSVのタイムラインをExcelやTimeline Explorerで分析する方法は[こ
* 不良ルールやノイズの多いルールを除外するルールチューニング設定が可能です。
* MITRE ATT&CKとのマッピング (CSVの出力ファイルのみ)。
* ルールレベルのチューニング。
* イベントログから不審なユーザやファイルを素早く特定するのに有用な、ピボットキーワードの一覧作成。

# 予定されている機能

@@ -232,11 +233,6 @@ Hayabusaを実行する際にアンチウィルスやEDRにブロックされる
コマンドプロンプトやWindows Terminalから32ビットもしくは64ビットのWindowsバイナリをHayabusaのルートディレクトリから実行します。
例: `hayabusa-1.2.0-windows-x64.exe`

### Windows Terminalで利用する際の注意事項

2021/02/01の時点で、Windows Terminalから標準出力でhayabusaを使ったときに、コントロールコード(0x9D等)が検知結果に入っていると出力が止まることが確認されています。
Windows Terminalからhayabusaを標準出力で解析させたい場合は、 `-c` (カラー出力)のオプションをつければ出力が止まることを回避できます。

## Linux

まず、バイナリに実行権限を与える必要があります。
@@ -313,6 +309,7 @@ USAGE:
-q --quiet 'Quietモード。起動バナーを表示しない。'
-Q --quiet-errors 'Quiet errorsモード。エラーログを保存しない。'
--level-tuning <LEVEL_TUNING_FILE> 'ルールlevelのチューニング [default: ./config/level_tuning.txt]'
-p --pivot-keywords-list 'ピボットキーワードの一覧作成。'
--contributors 'コントリビュータの一覧表示。'
```

@@ -378,6 +375,12 @@ hayabusa.exe -d .\hayabusa-sample-evtx -r .\rules\hayabusa\default\events\Securi
hayabusa.exe -l -m low
```

* criticalレベルのアラートからピボットキーワードの一覧を作成します(結果は結果毎に`keywords-Ip Addresses.txt`や`keywords-Users.txt`等に出力されます):

```bash
hayabusa.exe -l -m critical -p -o keywords
```

* イベントIDの統計情報を取得します:

```bash
@@ -405,10 +408,28 @@ Checking target evtx FilePath: "./hayabusa-sample-evtx/YamatoSecurity/T1218.004_
5 / 509 [=>------------------------------------------------------------------------------------------------------------------------------------------] 0.98 % 1s
```

* Quiet error mode:
* エラーログの出力をさせないようにする:
デフォルトでは、Hayabusaはエラーメッセージをエラーログに保存します。
エラーメッセージを保存したくない場合は、`-Q`を追加してください。

## ピボットキーワードの作成

`-p`もしくは`--pivot-keywords-list`オプションを使うことで不審なユーザやホスト名、プロセスなどを一覧で出力することができ、イベントログから素早く特定することができます。
ピボットキーワードのカスタマイズは`config/pivot_keywords.txt`を変更することで行うことができます。以下はデフォルトの設定になります。:

```
Users.SubjectUserName
Users.TargetUserName
Users.User
Logon IDs.SubjectLogonId
Logon IDs.TargetLogonId
Workstation Names.WorkstationName
Ip Addresses.IpAddress
Processes.Image
```

形式は`KeywordName.FieldName`となっています。例えばデフォルトの設定では、`Users`というリストは検知したイベントから`SubjectUserName`、 `TargetUserName` 、 `User`のフィールドの値が一覧として出力されます。hayabusaのデフォルトでは検知したすべてのイベントから結果を出力するため、`--pivot-keyword-list`オプションを使うときには `-m` もしくは `--min-level` オプションを併せて使って検知するイベントのレベルを指定することをおすすめします。まず`-m critical`を指定して、最も高い`critical`レベルのアラートのみを対象として、レベルを必要に応じて下げていくとよいでしょう。結果に正常なイベントにもある共通のキーワードが入っている可能性が高いため、手動で結果を確認してから、不審なイベントにありそうなキーワードリストを1つのファイルに保存し、`grep -f keywords.txt timeline.csv`等のコマンドで不審なアクティビティに絞ったタイムラインを作成することができます。

# サンプルevtxファイルでHayabusaをテストする

Hayabusaをテストしたり、新しいルールを作成したりするためのサンプルevtxファイルをいくつか提供しています: [https://github.com/Yamato-Security/Hayabusa-sample-evtx](https://github.com/Yamato-Security/Hayabusa-sample-evtx)
@@ -547,6 +568,20 @@ id,new_level
* [WELA (Windows Event Log Analyzer)](https://github.com/Yamato-Security/WELA/) - [Yamato Security](https://github.com/Yamato-Security/)によるWindowsイベントログ解析のマルチツール。
* [Zircolite](https://github.com/wagga40/Zircolite) - Pythonで書かれたSigmaベースの攻撃検知ツール。

# Windowsイベントログ設定のススメ

Windows機での悪性な活動を検知する為には、デフォルトのログ設定を改善することが必要です。
以下のサイトを閲覧することをおすすめします。:
* [JSCU-NL (Joint Sigint Cyber Unit Netherlands) Logging Essentials](https://github.com/JSCU-NL/logging-essentials)
* [ACSC (Australian Cyber Security Centre) Logging and Forwarding Guide](https://www.cyber.gov.au/acsc/view-all-content/publications/windows-event-logging-and-forwarding)
* [Malware Archaeology Cheat Sheets](https://www.malwarearchaeology.com/cheat-sheets)

# Sysmon関係のプロジェクト

フォレンジックに有用な証拠を作り、高い精度で検知をさせるためには、sysmonをインストールする必要があります。以下のサイトを参考に設定することをおすすめします。:
* [Sysmon Modular](https://github.com/olafhartong/sysmon-modular)
* [TrustedSec Sysmon Community Guide](https://github.com/trustedsec/SysmonCommunityGuide)

# コミュニティによるドキュメンテーション

## 英語

README.md (51 changes)
@@ -46,12 +46,12 @@ Hayabusa is a **Windows event log fast forensics timeline generator** and **thre
- [Running Hayabusa](#running-hayabusa)
- [Caution: Anti-Virus/EDR Warnings](#caution-anti-virusedr-warnings)
- [Windows](#windows)
- [Caution: Windows Terminal Bug](#caution-windows-terminal-bug)
- [Linux](#linux)
- [macOS](#macos)
- [Usage](#usage)
- [Command Line Options](#command-line-options)
- [Usage Examples](#usage-examples)
- [Pivot Keyword Generator](#pivot-keyword-generator)
- [Testing Hayabusa on Sample Evtx Files](#testing-hayabusa-on-sample-evtx-files)
- [Hayabusa Output](#hayabusa-output)
- [Progress Bar](#progress-bar)
@@ -62,6 +62,8 @@ Hayabusa is a **Windows event log fast forensics timeline generator** and **thre
- [Detection Level Tuning](#detection-level-tuning)
- [Event ID Filtering](#event-id-filtering)
- [Other Windows Event Log Analyzers and Related Projects](#other-windows-event-log-analyzers-and-related-projects)
- [Windows Logging Recommendations](#windows-logging-recommendations)
- [Sysmon Related Projects](#sysmon-related-projects)
- [Community Documentation](#community-documentation)
- [English](#english)
- [Japanese](#japanese)
@@ -126,6 +128,7 @@ You can learn how to analyze CSV timelines in Excel and Timeline Explorer [here]
* Rule tuning configuration by excluding unneeded or noisy rules.
* MITRE ATT&CK mapping of tactics (only in saved CSV files).
* Rule level tuning.
* Create a list of unique pivot keywords to quickly identify abnormal users, hostnames, processes, etc... as well as correlate events.

# Planned Features

@@ -225,12 +228,6 @@ You may receive warning from anti-virus or EDR when trying to run hayabusa. Thes
In Command Prompt or Windows Terminal, just run the 32-bit or 64-bit Windows binary from the hayabusa root directory.
Example: `hayabusa-1.2.0-windows-x64.exe`

### Caution: Windows Terminal Bug

As of Feb 1, 2022, Windows Terminal will freeze midway when displaying results to the screen when run against the sample evtx files.
This is because there is a control code (0x9D) in the output.
This is a known Windows Terminal bug which will eventually be fixed, but in the meantime you can avoid it by adding the `-c` (colored output) option when you run hayabusa.

## Linux

You first need to make the binary executable.
@@ -307,6 +304,7 @@ USAGE:
-q --quiet 'Quiet mode. Do not display the launch banner.'
-Q --quiet-errors 'Quiet errors mode. Do not save error logs.'
--level-tuning <LEVEL_TUNING_FILE> 'Tune the rule level [default: ./config/level_tuning.txt]'
-p --pivot-keywords-list 'Create a list of pivot keywords.'
--contributors 'Prints the list of contributors.'
```
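
The new `--level-tuning` option above reads a rule-level mapping file. A minimal sketch of how it might be invoked, assuming the documented default path and the `id,new_level` CSV layout referenced elsewhere in this diff (the rule id in the comment is only a placeholder):

```bash
# Re-map rule levels using the default tuning file.
# Each line of ./config/level_tuning.txt is "id,new_level",
# e.g. "00000000-0000-0000-0000-000000000000,informational" (placeholder id).
hayabusa.exe --level-tuning ./config/level_tuning.txt
```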

@@ -372,7 +370,13 @@ hayabusa.exe -d .\hayabusa-sample-evtx -r .\rules\hayabusa\default\events\Securi
hayabusa.exe -l -m low
```

* Get event ID statistics:
* Create a list of pivot keywords from critical alerts and save the results. (Results will be saved to `keywords-Ip Addresses.txt`, `keywords-Users.txt`, etc...):

```bash
hayabusa.exe -l -m critical -p -o keywords
```

* Print Event ID statistics:

```bash
hayabusa.exe -f Security.evtx -s
@@ -403,6 +407,24 @@ Checking target evtx FilePath: "./hayabusa-sample-evtx/YamatoSecurity/T1218.004_
By default, hayabusa will save error messages to error log files.
If you do not want to save error messages, please add `-Q`.
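
For example, a sketch of a run that suppresses the error log (the target directory here is only an illustration):

```bash
# Scan a directory of .evtx files without writing an error log file.
hayabusa.exe -d .\hayabusa-sample-evtx -Q
```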

## Pivot Keyword Generator

You can use the `-p` or `--pivot-keywords-list` option to create a list of unique pivot keywords to quickly identify abnormal users, hostnames, processes, etc... as well as correlate events. You can customize what keywords you want to search for by editing `config/pivot_keywords.txt`.
This is the default setting:

```
Users.SubjectUserName
Users.TargetUserName
Users.User
Logon IDs.SubjectLogonId
Logon IDs.TargetLogonId
Workstation Names.WorkstationName
Ip Addresses.IpAddress
Processes.Image
```

The format is `KeywordName.FieldName`. For example, when creating the list of `Users`, hayabusa will list up all the values in the `SubjectUserName`, `TargetUserName` and `User` fields. By default, hayabusa will return results from all events (informational and higher) so we highly recommend combining the `--pivot-keyword-list` option with the `-m` or `--min-level` option. For example, start off with only creating keywords from `critical` alerts with `-m critical` and then continue with `-m high`, `-m medium`, etc... There will most likely be common keywords in your results that will match on many normal events, so after manually checking the results and creating a list of unique keywords in a single file, you can then create a narrowed down timeline of suspicious activity with a command like `grep -f keywords.txt timeline.csv`.
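
A sketch of that workflow end to end (the output file names assume `-o keywords` as in the earlier example, and `timeline.csv` stands in for a previously saved CSV timeline):

```bash
# 1. Collect pivot keywords from critical alerts only.
hayabusa.exe -d .\hayabusa-sample-evtx -m critical -p -o keywords
# 2. Review keywords-Users.txt, keywords-Ip Addresses.txt, etc. and copy the
#    suspicious entries into a single keywords.txt.
# 3. Narrow the saved timeline down to those keywords.
grep -f keywords.txt timeline.csv > suspicious-timeline.csv
```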

# Testing Hayabusa on Sample Evtx Files

We have provided some sample evtx files for you to test hayabusa and/or create new rules at [https://github.com/Yamato-Security/hayabusa-sample-evtx](https://github.com/Yamato-Security/hayabusa-sample-evtx)
@@ -541,6 +563,19 @@ There is no "one tool to rule them all" and we have found that each has its own
* [WELA (Windows Event Log Analyzer)](https://github.com/Yamato-Security/WELA) - The Swiss Army knife for Windows event logs by [Yamato Security](https://github.com/Yamato-Security/)
* [Zircolite](https://github.com/wagga40/Zircolite) - Sigma-based attack detection tool written in Python.

# Windows Logging Recommendations

In order to properly detect malicious activity on Windows machines, you will need to improve the default log settings. We recommend the following sites for guidance:
* [JSCU-NL (Joint Sigint Cyber Unit Netherlands) Logging Essentials](https://github.com/JSCU-NL/logging-essentials)
* [ACSC (Australian Cyber Security Centre) Logging and Forwarding Guide](https://www.cyber.gov.au/acsc/view-all-content/publications/windows-event-logging-and-forwarding)
* [Malware Archaeology Cheat Sheets](https://www.malwarearchaeology.com/cheat-sheets)

# Sysmon Related Projects

To create the most forensic evidence and detect with the highest accuracy, you need to install sysmon. We recommend the following sites:
* [Sysmon Modular](https://github.com/olafhartong/sysmon-modular)
* [TrustedSec Sysmon Community Guide](https://github.com/trustedsec/SysmonCommunityGuide)

# Community Documentation

## English

config/output_tag.txt (new file, 15 lines)
@@ -0,0 +1,15 @@
tag_full_str,tag_output_str
attack.reconnaissance,Recon
attack.resource_development,ResDev
attack.initial_access,InitAccess
attack.execution,Exec
attack.persistence,Persis
attack.privilege_escalation,PrivEsc
attack.defense_evasion,Evas
attack.credential_access,CredAccess
attack.discovery,Disc
attack.lateral_movement,LatMov
attack.collection,Collect
attack.command_and_control,C2
attack.exfiltration,Exfil
attack.impact,Impact

config/pivot_keywords.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
Users.SubjectUserName
Users.TargetUserName
Users.User
Logon IDs.SubjectLogonId
Logon IDs.TargetLogonId
Workstation Names.WorkstationName
Ip Addresses.IpAddress
Processes.Image

src/afterfact.rs (140 changes)
@@ -21,9 +21,11 @@ pub struct CsvFormat<'a> {
computer: &'a str,
event_i_d: &'a str,
level: &'a str,
mitre_attack: &'a str,
rule_title: &'a str,
details: &'a str,
mitre_attack: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
record_information: Option<&'a str>,
rule_path: &'a str,
file_path: &'a str,
}
@@ -37,6 +39,8 @@ pub struct DisplayFormat<'a> {
pub level: &'a str,
pub rule_title: &'a str,
pub details: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
pub record_information: Option<&'a str>,
}

/// level_color.txtファイルを読み込み対応する文字色のマッピングを返却する関数
@@ -139,81 +143,44 @@ fn emit_csv<W: std::io::Write>(
for (time, detect_infos) in messages.iter() {
for detect_info in detect_infos {
if displayflag {
if color_map.is_some() {
let output_color =
_get_output_color(color_map.as_ref().unwrap(), &detect_info.level);
wtr.serialize(DisplayFormat {
timestamp: &format!(
"{} ",
&format_time(time).truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
level: &format!(
" {} ",
&detect_info.level.truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
computer: &format!(
" {} ",
&detect_info.computername.truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
event_i_d: &format!(
" {} ",
&detect_info.eventid.truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
rule_title: &format!(
" {} ",
&detect_info.alert.truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
details: &format!(
" {}",
&detect_info.detail.truecolor(
output_color[0],
output_color[1],
output_color[2]
)
),
})?;
} else {
wtr.serialize(DisplayFormat {
timestamp: &format!("{} ", &format_time(time)),
level: &format!(" {} ", &detect_info.level),
computer: &format!(" {} ", &detect_info.computername),
event_i_d: &format!(" {} ", &detect_info.eventid),
rule_title: &format!(" {} ", &detect_info.alert),
details: &format!(" {}", &detect_info.detail),
})?;
}
let colors = color_map
.as_ref()
.map(|cl_mp| _get_output_color(cl_mp, &detect_info.level));
let colors = colors.as_ref();

let recinfo = detect_info
.record_information
.as_ref()
.map(|recinfo| _format_cell(recinfo, ColPos::Last, colors));
let details = detect_info
.detail
.chars()
.filter(|&c| !c.is_control())
.collect::<String>();

let dispformat = DisplayFormat {
timestamp: &_format_cell(&format_time(time), ColPos::First, colors),
level: &_format_cell(&detect_info.level, ColPos::Other, colors),
computer: &_format_cell(&detect_info.computername, ColPos::Other, colors),
event_i_d: &_format_cell(&detect_info.eventid, ColPos::Other, colors),
rule_title: &_format_cell(&detect_info.alert, ColPos::Other, colors),
details: &_format_cell(&details, ColPos::Other, colors),
record_information: recinfo.as_deref(),
};
wtr.serialize(dispformat)?;
} else {
// csv出力時フォーマット
wtr.serialize(CsvFormat {
timestamp: &format_time(time),
file_path: &detect_info.filepath,
rule_path: &detect_info.rulepath,
level: &detect_info.level,
computer: &detect_info.computername,
event_i_d: &detect_info.eventid,
mitre_attack: &detect_info.tag_info,
rule_title: &detect_info.alert,
details: &detect_info.detail,
mitre_attack: &detect_info.tag_info,
record_information: detect_info.record_information.as_deref(),
file_path: &detect_info.filepath,
rule_path: &detect_info.rulepath,
})?;
}
let level_suffix = *configs::LEVELMAP
@@ -245,6 +212,29 @@ fn emit_csv<W: std::io::Write>(
Ok(())
}

enum ColPos {
First, // 先頭
Last, // 最後
Other, // それ以外
}

fn _format_cellpos(column: ColPos, colval: &str) -> String {
return match column {
ColPos::First => format!("{} ", colval),
ColPos::Last => format!(" {}", colval),
ColPos::Other => format!(" {} ", colval),
};
}

fn _format_cell(word: &str, column: ColPos, output_color: Option<&Vec<u8>>) -> String {
if let Some(color) = output_color {
let colval = format!("{}", word.truecolor(color[0], color[1], color[2]));
_format_cellpos(column, &colval)
} else {
_format_cellpos(column, word)
}
}

/// 与えられたユニークな検知数と全体の検知数の情報(レベル別と総計)を元に結果文を標準出力に表示する関数
fn _print_unique_results(
mut counts_by_level: Vec<u128>,
@@ -351,6 +341,7 @@ mod tests {
let test_eventid = "1111";
let output = "pokepoke";
let test_attack = "execution/txxxx.yyy";
let test_recinfo = "record_infoinfo11";
{
let mut messages = print::MESSAGES.lock().unwrap();
messages.clear();
@@ -381,6 +372,7 @@ mod tests {
alert: test_title.to_string(),
detail: String::default(),
tag_info: test_attack.to_string(),
record_information: Option::Some(test_recinfo.to_string()),
},
);
}
@@ -389,7 +381,7 @@ mod tests {
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect =
"Timestamp,Computer,EventID,Level,RuleTitle,Details,MitreAttack,RulePath,FilePath\n"
"Timestamp,Computer,EventID,Level,MitreAttack,RuleTitle,Details,RecordInformation,RulePath,FilePath\n"
.to_string()
+ &expect_tz
.clone()
@@ -402,11 +394,13 @@ mod tests {
+ ","
+ test_level
+ ","
+ test_attack
+ ","
+ test_title
+ ","
+ output
+ ","
+ test_attack
+ test_recinfo
+ ","
+ testrulepath
+ ","
@@ -463,6 +457,7 @@ mod tests {
alert: test_title.to_string(),
detail: String::default(),
tag_info: test_attack.to_string(),
record_information: Option::Some(String::default()),
},
);
messages.debug();
@@ -471,7 +466,8 @@ mod tests {
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect_header = "Timestamp|Computer|EventID|Level|RuleTitle|Details\n";
let expect_header =
"Timestamp|Computer|EventID|Level|RuleTitle|Details|RecordInformation\n";
let expect_colored = expect_header.to_string()
+ &get_white_color_string(
&expect_tz
@@ -489,6 +485,8 @@ mod tests {
+ &get_white_color_string(test_title)
+ " | "
+ &get_white_color_string(output)
+ " | "
+ &get_white_color_string("")
+ "\n";
let expect_nocoloed = expect_header.to_string()
+ &expect_tz
@@ -505,6 +503,8 @@ mod tests {
+ test_title
+ " | "
+ output
+ " | "
+ ""
+ "\n";

let mut file: Box<dyn io::Write> =

@@ -1,3 +1,5 @@
use crate::detections::pivot::PivotKeyword;
use crate::detections::pivot::PIVOT_KEYWORD;
use crate::detections::print::AlertMessage;
use crate::detections::utils;
use chrono::{DateTime, Utc};
@@ -70,7 +72,8 @@ fn build_app<'a>() -> ArgMatches<'a> {

let usages = "-d --directory=[DIRECTORY] 'Directory of multiple .evtx files.'
-f --filepath=[FILEPATH] 'File path to one .evtx file.'
-r --rules=[RULEFILE/RULEDIRECTORY] 'Rule file or directory. (Default: ./rules)'
-F --full-data 'Print all field information.'
-r --rules=[RULEDIRECTORY/RULEFILE] 'Rule file or directory (default: ./rules)'
-c --color 'Output with color. (Terminal needs to support True Color.)'
-C --config=[RULECONFIGDIRECTORY] 'Rule config folder. (Default: ./rules/config)'
-o --output=[CSV_TIMELINE] 'Save the timeline in CSV format. (Example: results.csv)'
@@ -89,6 +92,7 @@ fn build_app<'a>() -> ArgMatches<'a> {
-s --statistics 'Prints statistics of event IDs.'
-q --quiet 'Quiet mode. Do not display the launch banner.'
-Q --quiet-errors 'Quiet errors mode. Do not save error logs.'
-p --pivot-keywords-list 'Create a list of pivot keywords.'
--contributors 'Prints the list of contributors.'";
App::new(&program)
.about("Hayabusa: Aiming to be the world's greatest Windows event log analysis tool!")
@@ -276,6 +280,7 @@ impl Default for EventKeyAliasConfig {
fn load_eventkey_alias(path: &str) -> EventKeyAliasConfig {
let mut config = EventKeyAliasConfig::new();

// eventkey_aliasが読み込めなかったらエラーで終了とする。
let read_result = utils::read_csv(path);
if read_result.is_err() {
AlertMessage::alert(
@@ -285,7 +290,7 @@ fn load_eventkey_alias(path: &str) -> EventKeyAliasConfig {
.ok();
return config;
}
// eventkey_aliasが読み込めなかったらエラーで終了とする。

read_result.unwrap().into_iter().for_each(|line| {
if line.len() != 2 {
return;
@@ -310,6 +315,40 @@ fn load_eventkey_alias(path: &str) -> EventKeyAliasConfig {
config
}

///設定ファイルを読み込み、keyとfieldsのマップをPIVOT_KEYWORD大域変数にロードする。
pub fn load_pivot_keywords(path: &str) {
let read_result = utils::read_txt(path);
if read_result.is_err() {
AlertMessage::alert(
&mut BufWriter::new(std::io::stderr().lock()),
read_result.as_ref().unwrap_err(),
)
.ok();
}

read_result.unwrap().into_iter().for_each(|line| {
let map: Vec<&str> = line.split('.').collect();
if map.len() != 2 {
return;
}

//存在しなければ、keyを作成
PIVOT_KEYWORD
.write()
.unwrap()
.entry(map[0].to_string())
.or_insert(PivotKeyword::new());

PIVOT_KEYWORD
.write()
.unwrap()
.get_mut(&map[0].to_string())
.unwrap()
.fields
.insert(map[1].to_string());
});
}

#[derive(Debug, Clone)]
pub struct EventInfo {
pub evttitle: String,

@@ -1,12 +1,15 @@
extern crate csv;

use crate::detections::configs;
use crate::detections::pivot::insert_pivot_keyword;
use crate::detections::print::AlertMessage;
use crate::detections::print::DetectInfo;
use crate::detections::print::ERROR_LOG_STACK;
use crate::detections::print::MESSAGES;
use crate::detections::print::PIVOT_KEYWORD_LIST_FLAG;
use crate::detections::print::QUIET_ERRORS_FLAG;
use crate::detections::print::STATISTICS_FLAG;
use crate::detections::print::TAGS_CONFIG;
use crate::detections::rule;
use crate::detections::rule::AggResult;
use crate::detections::rule::RuleNode;
@@ -29,6 +32,7 @@ pub struct EvtxRecordInfo {
pub record: Value, // 1レコード分のデータをJSON形式にシリアライズしたもの
pub data_string: String,
pub key_2_value: hashbrown::HashMap<String, String>,
pub record_information: Option<String>,
}

impl EvtxRecordInfo {
@@ -177,6 +181,12 @@ impl Detection {
if !result {
continue;
}

if *PIVOT_KEYWORD_LIST_FLAG {
insert_pivot_keyword(&record_info.record);
continue;
}

// aggregation conditionが存在しない場合はそのまま出力対応を行う
if !agg_condition {
Detection::insert_message(&rule, record_info);
@@ -192,26 +202,32 @@ impl Detection {
.as_vec()
.unwrap_or(&Vec::default())
.iter()
.map(|info| info.as_str().unwrap_or("").replace("attack.", ""))
.filter_map(|info| TAGS_CONFIG.get(info.as_str().unwrap_or(&String::default())))
.map(|str| str.to_owned())
.collect();

let recinfo = record_info
.record_information
.as_ref()
.map(|recinfo| recinfo.to_string());
let detect_info = DetectInfo {
filepath: record_info.evtx_filepath.to_string(),
rulepath: rule.rulepath.to_string(),
level: rule.yaml["level"].as_str().unwrap_or("-").to_string(),
computername: record_info.record["Event"]["System"]["Computer"]
.to_string()
.replace('\"', ""),
eventid: get_serde_number_to_string(&record_info.record["Event"]["System"]["EventID"])
.unwrap_or_else(|| "-".to_owned()),
alert: rule.yaml["title"].as_str().unwrap_or("").to_string(),
detail: String::default(),
tag_info: tag_info.join(" | "),
record_information: recinfo,
};
MESSAGES.lock().unwrap().insert(
&record_info.record,
rule.yaml["details"].as_str().unwrap_or("").to_string(),
DetectInfo {
filepath: record_info.evtx_filepath.to_string(),
rulepath: rule.rulepath.to_string(),
level: rule.yaml["level"].as_str().unwrap_or("-").to_string(),
computername: record_info.record["Event"]["System"]["Computer"]
.to_string()
.replace('\"', ""),
eventid: get_serde_number_to_string(
&record_info.record["Event"]["System"]["EventID"],
)
.unwrap_or_else(|| "-".to_owned()),
alert: rule.yaml["title"].as_str().unwrap_or("").to_string(),
detail: String::default(),
tag_info: tag_info.join(" : "),
},
detect_info,
);
}

@@ -224,19 +240,27 @@ impl Detection {
.map(|info| info.as_str().unwrap_or("").replace("attack.", ""))
.collect();
let output = Detection::create_count_output(rule, &agg_result);
MESSAGES.lock().unwrap().insert_message(
DetectInfo {
filepath: "-".to_owned(),
rulepath: rule.rulepath.to_owned(),
level: rule.yaml["level"].as_str().unwrap_or("").to_owned(),
computername: "-".to_owned(),
eventid: "-".to_owned(),
alert: rule.yaml["title"].as_str().unwrap_or("").to_owned(),
detail: output,
tag_info: tag_info.join(" : "),
},
agg_result.start_timedate,
)
let rec_info = if configs::CONFIG.read().unwrap().args.is_present("full-data") {
Option::Some(String::default())
} else {
Option::None
};
let detect_info = DetectInfo {
filepath: "-".to_owned(),
rulepath: rule.rulepath.to_owned(),
level: rule.yaml["level"].as_str().unwrap_or("").to_owned(),
computername: "-".to_owned(),
eventid: "-".to_owned(),
alert: rule.yaml["title"].as_str().unwrap_or("").to_owned(),
detail: output,
record_information: rec_info,
tag_info: tag_info.join(" : "),
};

MESSAGES
.lock()
.unwrap()
.insert_message(detect_info, agg_result.start_timedate)
}

///aggregation conditionのcount部分の検知出力文の文字列を返す関数
@@ -499,4 +523,7 @@ mod tests {
expected_output
);
}

#[test]
fn test_create_fields_value() {}
}

@@ -1,5 +1,6 @@
pub mod configs;
pub mod detection;
pub mod pivot;
pub mod print;
pub mod rule;
pub mod utils;

src/detections/pivot.rs (new file, 270 lines)
@@ -0,0 +1,270 @@
use hashbrown::HashMap;
use hashbrown::HashSet;
use lazy_static::lazy_static;
use serde_json::Value;
use std::sync::RwLock;

use crate::detections::configs;
use crate::detections::utils::get_serde_number_to_string;

#[derive(Debug)]
pub struct PivotKeyword {
pub keywords: HashSet<String>,
pub fields: HashSet<String>,
}

lazy_static! {
pub static ref PIVOT_KEYWORD: RwLock<HashMap<String, PivotKeyword>> =
RwLock::new(HashMap::new());
}

impl Default for PivotKeyword {
fn default() -> Self {
Self::new()
}
}

impl PivotKeyword {
pub fn new() -> PivotKeyword {
PivotKeyword {
keywords: HashSet::new(),
fields: HashSet::new(),
}
}
}

///levelがlowより大きいレコードの場合、keywordがrecord内にみつかれば、
///それをPIVOT_KEYWORD.keywordsに入れる。
pub fn insert_pivot_keyword(event_record: &Value) {
//levelがlow異常なら続ける
let mut is_exist_event_key = false;
let mut tmp_event_record: &Value = event_record;
for s in ["Event", "System", "Level"] {
if let Some(record) = tmp_event_record.get(s) {
is_exist_event_key = true;
tmp_event_record = record;
}
}
if is_exist_event_key {
let hash_value = get_serde_number_to_string(tmp_event_record);

if hash_value.is_some() && hash_value.as_ref().unwrap() == "infomational"
|| hash_value.as_ref().unwrap() == "undefined"
|| hash_value.as_ref().unwrap() == "-"
{
return;
}
} else {
return;
}

for (_, pivot) in PIVOT_KEYWORD.write().unwrap().iter_mut() {
for field in &pivot.fields {
if let Some(array_str) = configs::EVENTKEY_ALIAS.get_event_key(&String::from(field)) {
let split: Vec<&str> = array_str.split('.').collect();
let mut is_exist_event_key = false;
let mut tmp_event_record: &Value = event_record;
for s in split {
if let Some(record) = tmp_event_record.get(s) {
is_exist_event_key = true;
tmp_event_record = record;
}
}
if is_exist_event_key {
let hash_value = get_serde_number_to_string(tmp_event_record);

if let Some(value) = hash_value {
if value == "-" || value == "127.0.0.1" || value == "::1" {
continue;
}
pivot.keywords.insert(value);
};
}
}
}
}
}

#[cfg(test)]
mod tests {
use crate::detections::configs::load_pivot_keywords;
use crate::detections::pivot::insert_pivot_keyword;
use crate::detections::pivot::PIVOT_KEYWORD;
use serde_json;

//PIVOT_KEYWORDはグローバルなので、他の関数の影響も考慮する必要がある。
#[test]
fn insert_pivot_keyword_local_ip4() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "high"
},
"EventData": {
"IpAddress": "127.0.0.1"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(!PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("127.0.0.1"));
}

#[test]
fn insert_pivot_keyword_ip4() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "high"
},
"EventData": {
"IpAddress": "10.0.0.1"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("10.0.0.1"));
}

#[test]
fn insert_pivot_keyword_ip_empty() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "high"
},
"EventData": {
"IpAddress": "-"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(!PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("-"));
}

#[test]
fn insert_pivot_keyword_local_ip6() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "high"
},
"EventData": {
"IpAddress": "::1"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(!PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("::1"));
}

#[test]
fn insert_pivot_keyword_level_infomational() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "infomational"
},
"EventData": {
"IpAddress": "10.0.0.2"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(!PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("10.0.0.2"));
}

#[test]
fn insert_pivot_keyword_level_low() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "low"
},
"EventData": {
"IpAddress": "10.0.0.1"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("10.0.0.1"));
}

#[test]
fn insert_pivot_keyword_level_none() {
load_pivot_keywords("test_files/config/pivot_keywords.txt");
let record_json_str = r#"
{
"Event": {
"System": {
"Level": "-"
},
"EventData": {
"IpAddress": "10.0.0.3"
}
}
}"#;
insert_pivot_keyword(&serde_json::from_str(record_json_str).unwrap());

assert!(!PIVOT_KEYWORD
.write()
.unwrap()
.get_mut("Ip Addresses")
.unwrap()
.keywords
.contains("10.0.0.3"));
}
}

@@ -31,6 +31,7 @@ pub struct DetectInfo {
pub alert: String,
pub detail: String,
pub tag_info: String,
pub record_information: Option<String>,
}

pub struct AlertMessage {}
@@ -53,6 +54,13 @@ lazy_static! {
.unwrap()
.args
.is_present("statistics");
pub static ref TAGS_CONFIG: HashMap<String, String> =
Message::create_tags_config("config/output_tag.txt");
pub static ref PIVOT_KEYWORD_LIST_FLAG: bool = configs::CONFIG
.read()
.unwrap()
.args
.is_present("pivot-keywords-list");
}

impl Default for Message {
@@ -67,6 +75,33 @@ impl Message {
Message { map: messages }
}

/// ファイルパスで記載されたtagでのフル名、表示の際に置き換えられる文字列のHashMapを作成する関数。tagではこのHashMapのキーに対応しない出力は出力しないものとする
/// ex. attack.impact,Impact
pub fn create_tags_config(path: &str) -> HashMap<String, String> {
let read_result = utils::read_csv(path);
if read_result.is_err() {
AlertMessage::alert(
&mut BufWriter::new(std::io::stderr().lock()),
read_result.as_ref().unwrap_err(),
)
.ok();
return HashMap::default();
}
let mut ret: HashMap<String, String> = HashMap::new();
read_result.unwrap().into_iter().for_each(|line| {
if line.len() != 2 {
return;
}

let empty = &"".to_string();
let tag_full_str = line.get(0).unwrap_or(empty).trim();
let tag_replace_str = line.get(1).unwrap_or(empty).trim();

ret.insert(tag_full_str.to_owned(), tag_replace_str.to_owned());
});
ret
}

/// メッセージの設定を行う関数。aggcondition対応のためrecordではなく出力をする対象時間がDatetime形式での入力としている
pub fn insert_message(&mut self, detect_info: DetectInfo, event_time: DateTime<Utc>) {
if let Some(v) = self.map.get_mut(&event_time) {
@@ -217,6 +252,7 @@ impl AlertMessage {
mod tests {
use crate::detections::print::DetectInfo;
use crate::detections::print::{AlertMessage, Message};
use hashbrown::HashMap;
use serde_json::Value;
use std::io::BufWriter;

@@ -250,6 +286,7 @@ mod tests {
alert: "test1".to_string(),
detail: String::default(),
tag_info: "txxx.001".to_string(),
record_information: Option::Some("record_information1".to_string()),
},
);

@@ -280,6 +317,7 @@ mod tests {
alert: "test2".to_string(),
detail: String::default(),
tag_info: "txxx.002".to_string(),
record_information: Option::Some("record_information2".to_string()),
},
);

@@ -310,6 +348,7 @@ mod tests {
alert: "test3".to_string(),
detail: String::default(),
tag_info: "txxx.003".to_string(),
record_information: Option::Some("record_information3".to_string()),
},
);

@@ -335,12 +374,13 @@ mod tests {
alert: "test4".to_string(),
detail: String::default(),
tag_info: "txxx.004".to_string(),
record_information: Option::Some("record_information4".to_string()),
},
);

let display = format!("{}", format_args!("{:?}", message));
println!("display::::{}", display);
let expect = "Message { map: {1970-01-01T00:00:00Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule4\", level: \"medium\", computername: \"testcomputer4\", eventid: \"4\", alert: \"test4\", detail: \"CommandLine4: hoge\", tag_info: \"txxx.004\" }], 1996-02-27T01:05:01Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule\", level: \"high\", computername: \"testcomputer1\", eventid: \"1\", alert: \"test1\", detail: \"CommandLine1: hoge\", tag_info: \"txxx.001\" }, DetectInfo { filepath: \"a\", rulepath: \"test_rule2\", level: \"high\", computername: \"testcomputer2\", eventid: \"2\", alert: \"test2\", detail: \"CommandLine2: hoge\", tag_info: \"txxx.002\" }], 2000-01-21T09:06:01Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule3\", level: \"high\", computername: \"testcomputer3\", eventid: \"3\", alert: \"test3\", detail: \"CommandLine3: hoge\", tag_info: \"txxx.003\" }]} }";
let expect = "Message { map: {1970-01-01T00:00:00Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule4\", level: \"medium\", computername: \"testcomputer4\", eventid: \"4\", alert: \"test4\", detail: \"CommandLine4: hoge\", tag_info: \"txxx.004\", record_information: Some(\"record_information4\") }], 1996-02-27T01:05:01Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule\", level: \"high\", computername: \"testcomputer1\", eventid: \"1\", alert: \"test1\", detail: \"CommandLine1: hoge\", tag_info: \"txxx.001\", record_information: Some(\"record_information1\") }, DetectInfo { filepath: \"a\", rulepath: \"test_rule2\", level: \"high\", computername: \"testcomputer2\", eventid: \"2\", alert: \"test2\", detail: \"CommandLine2: hoge\", tag_info: \"txxx.002\", record_information: Some(\"record_information2\") }], 2000-01-21T09:06:01Z: [DetectInfo { filepath: \"a\", rulepath: \"test_rule3\", level: \"high\", computername: \"testcomputer3\", eventid: \"3\", alert: \"test3\", detail: \"CommandLine3: hoge\", tag_info: \"txxx.003\", record_information: Some(\"record_information3\") }]} }";
assert_eq!(display, expect);
}

@@ -461,4 +501,18 @@ mod tests {
expected,
);
}
#[test]
/// output_tag.txtの読み込みテスト
fn test_load_output_tag() {
let actual = Message::create_tags_config("test_files/config/output_tag.txt");
let expected: HashMap<String, String> = HashMap::from([
("attack.impact".to_string(), "Impact".to_string()),
("xxx".to_string(), "yyy".to_string()),
]);

assert_eq!(actual.len(), expected.len());
for (k, v) in expected.iter() {
assert!(actual.get(k).unwrap_or(&String::default()) == v);
}
}
}

@@ -10,11 +10,13 @@ use tokio::runtime::Runtime;
use chrono::{DateTime, TimeZone, Utc};
use regex::Regex;
use serde_json::Value;
use std::cmp::Ordering;
use std::fs::File;
use std::io::prelude::*;
use std::io::{BufRead, BufReader};
use std::str;
use std::string::String;
use std::vec;

use super::detection::EvtxRecordInfo;

@@ -87,7 +89,7 @@ pub fn read_csv(filename: &str) -> Result<Vec<Vec<String>>, String> {
return Result::Err(e.to_string());
}

let mut rdr = csv::Reader::from_reader(contents.as_bytes());
let mut rdr = csv::ReaderBuilder::new().from_reader(contents.as_bytes());
rdr.records().for_each(|r| {
if r.is_err() {
return;
@@ -199,15 +201,6 @@ pub fn create_tokio_runtime() -> Runtime {

// EvtxRecordInfoを作成します。
pub fn create_rec_info(data: Value, path: String, keys: &[String]) -> EvtxRecordInfo {
// EvtxRecordInfoを作る
let data_str = data.to_string();
let mut rec = EvtxRecordInfo {
evtx_filepath: path,
record: data,
data_string: data_str,
key_2_value: hashbrown::HashMap::new(),
};

// 高速化のための処理

// 例えば、Value型から"Event.System.EventID"の値を取得しようとすると、value["Event"]["System"]["EventID"]のように3回アクセスする必要がある。
@@ -215,8 +208,9 @@ pub fn create_rec_info(data: Value, path: String, keys: &[String]) -> EvtxRecord
// これなら、"Event.System.EventID"というキーを1回指定するだけで値を取得できるようになるので、高速化されるはず。
// あと、serde_jsonのValueからvalue["Event"]みたいな感じで値を取得する処理がなんか遅いので、そういう意味でも早くなるかも
// それと、serde_jsonでは内部的に標準ライブラリのhashmapを使用しているが、hashbrownを使った方が早くなるらしい。
let mut key_2_values = hashbrown::HashMap::new();
for key in keys {
let val = get_event_value(key, &rec.record);
let val = get_event_value(key, &data);
if val.is_none() {
continue;
}
@@ -226,10 +220,110 @@ pub fn create_rec_info(data: Value, path: String, keys: &[String]) -> EvtxRecord
continue;
}

rec.key_2_value.insert(key.trim().to_string(), val.unwrap());
key_2_values.insert(key.to_string(), val.unwrap());
}

rec
// EvtxRecordInfoを作る
let data_str = data.to_string();
let rec_info = if configs::CONFIG.read().unwrap().args.is_present("full-data") {
Option::Some(create_recordinfos(&data))
} else {
Option::None
};
EvtxRecordInfo {
evtx_filepath: path,
record: data,
data_string: data_str,
key_2_value: key_2_values,
record_information: rec_info,
}
}

/**
* CSVのrecord infoカラムに出力する文字列を作る
*/
fn create_recordinfos(record: &Value) -> String {
let mut output = vec![];
_collect_recordinfo(&mut vec![], "", record, &mut output);

// 同じレコードなら毎回同じ出力になるようにソートしておく
output.sort_by(|(left, left_data), (right, right_data)| {
let ord = left.cmp(right);
if ord == Ordering::Equal {
left_data.cmp(right_data)
} else {
ord
}
});

let summary: Vec<String> = output
.iter()
.map(|(key, value)| {
return format!("{}:{}", key, value);
})
.collect();

// 標準出力する時はセルがハイプ区切りになるので、パイプ区切りにしない
if configs::CONFIG.read().unwrap().args.is_present("output") {
summary.join(" | ")
} else {
summary.join(" ")
}
}

/**
* CSVのfieldsカラムに出力する要素を全て収集する
*/
fn _collect_recordinfo<'a>(
keys: &mut Vec<&'a str>,
parent_key: &'a str,
value: &'a Value,
output: &mut Vec<(String, String)>,
) {
match value {
Value::Array(ary) => {
for sub_value in ary {
_collect_recordinfo(keys, parent_key, sub_value, output);
}
}
Value::Object(obj) => {
// lifetimeの関係でちょっと変な実装になっている
if !parent_key.is_empty() {
keys.push(parent_key);
}
for (key, value) in obj {
// 属性は出力しない
if key.ends_with("_attributes") {
continue;
}
// Event.Systemは出力しない
if key.eq("System") && keys.get(0).unwrap_or(&"").eq(&"Event") {
continue;
}

_collect_recordinfo(keys, key, value, output);
}
if !parent_key.is_empty() {
keys.pop();
}
}
Value::Null => (),
_ => {
// 一番子の要素の値しか収集しない
let strval = value_to_string(value);
if let Some(strval) = strval {
let strval = strval.trim().chars().fold(String::default(), |mut acc, c| {
if c.is_control() || c.is_ascii_whitespace() {
acc.push(' ');
} else {
acc.push(c);
};
acc
});
output.push((parent_key.to_string(), strval));
}
}
}
}

#[cfg(test)]
@@ -238,6 +332,66 @@ mod tests {
use regex::Regex;
use serde_json::Value;

#[test]
fn test_create_recordinfos() {
let record_json_str = r#"
{
"Event": {
"System": {"EventID": 4103, "Channel": "PowerShell", "Computer":"DESKTOP-ICHIICHI"},
"UserData": {"User": "u1", "AccessMask": "%%1369", "Process":"lsass.exe"},
"UserData_attributes": {"xmlns": "http://schemas.microsoft.com/win/2004/08/events/event"}
},
"Event_attributes": {"xmlns": "http://schemas.microsoft.com/win/2004/08/events/event"}
}"#;

match serde_json::from_str(record_json_str) {
Ok(record) => {
let ret = utils::create_recordinfos(&record);
// Systemは除外される/属性(_attributesも除外される)/key順に並ぶ
let expected = "AccessMask:%%1369 Process:lsass.exe User:u1".to_string();
assert_eq!(ret, expected);
}
Err(_) => {
panic!("Failed to parse json record.");
}
}
}

#[test]
fn test_create_recordinfos2() {
// EventDataの特殊ケース
let record_json_str = r#"
{
"Event": {
"System": {"EventID": 4103, "Channel": "PowerShell", "Computer":"DESKTOP-ICHIICHI"},
"EventData": {
"Binary": "hogehoge",
"Data":[
"Data1",
"DataData2",
"",
"DataDataData3"
]
},
"EventData_attributes": {"xmlns": "http://schemas.microsoft.com/win/2004/08/events/event"}
},
"Event_attributes": {"xmlns": "http://schemas.microsoft.com/win/2004/08/events/event"}
}"#;

match serde_json::from_str(record_json_str) {
Ok(record) => {
let ret = utils::create_recordinfos(&record);
// Systemは除外される/属性(_attributesも除外される)/key順に並ぶ
let expected = "Binary:hogehoge Data: Data:Data1 Data:DataData2 Data:DataDataData3"
.to_string();
assert_eq!(ret, expected);
}
Err(_) => {
panic!("Failed to parse json record.");
}
}
}

#[test]
fn test_check_regex() {
let regexes: Vec<Regex> =

@@ -59,7 +59,7 @@ impl RuleExclude {
ERROR_LOG_STACK
.lock()
.unwrap()
.push(format!("[WARN] {} does not exist", filename));
.push(format!("{} does not exist", filename));
}
return;
}

src/main.rs (114 changes)
@@ -9,9 +9,12 @@ use chrono::{DateTime, Datelike, Local, TimeZone};
use evtx::{EvtxParser, ParserSettings};
use git2::Repository;
use hashbrown::{HashMap, HashSet};
use hayabusa::detections::configs::load_pivot_keywords;
use hayabusa::detections::detection::{self, EvtxRecordInfo};
use hayabusa::detections::pivot::PIVOT_KEYWORD;
use hayabusa::detections::print::{
AlertMessage, ERROR_LOG_PATH, ERROR_LOG_STACK, QUIET_ERRORS_FLAG, STATISTICS_FLAG,
AlertMessage, ERROR_LOG_PATH, ERROR_LOG_STACK, PIVOT_KEYWORD_LIST_FLAG, QUIET_ERRORS_FLAG,
STATISTICS_FLAG,
};
use hayabusa::detections::rule::{get_detection_keys, RuleNode};
use hayabusa::filter;
@@ -24,14 +27,15 @@ use hhmmss::Hhmmss;
use pbr::ProgressBar;
use serde_json::Value;
use std::cmp::Ordering;
use std::ffi::OsStr;
use std::ffi::{OsStr, OsString};
use std::fmt::Display;
use std::fs::create_dir;
use std::io::BufWriter;
use std::io::{BufWriter, Write};
use std::path::Path;
use std::sync::Arc;
use std::time::SystemTime;
use std::{
env,
fs::{self, File},
path::PathBuf,
vec,
@@ -41,7 +45,7 @@ use tokio::spawn;
use tokio::task::JoinHandle;

#[cfg(target_os = "windows")]
use {is_elevated::is_elevated, std::env};
use is_elevated::is_elevated;

// 一度にtimelineやdetectionを実行する行数
const MAX_DETECT_RECORDS: usize = 5000;
@@ -72,6 +76,10 @@ impl App {
}

fn exec(&mut self) {
if *PIVOT_KEYWORD_LIST_FLAG {
load_pivot_keywords("config/pivot_keywords.txt");
}

let analysis_start_time: DateTime<Local> = Local::now();

// Show usage when no arguments.
@@ -90,6 +98,17 @@ impl App {
&analysis_start_time.day().to_owned()
));
}

if !self.is_matched_architecture_and_binary() {
AlertMessage::alert(
&mut BufWriter::new(std::io::stderr().lock()),
"The hayabusa version you ran does not match your PC architecture.\n Please use the correct architecture. (Binary ending in -x64.exe for 64-bit and -x86.exe for 32-bit.)",
)
.ok();
println!();
return;
}

if configs::CONFIG
.read()
.unwrap()
@@ -124,6 +143,20 @@ impl App {
}

if let Some(csv_path) = configs::CONFIG.read().unwrap().args.value_of("output") {
for (key, _) in PIVOT_KEYWORD.read().unwrap().iter() {
let keywords_file_name = csv_path.to_owned() + "-" + key + ".txt";
if Path::new(&keywords_file_name).exists() {
AlertMessage::alert(
&mut BufWriter::new(std::io::stderr().lock()),
&format!(
" The file {} already exists. Please specify a different filename.",
&keywords_file_name
),
)
.ok();
return;
}
}
if Path::new(csv_path).exists() {
AlertMessage::alert(
&mut BufWriter::new(std::io::stderr().lock()),
@@ -136,6 +169,7 @@ impl App {
return;
}
}

if *STATISTICS_FLAG {
println!("Generating Event ID Statistics");
println!();
@@ -234,6 +268,60 @@ impl App {
if ERROR_LOG_STACK.lock().unwrap().len() > 0 {
AlertMessage::create_error_log(ERROR_LOG_PATH.to_string());
}

if *PIVOT_KEYWORD_LIST_FLAG {
//ファイル出力の場合
if let Some(pivot_file) = configs::CONFIG.read().unwrap().args.value_of("output") {
for (key, pivot_keyword) in PIVOT_KEYWORD.read().unwrap().iter() {
let mut f = BufWriter::new(
fs::File::create(pivot_file.to_owned() + "-" + key + ".txt").unwrap(),
);
let mut output = "".to_string();
output += &format!("{}: ", key).to_string();

output += "( ";
for i in pivot_keyword.fields.iter() {
output += &format!("%{}% ", i).to_string();
}
output += "):";
output += "\n";

for i in pivot_keyword.keywords.iter() {
output += &format!("{}\n", i).to_string();
}

f.write_all(output.as_bytes()).unwrap();
}

//output to stdout
let mut output =
"Pivot keyword results saved to the following files:\n".to_string();
for (key, _) in PIVOT_KEYWORD.read().unwrap().iter() {
output += &(pivot_file.to_owned() + "-" + key + ".txt" + "\n");
}
println!("{}", output);
} else {
//標準出力の場合
let mut output = "The following pivot keywords were found:\n".to_string();
for (key, pivot_keyword) in PIVOT_KEYWORD.read().unwrap().iter() {
output += &format!("{}: ", key).to_string();

output += "( ";
for i in pivot_keyword.fields.iter() {
output += &format!("%{}% ", i).to_string();
}
output += "):";
output += "\n";

for i in pivot_keyword.keywords.iter() {
output += &format!("{}\n", i).to_string();
}

output += "\n";
}
print!("{}", output);
}
}
}

#[cfg(not(target_os = "windows"))]
@@ -368,7 +456,7 @@ impl App {
pb.inc();
}
detection.add_aggcondition_msges(&self.rt);
if !*STATISTICS_FLAG {
if !*STATISTICS_FLAG && !*PIVOT_KEYWORD_LIST_FLAG {
after_fact();
}
}
@@ -750,6 +838,22 @@ impl App {
Ok("You currently have the latest rules.".to_string())
}
}

/// check architecture
fn is_matched_architecture_and_binary(&self) -> bool {
if cfg!(target_os = "windows") {
let is_processor_arch_32bit = env::var_os("PROCESSOR_ARCHITECTURE")
.unwrap_or_default()
.eq("x86");
// PROCESSOR_ARCHITEW6432は32bit環境には存在しないため、環境変数存在しなかった場合は32bit環境であると判断する
let not_wow_flag = env::var_os("PROCESSOR_ARCHITEW6432")
.unwrap_or_else(|| OsString::from("x86"))
.eq("x86");
return (cfg!(target_pointer_width = "64") && !is_processor_arch_32bit)
|| (cfg!(target_pointer_width = "32") && is_processor_arch_32bit && not_wow_flag);
}
true
}
}

#[cfg(test)]

test_files/config/output_tag.txt (new file, 3 lines)
@@ -0,0 +1,3 @@
tag_full_str,tag_output_str
attack.impact,Impact
xxx,yyy

test_files/config/pivot_keywords.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
Users.SubjectUserName
Users.TargetUserName
Users.User
Logon IDs.SubjectLogonId
Logon IDs.TargetLogonId
Workstation Names.WorkstationName
Ip Addresses.IpAddress
Processes.Image