Merge pull request #631 from Yamato-Security/165-customize-columns-with-output-profiles

customize columns with output profiles
This commit is contained in:
DustInDark
2022-08-03 19:00:24 +09:00
committed by GitHub
22 changed files with 1107 additions and 525 deletions

View File

@@ -1,5 +1,19 @@
# 変更点
## v1.5.0 [2022/XX/XX]
**新機能:**
- `config/profiles.yaml`と`config/default_profile.yaml`の設定ファイルで、出力内容をカスタマイズできる。 (#165) (@hitenkoku)
**改善:**
- XXX
**バグ修正:**
- VC再頒布パッケージがインストールされていない環境でエラーが発生している状態を修正した。 (#635) (@fukusuket)
## v1.4.3 [2022/08/03]
**バグ修正:**

View File

@@ -1,5 +1,19 @@
# Changes
## v1.5.0 [2022/XX/XX]
**New Features:**
- Customizable output of fields defined at `config/profiles.yaml` and `config/default_profile.yaml` (#165) (@hitenkoku)
**Enhancements:**
- XXX
**Bug Fixes:**
- XXX
## v1.4.3 [2022/08/03]
**Bug Fixes:**

38
Cargo.lock generated
View File

@@ -39,9 +39,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.59"
version = "1.0.60"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c91f1f46651137be86f3a2b9a8359f9ab421d04d941c62b5982e1ca21113adf9"
checksum = "c794e162a5eff65c72ef524dfe393eb923c354e350bb78b9c7383df13f3bc142"
[[package]]
name = "arrayref"
@@ -169,7 +169,7 @@ checksum = "4acbb09d9ee8e23699b9634375c72795d095bf268439da88562cf9b501f181fa"
dependencies = [
"camino",
"cargo-platform",
"semver 1.0.12",
"semver 1.0.13",
"serde",
"serde_json",
]
@@ -528,7 +528,7 @@ dependencies = [
[[package]]
name = "evtx"
version = "0.7.3"
source = "git+https://github.com/Yamato-Security/hayabusa-evtx.git#773f4500a62e277a426167301960361320836255"
source = "git+https://github.com/Yamato-Security/hayabusa-evtx.git#cc2e4ec1c314daea1b0da11c908825140c42af4b"
dependencies = [
"anyhow",
"bitflags",
@@ -705,7 +705,7 @@ dependencies = [
[[package]]
name = "hayabusa"
version = "1.4.2"
version = "1.5.0-dev"
dependencies = [
"base64",
"bytesize",
@@ -1327,9 +1327,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.42"
version = "1.0.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c278e965f1d8cf32d6e0e96de3d3e79712178ae67986d9cf9151f51e95aac89b"
checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
dependencies = [
"unicode-ident",
]
@@ -1369,9 +1369,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.20"
version = "1.0.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
dependencies = [
"proc-macro2",
]
@@ -1515,9 +1515,9 @@ dependencies = [
[[package]]
name = "ryu"
version = "1.0.10"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
[[package]]
name = "same-file"
@@ -1545,9 +1545,9 @@ dependencies = [
[[package]]
name = "semver"
version = "1.0.12"
version = "1.0.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711"
dependencies = [
"serde",
]
@@ -1733,9 +1733,9 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "syn"
version = "1.0.98"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
dependencies = [
"proc-macro2",
"quote",
@@ -1813,18 +1813,18 @@ checksum = "b1141d4d61095b28419e22cb0bbf02755f5e54e0526f97f1e3d1d160e60885fb"
[[package]]
name = "thiserror"
version = "1.0.31"
version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.31"
version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21"
dependencies = [
"proc-macro2",
"quote",

View File

@@ -1,6 +1,6 @@
[package]
name = "hayabusa"
version = "1.4.3"
version = "1.5.0-dev"
authors = ["Yamato Security @SecurityYamato"]
edition = "2021"

View File

@@ -60,6 +60,7 @@ Hayabusaは、日本の[Yamato Security](https://yamatosecurity.connpass.com/)
- [ログオン情報の要約](#ログオン情報の要約)
- [サンプルevtxファイルでHayabusaをテストする](#サンプルevtxファイルでhayabusaをテストする)
- [Hayabusaの出力](#hayabusaの出力)
- [プロファイルによる出力のカスタマイズ](#プロファイルによる出力のカスタマイズ)
- [Levelの省略](#levelの省略)
- [MITRE ATT&CK戦術の省略](#mitre-attck戦術の省略)
- [Channel情報の省略](#channel情報の省略)
@@ -326,43 +327,43 @@ USAGE:
hayabusa.exe -f file.evtx [OPTIONS] / hayabusa.exe -d evtx-directory [OPTIONS]
OPTIONS:
--European-time ヨーロッパ形式で日付と時刻を出力する (例: 22-02-2022 22:00:00.123 +02:00)
--RFC-2822 RFC 2822形式で日付と時刻を出力する (例: Fri, 22 Feb 2022 22:00:00 -0600)
--RFC-3339 RFC 3339形式で日付と時刻を出力する (例: 2022-02-22 22:00:00.123456-06:00)
--US-military-time 24時間制(ミリタリータイム)のアメリカ形式で日付と時刻を出力する (例: 02-22-2022 22:00:00.123 -06:00)
--US-time アメリカ形式で日付と時刻を出力する (例: 02-22-2022 10:00:00.123 PM -06:00)
--target-file-ext <EVTX_FILE_EXT>... evtx以外の拡張子を解析対象に追加する。 (例1: evtx_data 例evtx1 evtx2)
--all-tags 出力したCSVファイルにルール内のタグ情報を全て出力する
-c, --rules-config <RULE_CONFIG_DIRECTORY> ルールフォルダのコンフィグディレクトリ (デフォルト: ./rules/config)
--contributors コントリビュータの一覧表示
-d, --directory <DIRECTORY> .evtxファイルを持つディレクトリのパス
-D, --deep-scan すべてのイベントIDを対象にしたスキャンを行う
--enable-deprecated-rules Deprecatedルールを有効にする
--end-timeline <END_TIMELINE> 解析対象とするイベントログの終了時刻 (例: "2022-02-22 23:59:59 +09:00")
--exclude-status <EXCLUDE_STATUS>... 読み込み対象外とするルール内でのステータス (ex: experimental) (ex: stable test)
-f, --filepath <FILE_PATH> 1つの.evtxファイルに対して解析を行う
-F, --full-data 全てのフィールド情報を出力する
-h, --help ヘルプ情報を表示する
-l, --live-analysis ローカル端末のC:\Windows\System32\winevt\Logsフォルダを解析する
-L, --logon-summary 成功と失敗したログオン情報の要約を出力する
--level-tuning [<LEVEL_TUNING_FILE>] ルールlevelのチューニング (デフォルト: ./rules/config/level_tuning.txt)
-m, --min-level <LEVEL> 結果出力をするルールの最低レベル (デフォルト: informational)
-n, --enable-noisy-rules Noisyルールを有効にする
--no_color カラー出力を無効にする
-o, --output <CSV_TIMELINE> タイムラインをCSV形式で保存する (例: results.csv)
-p, --pivot-keywords-list ピボットキーワードの一覧作成
-q, --quiet Quietモード: 起動バナーを表示しない
-Q, --quiet-errors Quiet errorsモード: エラーログを保存しない
-r, --rules <RULE_DIRECTORY/RULE_FILE> ルールファイルまたはルールファイルを持つディレクトリ (デフォルト: ./rules)
-R, --hide-record-id イベントレコードIDを表示しない
-s, --statistics イベントIDの統計情報を表示する
--start-timeline <START_TIMELINE> 解析対象とするイベントログの開始時刻 (例: "2020-02-22 00:00:00 +09:00")
-t, --thread-number <NUMBER> スレッド数 (デフォルト: パフォーマンスに最適な数値)
-u, --update-rules rulesフォルダをhayabusa-rulesのgithubリポジトリの最新版に更新する
-U, --UTC UTC形式で日付と時刻を出力する (デフォルト: 現地時間)
-v, --verbose 詳細な情報を出力する
-V, --visualize-timeline イベント頻度タイムラインを出力する
--version バージョン情報を表示する
--European-time ヨーロッパ形式で日付と時刻を出力する (例: 22-02-2022 22:00:00.123 +02:00)
--RFC-2822 RFC 2822形式で日付と時刻を出力する (例: Fri, 22 Feb 2022 22:00:00 -0600)
--RFC-3339 RFC 3339形式で日付と時刻を出力する (例: 2022-02-22 22:00:00.123456-06:00)
--US-military-time 24時間制(ミリタリータイム)のアメリカ形式で日付と時刻を出力する (例: 02-22-2022 22:00:00.123 -06:00)
--US-time アメリカ形式で日付と時刻を出力する (例: 02-22-2022 10:00:00.123 PM -06:00)
--all-tags 出力したCSVファイルにルール内のタグ情報を全て出力する
-c, --rules-config <RULE_CONFIG_DIRECTORY> ルールフォルダのコンフィグディレクトリ (デフォルト: ./rules/config)
--contributors コントリビュータの一覧表示
-d, --directory <DIRECTORY> .evtxファイルを持つディレクトリのパス
-D, --deep-scan すべてのイベントIDを対象にしたスキャンを行う
--enable-deprecated-rules Deprecatedルールを有効にする
--end-timeline <END_TIMELINE> 解析対象とするイベントログの終了時刻 (例: "2022-02-22 23:59:59 +09:00")
--exclude-status <EXCLUDE_STATUS>... 読み込み対象とするルール内でのステータス (ex: experimental) (ex: stable test)
-f, --filepath <FILE_PATH> 1つの.evtxファイルに対して解析を行う
-h, --help ヘルプ情報を表示する
-l, --live-analysis ローカル端末のC:\Windows\System32\winevt\Logsフォルダを解析する
-L, --logon-summary 成功と失敗したログオン情報の要約を出力する
--level-tuning [<LEVEL_TUNING_FILE>] ルールlevelのチューニング (デフォルト: ./rules/config/level_tuning.txt)
-m, --min-level <LEVEL> 結果出力するルールの最低レベル (デフォルト: informational)
-n, --enable-noisy-rules Noisyルールを有効にする
--no_color カラー出力を無効にする
-o, --output <CSV_TIMELINE> タイムラインをCSV形式で保存する (例: results.csv)
-p, --pivot-keywords-list ピボットキーワードの一覧作成
-P, --profile <PROFILE> 利用する出力プロファイル名を指定する
-q, --quiet Quietモード: 起動バナーを表示しない
-Q, --quiet-errors Quiet errorsモード: エラーログを保存しない
-r, --rules <RULE_DIRECTORY/RULE_FILE> ルールファイルまたはルールファイルを持つディレクトリ (デフォルト: ./rules)
-s, --statistics イベントIDの統計情報を表示する
--set-default-profile <SET_DEFAULT_PROFILE> デフォルトの出力コンフィグを設定する
--start-timeline <START_TIMELINE> 解析対象とするイベントログの開始時刻 (例: "2020-02-22 00:00:00 +09:00")
-t, --thread-number <NUMBER> スレッド数 (デフォルト: パフォーマンスに最適な数値)
--target-file-ext <EVTX_FILE_EXT>... evtx以外の拡張子を解析対象に追加する。 (例1: evtx_data 例evtx1 evtx2)
-u, --update-rules rulesフォルダをhayabusa-rulesのgithubリポジトリの最新版に更新する
-U, --UTC UTC形式で日付と時刻を出力する (デフォルト: 現地時間)
-v, --verbose 詳細な情報を出力する
-V, --visualize-timeline イベント頻度タイムラインを出力する
--version バージョン情報を表示する
```
## 使用例
@@ -498,7 +499,7 @@ git clone https://github.com/Yamato-Security/hayabusa-sample-evtx.git
# Hayabusaの出力
Hayabusaの結果を標準出力に表示しているときデフォルトは、以下の情報を表示ます:
Hayabusaの結果を標準出力に表示しているときデフォルトは、以下の情報を表示することができます:
* `Timestamp`: デフォルトでは`YYYY-MM-DD HH:mm:ss.sss +hh:mm`形式になっています。イベントログの`<Event><System><TimeCreated SystemTime>`フィールドから来ています。デフォルトのタイムゾーンはローカルのタイムゾーンになりますが、`--utc` オプションで UTC に変更することができます。
* `Computer`: イベントログの`<Event><System><Computer>`フィールドから来ています。
@@ -506,16 +507,42 @@ Hayabusaの結果を標準出力に表示しているときデフォルト
* `Event ID`: イベントログの`<Event><System><EventID>`フィールドから来ています。
* `Level`: YML検知ルールの`level`フィールドから来ています。(例:`informational`, `low`, `medium`, `high`, `critical`) デフォルトでは、すべてのレベルのアラートとイベントが出力されますが、`-m`オプションで最低のレベルを指定することができます。例えば`-m high`オプションを付けると、`high``critical`アラートしか出力されません。
* `Title`: YML検知ルールの`title`フィールドから来ています。
* `RecordID`: イベントレコードIDです。`<Event><System><EventRecordID>`フィールドから来ています。`-R`もしくは`--hide-record-id`オプションを付けると表示されません。
* `RecordID`: イベントレコードIDです。`<Event><System><EventRecordID>`フィールドから来ています。
* `Details`: YML検知ルールの`details`フィールドから来ていますが、このフィールドはHayabusaルールにしかありません。このフィールドはアラートとイベントに関する追加情報を提供し、ログのフィールドから有用なデータを抽出することができます。イベントキーのマッピングが間違っている場合、もしくはフィールドが存在しない場合で抽出ができなかった箇所は`n/a` (not available)と記載されます。YML検知ルールに`details`フィールドが存在しない時の`details`のメッセージを`./rules/config/default_details.txt`で設定できます。`default_details.txt`では`Provider Name`、`EventID`、`details`の組み合わせで設定することができます。`default_details.txt`やYML検知ルールに対応するルールが記載されていない場合はすべてのフィールド情報を出力します。
CSVファイルとして保存する場合、以下の列が追加されます:
* `MitreAttack`: MITRE ATT&CKの戦術。
* `RuleFile`: アラートまたはイベントを生成した検知ルールのファイル名。
* `EvtxFile`: アラートまたはイベントを起こしたevtxファイルへのパス。
* `RecordInformation`: すべてのフィールド情報。
`-F`もしくは`--full-data`オプションを指定した場合、全てのフィールド情報が`RecordInformation`カラムに出力されます。
## プロファイルによる出力のカスタマイズ
Hayabusaの出力内容は`config/profiles.yaml`と`config/default_profile.yaml`を変更することでカスタマイズできます。カスタマイズではHayabusaの出力で用いられている内容を以下のエイリアスで呼び出すことができます。
もし、`config/profiles.yaml`に書いてあるプロファイルを用いたい場合は`-P/--profile`オプションを利用してください。
`config/default_profile.yaml`を`config/profiles.yaml`に書かれているプロファイルで上書きしたい場合は`--set-default-profile`オプションを利用してください。
|エイリアス名|Hayabusaの出力にある情報|
|:---|:---|
|%Timestamp% | `Timestamp` |
|%Computer% | `Computer` |
|%Channel% | `Channel` |
|%Level% | `Level` |
|%EventID% | `EventID` |
|%MitreAttack% | `MitreAttack` |
|%RecordID% | `RecordID` |
|%RuleTitle% | `Title` |
|%Details% | `Details` |
|%RecordInformation% | `RecordInformation` |
|%RuleFile% | `RuleFile` |
|%EvtxFile% | `EvtxFile` |
`config/profiles.yaml`への記載例:
```yaml
(profilename):
(column name): '%Timestamp%'
(column name2): '%Computer%'
(column name3): '%Channel%'
```
## Levelの省略

117
README.md
View File

@@ -59,6 +59,7 @@ Hayabusa is a **Windows event log fast forensics timeline generator** and **thre
- [Logon Summary Generator](#logon-summary-generator)
- [Testing Hayabusa on Sample Evtx Files](#testing-hayabusa-on-sample-evtx-files)
- [Hayabusa Output](#hayabusa-output)
- [Output customize by profile](#output-customize-by-profile)
- [Level Abbrevations](#level-abbrevations)
- [MITRE ATT&CK Tactics Abbreviations](#mitre-attck-tactics-abbreviations)
- [Channel Abbreviations](#channel-abbreviations)
@@ -322,43 +323,43 @@ USAGE:
hayabusa.exe -f file.evtx [OPTIONS] / hayabusa.exe -d evtx-directory [OPTIONS]
OPTIONS:
--European-time Output timestamp in European time format (ex: 22-02-2022 22:00:00.123 +02:00)
--RFC-2822 Output timestamp in RFC 2822 format (ex: Fri, 22 Feb 2022 22:00:00 -0600)
--RFC-3339 Output timestamp in RFC 3339 format (ex: 2022-02-22 22:00:00.123456-06:00)
--US-military-time Output timestamp in US military time format (ex: 02-22-2022 22:00:00.123 -06:00)
--US-time Output timestamp in US time format (ex: 02-22-2022 10:00:00.123 PM -06:00)
--target-file-ext <EVTX_FILE_EXT>... Specify additional target file extensions (ex: evtx_data) (ex: evtx1 evtx2)
--all-tags Output all tags when saving to a CSV file
-c, --rules-config <RULE_CONFIG_DIRECTORY> Specify custom rule config folder (default: ./rules/config)
--contributors Print the list of contributors
-d, --directory <DIRECTORY> Directory of multiple .evtx files
-D, --deep-scan Disable event ID filter to scan all events
--enable-deprecated-rules Enable rules marked as deprecated
--end-timeline <END_TIMELINE> End time of the event logs to load (ex: "2022-02-22 23:59:59 +09:00")
--exclude-status <EXCLUDE_STATUS>... Ignore rules according to status (ex: experimental) (ex: stable test)
-f, --filepath <FILE_PATH> File path to one .evtx file
-F, --full-data Print all field information
-h, --help Print help information
-l, --live-analysis Analyze the local C:\Windows\System32\winevt\Logs folder
-L, --logon-summary Print a summary of successful and failed logons
--level-tuning [<LEVEL_TUNING_FILE>] Tune alert levels (default: ./rules/config/level_tuning.txt)
-m, --min-level <LEVEL> Minimum level for rules (default: informational)
-n, --enable-noisy-rules Enable rules marked as noisy
--no-color Disable color output
-o, --output <CSV_TIMELINE> Save the timeline in CSV format (ex: results.csv)
-p, --pivot-keywords-list Create a list of pivot keywords
-q, --quiet Quiet mode: do not display the launch banner
-Q, --quiet-errors Quiet errors mode: do not save error logs
-r, --rules <RULE_DIRECTORY/RULE_FILE> Specify a rule directory or file (default: ./rules)
-R, --hide-record-ID Do not display EventRecordID numbers
-s, --statistics Print statistics of event IDs
--start-timeline <START_TIMELINE> Start time of the event logs to load (ex: "2020-02-22 00:00:00 +09:00")
-t, --thread-number <NUMBER> Thread number (default: optimal number for performance)
-u, --update-rules Update to the latest rules in the hayabusa-rules github repository
-U, --UTC Output time in UTC format (default: local time)
-v, --verbose Output verbose information
-V, --visualize-timeline Output event frequency timeline
--version Print version information
--European-time Output timestamp in European time format (ex: 22-02-2022 22:00:00.123 +02:00)
--RFC-2822 Output timestamp in RFC 2822 format (ex: Fri, 22 Feb 2022 22:00:00 -0600)
--RFC-3339 Output timestamp in RFC 3339 format (ex: 2022-02-22 22:00:00.123456-06:00)
--US-military-time Output timestamp in US military time format (ex: 02-22-2022 22:00:00.123 -06:00)
--US-time Output timestamp in US time format (ex: 02-22-2022 10:00:00.123 PM -06:00)
--all-tags Output all tags when saving to a CSV file
-c, --rules-config <RULE_CONFIG_DIRECTORY> Specify custom rule config folder (default: ./rules/config)
--contributors Print the list of contributors
-d, --directory <DIRECTORY> Directory of multiple .evtx files
-D, --deep-scan Disable event ID filter to scan all events
--enable-deprecated-rules Enable rules marked as deprecated
--end-timeline <END_TIMELINE> End time of the event logs to load (ex: "2022-02-22 23:59:59 +09:00")
--exclude-status <EXCLUDE_STATUS>... Ignore rules according to status (ex: experimental) (ex: stable test)
-f, --filepath <FILE_PATH> File path to one .evtx file
-h, --help Print help information
-l, --live-analysis Analyze the local C:\Windows\System32\winevt\Logs folder
-L, --logon-summary Print a summary of successful and failed logons
--level-tuning [<LEVEL_TUNING_FILE>] Tune alert levels (default: ./rules/config/level_tuning.txt)
-m, --min-level <LEVEL> Minimum level for rules (default: informational)
-n, --enable-noisy-rules Enable rules marked as noisy
--no-color Disable color output
-o, --output <CSV_TIMELINE> Save the timeline in CSV format (ex: results.csv)
-p, --pivot-keywords-list Create a list of pivot keywords
-P, --profile <PROFILE> Specify output profile
-q, --quiet Quiet mode: do not display the launch banner
-Q, --quiet-errors Quiet errors mode: do not save error logs
-r, --rules <RULE_DIRECTORY/RULE_FILE> Specify a rule directory or file (default: ./rules)
-s, --statistics Print statistics of event IDs
--set-default-profile <SET_DEFAULT_PROFILE> Set default output profile
--start-timeline <START_TIMELINE> Start time of the event logs to load (ex: "2020-02-22 00:00:00 +09:00")
-t, --thread-number <NUMBER> Thread number (default: optimal number for performance)
--target-file-ext <EVTX_FILE_EXT>... Specify additional target file extensions (ex: evtx_data) (ex: evtx1 evtx2)
-u, --update-rules Update to the latest rules in the hayabusa-rules github repository
-U, --UTC Output time in UTC format (default: local time)
-v, --verbose Output verbose information
-V, --visualize-timeline Output event frequency timeline
--version Print version information
```
## Usage Examples
@@ -495,24 +496,52 @@ git clone https://github.com/Yamato-Security/hayabusa-sample-evtx.git
# Hayabusa Output
When hayabusa output is being displayed to the screen (the default), it will display the following information:
When hayabusa output is being displayed to the screen (the default), it can display the following information:
* `Timestamp`: Default is `YYYY-MM-DD HH:mm:ss.sss +hh:mm` format. This comes from the `<Event><System><TimeCreated SystemTime>` field in the event log. The default timezone will be the local timezone but you can change the timezone to UTC with the `--utc` option.
* `Computer`: This comes from the `<Event><System><Computer>` field in the event log.
* `Channel`: The name of log. This comes from the `<Event><System><Channel>` field in the event log.
* `Event ID`: This comes from the `<Event><System><EventID>` field in the event log.
* `Level`: This comes from the `level` field in the YML detection rule. (`informational`, `low`, `medium`, `high`, `critical`) By default, all level alerts will be displayed but you can set the minimum level with `-m`. For example, you can set `-m high`) in order to only scan for and display high and critical alerts.
* `RecordID`: This comes from the `<Event><System><EventRecordID>` field in the event log. You can hidde this output with the `-R` or `--hide-record-id` option.
* `RecordID`: This comes from the `<Event><System><EventRecordID>` field in the event log.
* `Title`: This comes from the `title` field in the YML detection rule.
* `Details`: This comes from the `details` field in the YML detection rule, however, only hayabusa rules have this field. This field gives extra information about the alert or event and can extract useful data from the fields in event logs. For example, usernames, command line information, process information, etc... When a placeholder points to a field that does not exist or there is an incorrect alias mapping, it will be outputted as `n/a` (not available). If the `details` field is not specified (i.e. sigma rules), default `details` messages to extract fields defined in `./rules/config/default_details.txt` will be outputted. You can add more default `details` messages by adding the `Provider Name`, `EventID` and `details` message you want to output in `default_details.txt`. When no `details` field is defined in a rule nor in `default_details.txt`, all fields will be outputted to the `details` column.
The following additional columns will be added to the output when saving to a CSV file:
* `MitreAttack`: MITRE ATT&CK tactics.
* `RuleFile`: The filename of the detection rule that generated the alert or event.
* `EvtxFile`: The path to the evtx file that caused the alert or event.
* `EvtxFile`: The evtx filename that caused the alert or event.
* `RecordInformation`: All field information.
## Output customize by profile
You can customize the output by modifying `config/profiles.yaml` and `config/default_profile.yaml`.
You can use the following aliases.
If you want to use a profile defined in `config/profiles.yaml`, use the `-P/--profile` option.
Please use the `--set-default-profile` option when you want to overwrite `config/default_profile.yaml` with a profile from `config/profiles.yaml`.
|alias name| Hayabusa output information|
|:---|:---|
|%Timestamp% | `Timestamp` |
|%Computer% | `Computer` |
|%Channel% | `Channel` |
|%Level% | `Level` |
|%EventID% | `EventID` |
|%MitreAttack% | `MitreAttack` |
|%RecordID% | `RecordID` |
|%RuleTitle% | `Title` |
|%Details% | `Details` |
|%RecordInformation% | `RecordInformation` |
|%RuleFile% | `RuleFile` |
|%EvtxFile% | `EvtxFile` |
Example of profile customization:
```yaml
(profilename):
(column name): '%Timestamp%'
(column name2): '%Computer%'
(column name3): '%Channel%'
```
If you add the `-F` or `--full-data` option, a `RecordInformation` column with all field information will also be added.
## Level Abbrevations

View File

@@ -0,0 +1,13 @@
---
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
Level: "%Level%"
EventID: "%EventID%"
MitreAttack: "%MitreAttack%"
RecordID: "%RecordID%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
RecordInformation: "%RecordInformation%"
RuleFile: "%RuleFile%"
EvtxFile: "%EvtxFile%"

View File

@@ -1,44 +0,0 @@
minimal:
Timestamp: %Timestamp%
Computer: %Computer%
Channel: %Channel%
EventID: %EventID%
Level: %Level%
RuleTitle: %RuleTitle%
Details: %Details%
default:
Timestamp: %Timestamp%
Computer: %Computer%
Channel: %Channel%
EventID: %EventID%
Level: %Level%
Tags: %MitreAttack%
RecordID: %RecordID%
RuleTitle: %RuleTitle%
Details: %Details%
verbose-1:
Timestamp: %Timestamp%
Computer: %Computer%
Channel: %Channel%
EventID: %EventID%
Level: %Level%
Tags: %MitreAttack%
RecordID: %RecordID%
RuleTitle: %RuleTitle%
Details: %Details%
RulePath: %RulePath%
FilePath: %FilePath%
verbose-2:
Timestamp: %Timestamp%
Computer: %Computer%
Channel: %Channel%
EventID: %EventID%
Level: %Level%
Tags: %MitreAttack%
RecordID: %RecordID%
RuleTitle: %RuleTitle%
Details: %Details%
AllFieldInfo: %RecordInformation%

44
config/profiles.yaml Normal file
View File

@@ -0,0 +1,44 @@
minimal:
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
EventID: "%EventID%"
Level: "%Level%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
standard:
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
EventID: "%EventID%"
Level: "%Level%"
Tags: "%MitreAttack%"
RecordID: "%RecordID%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
verbose-1:
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
EventID: "%EventID%"
Level: "%Level%"
Tags: "%MitreAttack%"
RecordID: "%RecordID%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
RuleFile: "%RuleFile%"
EvtxFile: "%EvtxFile%"
verbose-2:
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
EventID: "%EventID%"
Level: "%Level%"
Tags: "%MitreAttack%"
RecordID: "%RecordID%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
AllFieldInfo: "%RecordInformation%"

View File

@@ -1,66 +1,32 @@
use crate::detections::configs;
use crate::detections::configs::{CURRENT_EXE_PATH, TERM_SIZE};
use crate::detections::message::{self};
use crate::detections::message::{AlertMessage, IS_HIDE_RECORD_ID};
use crate::detections::utils;
use crate::detections::message::{self, LEVEL_ABBR};
use crate::detections::message::{AlertMessage, LEVEL_FULL};
use crate::detections::utils::{self, format_time};
use crate::detections::utils::{get_writable_color, write_color_buffer};
use crate::options::profile::PROFILES;
use bytesize::ByteSize;
use chrono::{DateTime, Local, TimeZone, Utc};
use csv::QuoteStyle;
use hashbrown::HashMap;
use hashbrown::HashSet;
use itertools::Itertools;
use krapslog::{build_sparkline, build_time_markers};
use lazy_static::lazy_static;
use serde::Serialize;
use linked_hash_map::LinkedHashMap;
use hashbrown::{HashMap, HashSet};
use std::cmp::min;
use std::error::Error;
use std::fmt::Debug;
use std::fs;
use std::fs::File;
use std::io;
use std::io::BufWriter;
use std::io::Write;
use std::path::Path;
use std::fs;
use std::process;
use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor};
use terminal_size::Width;
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct CsvFormat<'a> {
timestamp: &'a str,
computer: &'a str,
channel: &'a str,
event_i_d: &'a str,
level: &'a str,
mitre_attack: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
record_i_d: Option<&'a str>,
rule_title: &'a str,
details: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
record_information: Option<&'a str>,
rule_file: &'a str,
evtx_file: &'a str,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "PascalCase")]
pub struct DisplayFormat<'a> {
timestamp: &'a str,
pub computer: &'a str,
pub channel: &'a str,
pub event_i_d: &'a str,
pub level: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
record_i_d: Option<&'a str>,
pub rule_title: &'a str,
pub details: &'a str,
#[serde(skip_serializing_if = "Option::is_none")]
pub record_information: Option<&'a str>,
}
lazy_static! {
pub static ref OUTPUT_COLOR: HashMap<String, Color> = set_output_color();
}
@@ -218,21 +184,7 @@ fn emit_csv<W: std::io::Write>(
let mut detect_counts_by_computer_and_level: HashMap<String, HashMap<String, i128>> =
HashMap::new();
let levels = Vec::from([
"critical",
"high",
"medium",
"low",
"informational",
"undefined",
]);
let level_abbr: HashMap<String, String> = HashMap::from([
(String::from("cruitical"), String::from("crit")),
(String::from("high"), String::from("high")),
(String::from("medium"), String::from("med ")),
(String::from("low"), String::from("low ")),
(String::from("informational"), String::from("info")),
]);
let levels = Vec::from(["crit", "high", "med ", "low ", "info", "undefined"]);
// レベル別、日ごとの集計用変数の初期化
for level_init in levels {
detect_counts_by_date_and_level.insert(level_init.to_string(), HashMap::new());
@@ -250,85 +202,46 @@ fn emit_csv<W: std::io::Write>(
timestamps.push(_get_timestamp(time));
for detect_info in detect_infos {
detected_record_idset.insert(format!("{}_{}", time, detect_info.eventid));
let level = detect_info.level.to_string();
let time_str = format_time(time, false);
if displayflag {
let record_id = detect_info
.record_id
.as_ref()
.map(|recinfo| _format_cellpos(recinfo, ColPos::Other));
let recinfo = detect_info
.record_information
.as_ref()
.map(|recinfo| _format_cellpos(recinfo, ColPos::Last));
let ctr_char_exclude_details = detect_info
.detail
.chars()
.filter(|&c| !c.is_control())
.collect::<String>();
let details = if ctr_char_exclude_details.is_empty() {
"-".to_string()
} else {
ctr_char_exclude_details
};
let dispformat: _ = DisplayFormat {
timestamp: &_format_cellpos(&time_str, ColPos::First),
level: &_format_cellpos(
level_abbr.get(&level).unwrap_or(&level),
ColPos::Other,
),
computer: &_format_cellpos(&detect_info.computername, ColPos::Other),
event_i_d: &_format_cellpos(&detect_info.eventid, ColPos::Other),
channel: &_format_cellpos(&detect_info.channel, ColPos::Other),
rule_title: &_format_cellpos(&detect_info.alert, ColPos::Other),
details: &_format_cellpos(&details, ColPos::Other),
record_information: recinfo.as_deref(),
record_i_d: record_id.as_deref(),
};
//ヘッダーのみを出力
if plus_header {
write_color_buffer(
&disp_wtr,
get_writable_color(None),
&_get_serialized_disp_output(None),
true,
&_get_serialized_disp_output(PROFILES.as_ref().unwrap(), true),
false,
)
.ok();
plus_header = false;
}
write_color_buffer(
&disp_wtr,
get_writable_color(_get_output_color(&color_map, &detect_info.level)),
&_get_serialized_disp_output(Some(dispformat)),
get_writable_color(_get_output_color(
&color_map,
LEVEL_ABBR
.get(&detect_info.level)
.unwrap_or(&String::default()),
)),
&_get_serialized_disp_output(&detect_info.ext_field, false),
false,
)
.ok();
} else {
// csv output format
wtr.serialize(CsvFormat {
timestamp: &time_str,
level: level_abbr.get(&level).unwrap_or(&level).trim(),
computer: &detect_info.computername,
event_i_d: &detect_info.eventid,
channel: &detect_info.channel,
mitre_attack: &detect_info.tag_info,
rule_title: &detect_info.alert,
details: &detect_info.detail,
record_information: detect_info.record_information.as_deref(),
evtx_file: &detect_info.filepath,
rule_file: Path::new(&detect_info.rulepath)
.file_name()
.unwrap()
.to_str()
.unwrap(),
record_i_d: detect_info.record_id.as_deref(),
})?;
if plus_header {
wtr.write_record(detect_info.ext_field.keys().map(|x| x.trim()))?;
plus_header = false;
}
wtr.write_record(detect_info.ext_field.values().map(|x| x.trim()))?;
}
let level_suffix = *configs::LEVELMAP
.get(&detect_info.level.to_uppercase())
.get(
&LEVEL_FULL
.get(&detect_info.level)
.unwrap_or(&"undefined".to_string())
.to_uppercase(),
)
.unwrap_or(&0) as usize;
let time_str_date = format_time(time, true);
let mut detect_counts_by_date = detect_counts_by_date_and_level
@@ -372,7 +285,7 @@ fn emit_csv<W: std::io::Write>(
wtr.flush()?;
}
let output_path = configs::CONFIG.read().unwrap().args.output.clone();
let output_path = &configs::CONFIG.read().unwrap().args.output;
if let Some(path) = output_path {
if let Ok(metadata) = fs::metadata(path) {
println!(
@@ -469,24 +382,23 @@ enum ColPos {
Other,
}
fn _get_serialized_disp_output(dispformat: Option<DisplayFormat>) -> String {
if dispformat.is_none() {
let mut titles = vec![
"Timestamp",
"Computer",
"Channel",
"EventID",
"Level",
"RuleTitle",
"Details",
];
if !*IS_HIDE_RECORD_ID {
titles.insert(5, "RecordID");
fn _get_serialized_disp_output(data: &LinkedHashMap<String, String>, header: bool) -> String {
let data_length = &data.len();
let mut ret: Vec<String> = vec![];
if header {
for k in data.keys() {
ret.push(k.to_owned());
}
if configs::CONFIG.read().unwrap().args.full_data {
titles.push("RecordInformation");
} else {
for (i, (_, v)) in data.iter().enumerate() {
if i == 0 {
ret.push(_format_cellpos(v, ColPos::First))
} else if i == data_length - 1 {
ret.push(_format_cellpos(v, ColPos::Last))
} else {
ret.push(_format_cellpos(v, ColPos::Other))
}
}
return titles.join("|");
}
let mut disp_serializer = csv::WriterBuilder::new()
.double_quote(false)
@@ -495,8 +407,7 @@ fn _get_serialized_disp_output(dispformat: Option<DisplayFormat>) -> String {
.has_headers(false)
.from_writer(vec![]);
disp_serializer.serialize(dispformat.unwrap()).ok();
disp_serializer.write_record(ret).ok();
String::from_utf8(disp_serializer.into_inner().unwrap_or_default()).unwrap_or_default()
}
@@ -516,15 +427,6 @@ fn _print_unique_results(
tail_word: String,
color_map: &HashMap<String, Color>,
) {
let levels = Vec::from([
"critical",
"high",
"medium",
"low",
"informational",
"undefined",
]);
// the order in which are registered and the order of levels to be displayed are reversed
counts_by_level.reverse();
@@ -542,7 +444,7 @@ fn _print_unique_results(
)
.ok();
for (i, level_name) in levels.iter().enumerate() {
for (i, level_name) in LEVEL_ABBR.keys().enumerate() {
if "undefined" == *level_name {
continue;
}
@@ -569,30 +471,32 @@ fn _print_detection_summary_by_date(
let mut wtr = buf_wtr.buffer();
wtr.set_color(ColorSpec::new().set_fg(None)).ok();
let output_levels = Vec::from(["critical", "high", "medium", "low", "informational"]);
for level in output_levels {
for level in LEVEL_ABBR.values() {
// output_levelsはlevelsからundefinedを除外した配列であり、各要素は必ず初期化されているのでSomeであることが保証されているのでunwrapをそのまま実施
let detections_by_day = detect_counts_by_date.get(level).unwrap();
let mut max_detect_str = String::default();
let mut tmp_cnt: u128 = 0;
let mut date_str = String::default();
let mut exist_max_data = false;
for (date, cnt) in detections_by_day {
if cnt > &tmp_cnt {
date_str = date.clone();
exist_max_data = true;
max_detect_str = format!("{} ({})", date, cnt);
tmp_cnt = *cnt;
}
}
wtr.set_color(ColorSpec::new().set_fg(_get_output_color(color_map, level)))
.ok();
if date_str == String::default() {
wtr.set_color(ColorSpec::new().set_fg(_get_output_color(
color_map,
LEVEL_FULL.get(level.as_str()).unwrap(),
)))
.ok();
if !exist_max_data {
max_detect_str = "n/a".to_string();
}
writeln!(
wtr,
"Date with most total {} detections: {}",
level, &max_detect_str
LEVEL_FULL.get(level.as_str()).unwrap(),
&max_detect_str
)
.ok();
}
@@ -608,9 +512,7 @@ fn _print_detection_summary_by_computer(
let mut wtr = buf_wtr.buffer();
wtr.set_color(ColorSpec::new().set_fg(None)).ok();
let output_levels = Vec::from(["critical", "high", "medium", "low", "informational"]);
for level in output_levels {
for level in LEVEL_ABBR.values() {
// output_levelsはlevelsからundefinedを除外した配列であり、各要素は必ず初期化されているのでSomeであることが保証されているのでunwrapをそのまま実施
let detections_by_computer = detect_counts_by_computer.get(level).unwrap();
let mut result_vec: Vec<String> = Vec::new();
@@ -631,26 +533,22 @@ fn _print_detection_summary_by_computer(
result_vec.join(", ")
};
wtr.set_color(ColorSpec::new().set_fg(_get_output_color(color_map, level)))
.ok();
wtr.set_color(ColorSpec::new().set_fg(_get_output_color(
color_map,
LEVEL_FULL.get(level.as_str()).unwrap(),
)))
.ok();
writeln!(
wtr,
"Top 5 computers with most unique {} detections: {}",
level, &result_str
LEVEL_FULL.get(level.as_str()).unwrap(),
&result_str
)
.ok();
}
buf_wtr.print(&wtr).ok();
}
/// Formats a timestamp for display, honoring the `--utc` command line flag:
/// the time is kept in UTC when the flag is set, otherwise it is converted
/// to the machine's local timezone first.
fn format_time(time: &DateTime<Utc>, date_only: bool) -> String {
    let print_in_utc = configs::CONFIG.read().unwrap().args.utc;
    if print_in_utc {
        format_rfc(time, date_only)
    } else {
        format_rfc(&time.with_timezone(&Local), date_only)
    }
}
/// get timestamp to input datetime.
fn _get_timestamp(time: &DateTime<Utc>) -> i64 {
if configs::CONFIG.read().unwrap().args.utc {
@@ -661,71 +559,23 @@ fn _get_timestamp(time: &DateTime<Utc>) -> i64 {
}
}
/// Returns the timestamp rendered with the strftime pattern selected by the
/// user's time-format command line flags.
///
/// Flag precedence (first match wins) is: `rfc_2822` > `rfc_3339` >
/// `us_time` > `us_military_time` > `european_time` > default ISO-like
/// format. `date_only` selects the date-only variant of each pattern.
fn format_rfc<Tz: TimeZone>(time: &DateTime<Tz>, date_only: bool) -> String
where
    Tz::Offset: std::fmt::Display,
{
    let time_args = &configs::CONFIG.read().unwrap().args;
    // Pick the pattern once, then format once at the end.
    let pattern = if time_args.rfc_2822 {
        if date_only {
            "%a, %e %b %Y"
        } else {
            "%a, %e %b %Y %H:%M:%S %:z"
        }
    } else if time_args.rfc_3339 {
        if date_only {
            "%Y-%m-%d"
        } else {
            "%Y-%m-%d %H:%M:%S%.6f%:z"
        }
    } else if time_args.us_time {
        if date_only {
            "%m-%d-%Y"
        } else {
            "%m-%d-%Y %I:%M:%S%.3f %p %:z"
        }
    } else if time_args.us_military_time {
        if date_only {
            "%m-%d-%Y"
        } else {
            "%m-%d-%Y %H:%M:%S%.3f %:z"
        }
    } else if time_args.european_time {
        if date_only {
            "%d-%m-%Y"
        } else {
            "%d-%m-%Y %H:%M:%S%.3f %:z"
        }
    } else if date_only {
        "%Y-%m-%d"
    } else {
        "%Y-%m-%d %H:%M:%S%.3f %:z"
    };
    time.format(pattern).to_string()
}
#[cfg(test)]
mod tests {
use crate::afterfact::DisplayFormat;
use crate::afterfact::_get_serialized_disp_output;
use crate::afterfact::emit_csv;
use crate::afterfact::format_time;
use crate::detections::message;
use crate::detections::message::DetectInfo;
use crate::options::profile::load_profile;
use chrono::{Local, TimeZone, Utc};
use hashbrown::HashMap;
use linked_hash_map::LinkedHashMap;
use serde_json::Value;
use std::fs::File;
use std::fs::{read_to_string, remove_file};
use std::io;
#[test]
fn test_emit_csv() {
//テストの並列処理によって読み込みの順序が担保できずstatic変数の内容が担保が取れない為、このテストはシーケンシャルで行う
test_emit_csv_output();
test_emit_csv_output();
}
fn test_emit_csv_output() {
let mock_ch_filter = message::create_output_filter_config(
"rules/config/channel_abbreviations.txt",
@@ -743,6 +593,15 @@ mod tests {
let test_attack = "execution/txxxx.yyy";
let test_recinfo = "record_infoinfo11";
let test_record_id = "11111";
let expect_time = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let output_profile: LinkedHashMap<String, String> = load_profile(
"test_files/config/default_profile.yaml",
"test_files/config/profiles.yaml",
)
.unwrap();
{
let messages = &message::MESSAGES;
messages.clear();
@@ -761,33 +620,45 @@ mod tests {
}
"##;
let event: Value = serde_json::from_str(val).unwrap();
let mut profile_converter: HashMap<String, String> = HashMap::from([
("%Timestamp%".to_owned(), format_time(&expect_time, false)),
("%Computer%".to_owned(), test_computername.to_string()),
(
"%Channel%".to_owned(),
mock_ch_filter
.get("Security")
.unwrap_or(&String::default())
.to_string(),
),
("%Level%".to_owned(), test_level.to_string()),
("%EventID%".to_owned(), test_eventid.to_string()),
("%MitreAttack%".to_owned(), test_attack.to_string()),
("%RecordID%".to_owned(), test_record_id.to_string()),
("%RuleTitle%".to_owned(), test_title.to_owned()),
("%RecordInformation%".to_owned(), test_recinfo.to_owned()),
("%RuleFile%".to_owned(), test_rulepath.to_string()),
("%EvtxFile%".to_owned(), test_filepath.to_string()),
("%Tags%".to_owned(), test_attack.to_string()),
]);
message::insert(
&event,
output.to_string(),
DetectInfo {
filepath: test_filepath.to_string(),
rulepath: test_rulepath.to_string(),
level: test_level.to_string(),
computername: test_computername.to_string(),
eventid: test_eventid.to_string(),
channel: mock_ch_filter
.get("Security")
.unwrap_or(&String::default())
.to_string(),
alert: test_title.to_string(),
detail: String::default(),
tag_info: test_attack.to_string(),
record_information: Option::Some(test_recinfo.to_string()),
record_id: Option::Some(test_record_id.to_string()),
ext_field: output_profile,
},
expect_time,
&mut profile_converter,
false,
);
}
let expect_time = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_tz = expect_time.with_timezone(&Local);
let expect =
"Timestamp,Computer,Channel,EventID,Level,MitreAttack,RecordID,RuleTitle,Details,RecordInformation,RuleFile,EvtxFile\n"
"Timestamp,Computer,Channel,Level,EventID,MitreAttack,RecordID,RuleTitle,Details,RecordInformation,RuleFile,EvtxFile,Tags\n"
.to_string()
+ &expect_tz
.clone()
@@ -798,10 +669,10 @@ mod tests {
+ ","
+ test_channel
+ ","
+ test_eventid
+ ","
+ test_level
+ ","
+ test_eventid
+ ","
+ test_attack
+ ","
+ test_record_id
@@ -815,9 +686,11 @@ mod tests {
+ test_rulepath
+ ","
+ test_filepath
+ ","
+ test_attack
+ "\n";
let mut file: Box<dyn io::Write> = Box::new(File::create("./test_emit_csv.csv").unwrap());
assert!(emit_csv(&mut file, false, HashMap::default(), 1).is_ok());
assert!(emit_csv(&mut file, false, HashMap::new(), 1).is_ok());
match read_to_string("./test_emit_csv.csv") {
Err(_) => panic!("Failed to open file."),
Ok(s) => {
@@ -825,10 +698,10 @@ mod tests {
}
};
assert!(remove_file("./test_emit_csv.csv").is_ok());
check_emit_csv_display();
}
fn check_emit_csv_display() {
#[test]
fn test_emit_csv_display() {
let test_title = "test_title2";
let test_level = "medium";
let test_computername = "testcomputer2";
@@ -841,44 +714,42 @@ mod tests {
let test_timestamp = Utc
.datetime_from_str("1996-02-27T01:05:01Z", "%Y-%m-%dT%H:%M:%SZ")
.unwrap();
let expect_header = "Timestamp|Computer|Channel|EventID|Level|RecordID|RuleTitle|Details";
let expect_header = "Timestamp|Computer|Channel|EventID|Level|RecordID|RuleTitle|Details|RecordInformation\n";
let expect_tz = test_timestamp.with_timezone(&Local);
let expect_no_header = expect_tz
.clone()
.format("%Y-%m-%d %H:%M:%S%.3f %:z")
.to_string()
+ "|"
+ " | "
+ test_computername
+ "|"
+ " | "
+ test_channel
+ "|"
+ " | "
+ test_eventid
+ "|"
+ " | "
+ test_level
+ "|"
+ " | "
+ test_recid
+ "|"
+ " | "
+ test_title
+ "|"
+ " | "
+ output
+ "|"
+ " | "
+ test_recinfo
+ "\n";
assert_eq!(_get_serialized_disp_output(None), expect_header);
assert_eq!(
_get_serialized_disp_output(Some(DisplayFormat {
timestamp: &format_time(&test_timestamp, false),
level: test_level,
computer: test_computername,
event_i_d: test_eventid,
channel: test_channel,
rule_title: test_title,
details: output,
record_information: Some(test_recinfo),
record_i_d: Some(test_recid),
})),
expect_no_header
);
let mut data: LinkedHashMap<String, String> = LinkedHashMap::new();
data.insert("Timestamp".to_owned(), format_time(&test_timestamp, false));
data.insert("Computer".to_owned(), test_computername.to_owned());
data.insert("Channel".to_owned(), test_channel.to_owned());
data.insert("EventID".to_owned(), test_eventid.to_owned());
data.insert("Level".to_owned(), test_level.to_owned());
data.insert("RecordID".to_owned(), test_recid.to_owned());
data.insert("RuleTitle".to_owned(), test_title.to_owned());
data.insert("Details".to_owned(), output.to_owned());
data.insert("RecordInformation".to_owned(), test_recinfo.to_owned());
assert_eq!(_get_serialized_disp_output(&data, true), expect_header);
assert_eq!(_get_serialized_disp_output(&data, false), expect_no_header);
}
}

View File

@@ -4,8 +4,7 @@ use crate::detections::pivot::PIVOT_KEYWORD;
use crate::detections::utils;
use chrono::{DateTime, Utc};
use clap::{App, CommandFactory, Parser};
use hashbrown::HashMap;
use hashbrown::HashSet;
use hashbrown::{HashMap, HashSet};
use lazy_static::lazy_static;
use regex::Regex;
use std::env::current_exe;
@@ -70,10 +69,6 @@ pub struct Config {
#[clap(short = 'f', long, value_name = "FILE_PATH")]
pub filepath: Option<PathBuf>,
/// Print all field information
#[clap(short = 'F', long = "full-data")]
pub full_data: bool,
/// Specify a rule directory or file (default: ./rules)
#[clap(
short = 'r',
@@ -102,10 +97,6 @@ pub struct Config {
#[clap(long = "all-tags")]
pub all_tags: bool,
/// Do not display EventRecordID numbers
#[clap(short = 'R', long = "hide-record-id")]
pub hide_record_id: bool,
/// Output verbose information
#[clap(short = 'v', long)]
pub verbose: bool,
@@ -223,6 +214,14 @@ pub struct Config {
/// Ignore rules according to status (ex: experimental) (ex: stable test)
#[clap(long = "exclude-status", multiple_values = true)]
pub exclude_status: Option<Vec<String>>,
/// Specify output profile
#[clap(short = 'P', long = "profile")]
pub profile: Option<String>,
/// Set default output profile
#[clap(long = "set-default-profile")]
pub set_default_profile: Option<String>,
}
impl ConfigReader<'_> {
@@ -473,7 +472,7 @@ pub fn load_pivot_keywords(path: &str) {
.write()
.unwrap()
.entry(map[0].to_string())
.or_insert(PivotKeyword::new());
.or_insert_with(PivotKeyword::new);
PIVOT_KEYWORD
.write()

View File

@@ -1,13 +1,17 @@
extern crate csv;
use crate::detections::configs;
use crate::detections::utils::write_color_buffer;
use crate::detections::utils::{format_time, write_color_buffer};
use crate::options::profile::{
LOAEDED_PROFILE_ALIAS, PRELOAD_PROFILE, PRELOAD_PROFILE_REGEX, PROFILES,
};
use chrono::{TimeZone, Utc};
use termcolor::{BufferWriter, Color, ColorChoice};
use crate::detections::message::AlertMessage;
use crate::detections::message::DetectInfo;
use crate::detections::message::ERROR_LOG_STACK;
use crate::detections::message::{CH_CONFIG, DEFAULT_DETAILS, IS_HIDE_RECORD_ID, TAGS_CONFIG};
use crate::detections::message::{CH_CONFIG, DEFAULT_DETAILS, TAGS_CONFIG};
use crate::detections::message::{
LOGONSUMMARY_FLAG, PIVOT_KEYWORD_LIST_FLAG, QUIET_ERRORS_FLAG, STATISTICS_FLAG,
};
@@ -18,7 +22,6 @@ use crate::detections::rule::RuleNode;
use crate::detections::utils::{get_serde_number_to_string, make_ascii_titlecase};
use crate::filter;
use crate::yaml::ParseYaml;
use hashbrown;
use hashbrown::HashMap;
use serde_json::Value;
use std::fmt::Write;
@@ -28,6 +31,7 @@ use std::sync::Arc;
use tokio::{runtime::Runtime, spawn, task::JoinHandle};
use super::message;
use super::message::LEVEL_ABBR;
// イベントファイルの1レコード分の情報を保持する構造体
#[derive(Clone, Debug)]
@@ -35,7 +39,7 @@ pub struct EvtxRecordInfo {
pub evtx_filepath: String, // イベントファイルのファイルパス ログで出力するときに使う
pub record: Value, // 1レコード分のデータをJSON形式にシリアライズしたもの
pub data_string: String,
pub key_2_value: hashbrown::HashMap<String, String>,
pub key_2_value: HashMap<String, String>,
pub record_information: Option<String>,
}
@@ -202,34 +206,14 @@ impl Detection {
rule
}
/// 条件に合致したレコードを表示するための関数
/// 条件に合致したレコードを格納するための関数
fn insert_message(rule: &RuleNode, record_info: &EvtxRecordInfo) {
let tag_info: Vec<String> = match TAGS_CONFIG.is_empty() {
false => rule.yaml["tags"]
.as_vec()
.unwrap_or(&Vec::default())
.iter()
.filter_map(|info| TAGS_CONFIG.get(info.as_str().unwrap_or(&String::default())))
.map(|str| str.to_owned())
.collect(),
true => rule.yaml["tags"]
.as_vec()
.unwrap_or(&Vec::default())
.iter()
.map(
|info| match TAGS_CONFIG.get(info.as_str().unwrap_or(&String::default())) {
Some(s) => s.to_owned(),
_ => info.as_str().unwrap_or("").replace("attack.", ""),
},
)
.collect(),
};
let tag_info: Vec<String> = Detection::get_tag_info(rule);
let recinfo = record_info
.record_information
.as_ref()
.map(|recinfo| recinfo.to_string());
let rec_id = if !*IS_HIDE_RECORD_ID {
let rec_id = if LOAEDED_PROFILE_ALIAS.contains("%RecordID%") {
Some(
get_serde_number_to_string(&record_info.record["Event"]["System"]["EventRecordID"])
.unwrap_or_default(),
@@ -249,25 +233,107 @@ impl Detection {
Some(str) => str.to_owned(),
None => recinfo.as_ref().unwrap_or(&"-".to_string()).to_string(),
};
let opt_record_info = if configs::CONFIG.read().unwrap().args.full_data {
let opt_record_info = if LOAEDED_PROFILE_ALIAS.contains("%RecordInformation%") {
recinfo
} else {
None
};
let default_time = Utc.ymd(1970, 1, 1).and_hms(0, 0, 0);
let time = message::get_event_time(&record_info.record).unwrap_or(default_time);
let level = rule.yaml["level"].as_str().unwrap_or("-").to_string();
let mut profile_converter: HashMap<String, String> = HashMap::new();
for (_k, v) in PROFILES.as_ref().unwrap().iter() {
let tmp = v.as_str();
for target_profile in PRELOAD_PROFILE_REGEX.matches(tmp).into_iter() {
match PRELOAD_PROFILE[target_profile] {
"%Timestamp%" => {
profile_converter
.insert("%Timestamp%".to_string(), format_time(&time, false));
}
"%Computer%" => {
profile_converter.insert(
"%Computer%".to_string(),
record_info.record["Event"]["System"]["Computer"]
.to_string()
.replace('\"', ""),
);
}
"%Channel%" => {
profile_converter.insert(
"%Channel%".to_string(),
CH_CONFIG.get(ch_str).unwrap_or(ch_str).to_string(),
);
}
"%Level%" => {
profile_converter.insert(
"%Level%".to_string(),
LEVEL_ABBR.get(&level).unwrap_or(&level).to_string(),
);
}
"%EventID%" => {
profile_converter.insert("%EventID%".to_string(), eid.to_owned());
}
"%MitreAttack%" => {
profile_converter.insert("%MitreAttack%".to_string(), tag_info.join(" : "));
}
"%RecordID%" => {
profile_converter.insert(
"%RecordID%".to_string(),
rec_id.as_ref().unwrap_or(&"".to_string()).to_owned(),
);
}
"%RuleTitle%" => {
profile_converter.insert(
"%RuleTitle%".to_string(),
rule.yaml["title"].as_str().unwrap_or("").to_string(),
);
}
"%RecordInformation%" => {
profile_converter.insert(
"%RecordInformation%".to_string(),
opt_record_info
.as_ref()
.unwrap_or(&"-".to_string())
.to_owned(),
);
}
"%RuleFile%" => {
profile_converter.insert(
"%RuleFile%".to_string(),
Path::new(&rule.rulepath)
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_string(),
);
}
"%EvtxFile%" => {
profile_converter.insert(
"%EvtxFile%".to_string(),
Path::new(&record_info.evtx_filepath)
.to_str()
.unwrap_or_default()
.to_string(),
);
}
_ => {}
}
}
}
let detect_info = DetectInfo {
filepath: record_info.evtx_filepath.to_string(),
rulepath: (&rule.rulepath).to_owned(),
level: rule.yaml["level"].as_str().unwrap_or("-").to_string(),
level: LEVEL_ABBR.get(&level).unwrap_or(&level).to_string(),
computername: record_info.record["Event"]["System"]["Computer"]
.to_string()
.replace('\"', ""),
eventid: eid,
channel: CH_CONFIG.get(ch_str).unwrap_or(ch_str).to_string(),
alert: rule.yaml["title"].as_str().unwrap_or("").to_string(),
detail: String::default(),
tag_info: tag_info.join(" | "),
record_information: opt_record_info,
record_id: rec_id,
ext_field: PROFILES.as_ref().unwrap().to_owned(),
};
message::insert(
&record_info.record,
@@ -276,45 +342,126 @@ impl Detection {
.unwrap_or(&default_output)
.to_string(),
detect_info,
time,
&mut profile_converter,
false,
);
}
/// insert aggregation condition detection message to output stack
fn insert_agg_message(rule: &RuleNode, agg_result: AggResult) {
let tag_info: Vec<String> = rule.yaml["tags"]
.as_vec()
.unwrap_or(&Vec::default())
.iter()
.filter_map(|info| TAGS_CONFIG.get(info.as_str().unwrap_or(&String::default())))
.map(|str| str.to_owned())
.collect();
let tag_info: Vec<String> = Detection::get_tag_info(rule);
let output = Detection::create_count_output(rule, &agg_result);
let rec_info = if configs::CONFIG.read().unwrap().args.full_data {
let rec_info = if LOAEDED_PROFILE_ALIAS.contains("%RecordInformation%") {
Option::Some(String::default())
} else {
Option::None
};
let rec_id = if !*IS_HIDE_RECORD_ID {
Some(String::default())
} else {
None
};
let mut profile_converter: HashMap<String, String> = HashMap::new();
let level = rule.yaml["level"].as_str().unwrap_or("-").to_string();
for (_k, v) in PROFILES.as_ref().unwrap().iter() {
let tmp = v.as_str();
for target_profile in PRELOAD_PROFILE_REGEX.matches(tmp).into_iter() {
match PRELOAD_PROFILE[target_profile] {
"%Timestamp%" => {
profile_converter.insert(
"%Timestamp%".to_string(),
format_time(&agg_result.start_timedate, false),
);
}
"%Computer%" => {
profile_converter.insert("%Computer%".to_string(), "-".to_owned());
}
"%Channel%" => {
profile_converter.insert("%Channel%".to_string(), "-".to_owned());
}
"%Level%" => {
profile_converter.insert(
"%Level%".to_string(),
LEVEL_ABBR.get(&level).unwrap_or(&level).to_string(),
);
}
"%EventID%" => {
profile_converter.insert("%EventID%".to_string(), "-".to_owned());
}
"%MitreAttack%" => {
profile_converter.insert("%MitreAttack%".to_owned(), tag_info.join(" : "));
}
"%RecordID%" => {
profile_converter.insert("%RecordID%".to_string(), "".to_owned());
}
"%RuleTitle%" => {
profile_converter.insert(
"%RuleTitle%".to_string(),
rule.yaml["title"].as_str().unwrap_or("").to_string(),
);
}
"%RecordInformation%" => {
profile_converter.insert("%RecordInformation%".to_string(), "-".to_owned());
}
"%RuleFile%" => {
profile_converter.insert(
"%RuleFile%".to_string(),
Path::new(&rule.rulepath)
.file_name()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_string(),
);
}
"%EvtxFile%" => {
profile_converter.insert("%EvtxFile%".to_string(), "-".to_owned());
}
_ => {}
}
}
}
let detect_info = DetectInfo {
filepath: "-".to_owned(),
rulepath: (&rule.rulepath).to_owned(),
level: rule.yaml["level"].as_str().unwrap_or("").to_owned(),
level: LEVEL_ABBR.get(&level).unwrap_or(&level).to_string(),
computername: "-".to_owned(),
eventid: "-".to_owned(),
channel: "-".to_owned(),
alert: rule.yaml["title"].as_str().unwrap_or("").to_owned(),
detail: output,
record_information: rec_info,
tag_info: tag_info.join(" : "),
record_id: rec_id,
ext_field: PROFILES.as_ref().unwrap().to_owned(),
};
message::insert_message(detect_info, agg_result.start_timedate)
message::insert(
&Value::default(),
rule.yaml["details"].as_str().unwrap_or("-").to_string(),
detect_info,
agg_result.start_timedate,
&mut profile_converter,
true,
)
}
/// Returns the contents of the rule's `tags` field as a vector of strings.
///
/// When the tag replacement table (`TAGS_CONFIG`) has entries, only tags
/// with a replacement are kept (replaced form). When the table is empty,
/// every tag is kept, with the `attack.` prefix stripped if no replacement
/// exists for it.
fn get_tag_info(rule: &RuleNode) -> Vec<String> {
    let no_tags = Vec::default();
    let rule_tags = rule.yaml["tags"].as_vec().unwrap_or(&no_tags);
    if TAGS_CONFIG.is_empty() {
        rule_tags
            .iter()
            .map(|info| {
                let raw = info.as_str().unwrap_or("");
                match TAGS_CONFIG.get(raw) {
                    Some(replaced) => replaced.to_owned(),
                    None => raw.replace("attack.", ""),
                }
            })
            .collect()
    } else {
        rule_tags
            .iter()
            .filter_map(|info| TAGS_CONFIG.get(info.as_str().unwrap_or("")))
            .map(|replaced| replaced.to_owned())
            .collect()
    }
}
///aggregation conditionのcount部分の検知出力文の文字列を返す関数

View File

@@ -4,10 +4,12 @@ use crate::detections::configs::CURRENT_EXE_PATH;
use crate::detections::utils;
use crate::detections::utils::get_serde_number_to_string;
use crate::detections::utils::write_color_buffer;
use chrono::{DateTime, Local, TimeZone, Utc};
use crate::options::profile::PROFILES;
use chrono::{DateTime, Local, Utc};
use dashmap::DashMap;
use hashbrown::HashMap;
use lazy_static::lazy_static;
use linked_hash_map::LinkedHashMap;
use regex::Regex;
use serde_json::Value;
use std::env;
@@ -21,17 +23,13 @@ use termcolor::{BufferWriter, ColorChoice};
/// One detection result produced when a rule matches an event record.
///
/// NOTE(review): several fields duplicate entries of `ext_field` (the
/// output-profile map filled from `PROFILES`); confirm which plain fields
/// are still read by the output path before relying on them.
#[derive(Debug, Clone)]
pub struct DetectInfo {
    // Path of the evtx file the matched record came from.
    pub filepath: String,
    // Path of the rule file that matched.
    pub rulepath: String,
    // Rule level, stored in abbreviated form (via LEVEL_ABBR).
    pub level: String,
    // Value of Event.System.Computer from the record.
    pub computername: String,
    // Event ID of the matched record.
    pub eventid: String,
    // Channel name, after abbreviation through CH_CONFIG.
    pub channel: String,
    // Rule title used as the alert text.
    pub alert: String,
    // Rendered details string; filled in by message::insert().
    pub detail: String,
    // Rule tags joined into a single display string.
    pub tag_info: String,
    // Full record information, only populated when the profile asks for it.
    pub record_information: Option<String>,
    // EventRecordID, only populated when the profile asks for it.
    pub record_id: Option<String>,
    // Output-profile columns (header -> alias template) from PROFILES.
    pub ext_field: LinkedHashMap<String, String>,
}
pub struct AlertMessage {}
@@ -68,7 +66,6 @@ lazy_static! {
);
pub static ref PIVOT_KEYWORD_LIST_FLAG: bool =
configs::CONFIG.read().unwrap().args.pivot_keywords_list;
pub static ref IS_HIDE_RECORD_ID: bool = configs::CONFIG.read().unwrap().args.hide_record_id;
pub static ref DEFAULT_DETAILS: HashMap<String, String> = get_default_details(&format!(
"{}/default_details.txt",
configs::CONFIG
@@ -79,6 +76,20 @@ lazy_static! {
.as_path()
.display()
));
pub static ref LEVEL_ABBR: LinkedHashMap<String, String> = LinkedHashMap::from_iter([
("critical".to_string(), "crit".to_string()),
("high".to_string(), "high".to_string()),
("medium".to_string(), "med ".to_string()),
("low".to_string(), "low ".to_string()),
("informational".to_string(), "info".to_string()),
]);
pub static ref LEVEL_FULL: HashMap<String, String> = HashMap::from([
("crit".to_string(), "critical".to_string()),
("high".to_string(), "high".to_string()),
("med ".to_string(), "medium".to_string()),
("low ".to_string(), "low".to_string()),
("info".to_string(), "informational".to_string())
]);
}
/// ファイルパスで記載されたtagでのフル名、表示の際に置き換えられる文字列のHashMapを作成する関数。
@@ -102,9 +113,8 @@ pub fn create_output_filter_config(
return;
}
let empty = &"".to_string();
let tag_full_str = line.get(0).unwrap_or(empty).trim();
let tag_replace_str = line.get(1).unwrap_or(empty).trim();
let tag_full_str = line[0].trim();
let tag_replace_str = line[1].trim();
ret.insert(tag_full_str.to_owned(), tag_replace_str.to_owned());
});
@@ -123,15 +133,67 @@ pub fn insert_message(detect_info: DetectInfo, event_time: DateTime<Utc>) {
}
/// メッセージを設定
pub fn insert(event_record: &Value, output: String, mut detect_info: DetectInfo) {
detect_info.detail = parse_message(event_record, output);
let default_time = Utc.ymd(1970, 1, 1).and_hms(0, 0, 0);
let time = get_event_time(event_record).unwrap_or(default_time);
pub fn insert(
event_record: &Value,
output: String,
mut detect_info: DetectInfo,
time: DateTime<Utc>,
profile_converter: &mut HashMap<String, String>,
is_agg: bool,
) {
if !is_agg {
let parsed_detail = parse_message(event_record, &output)
.chars()
.filter(|&c| !c.is_control())
.collect::<String>();
detect_info.detail = if parsed_detail.is_empty() {
"-".to_string()
} else {
parsed_detail
};
}
let mut exist_detail = false;
PROFILES.as_ref().unwrap().iter().for_each(|(_k, v)| {
if v.contains("%Details%") {
exist_detail = true;
}
});
if exist_detail {
profile_converter.insert("%Details%".to_string(), detect_info.detail.to_owned());
}
let mut tmp_converted_info: LinkedHashMap<String, String> = LinkedHashMap::new();
for (k, v) in &detect_info.ext_field {
let converted_reserve_info = convert_profile_reserved_info(v, profile_converter);
if v.contains("%RecordInformation%") || v.contains("%Details%") {
tmp_converted_info.insert(k.to_owned(), converted_reserve_info);
} else {
tmp_converted_info.insert(
k.to_owned(),
parse_message(event_record, &converted_reserve_info),
);
}
}
for (k, v) in tmp_converted_info {
detect_info.ext_field.insert(k, v);
}
insert_message(detect_info, time)
}
fn parse_message(event_record: &Value, output: String) -> String {
let mut return_message: String = output;
/// Replaces every reserved-word placeholder used by output profiles
/// (e.g. `%Timestamp%`) in `output` with its concrete value from
/// `config_reserved_info`.
///
/// Takes `&str` instead of `&String` so any string slice can be passed;
/// existing `&String` call sites keep working through deref coercion.
fn convert_profile_reserved_info(
    output: &str,
    config_reserved_info: &HashMap<String, String>,
) -> String {
    // Apply each replacement in turn, in the map's iteration order
    // (same order the original loop used).
    config_reserved_info
        .iter()
        .fold(output.to_owned(), |acc, (k, v)| acc.replace(k, v))
}
/// メッセージ内の%で囲まれた箇所をエイリアスとしてをレコード情報を参照して置き換える関数
fn parse_message(event_record: &Value, output: &String) -> String {
let mut return_message = output.to_owned();
let mut hash_map: HashMap<String, String> = HashMap::new();
for caps in ALIASREGEX.captures_iter(&return_message) {
let full_target_str = &caps[0];
@@ -146,7 +208,7 @@ fn parse_message(event_record: &Value, output: String) -> String {
{
_array_str.to_string()
} else {
"Event.EventData.".to_owned() + &target_str
format!("Event.EventData.{}", target_str)
};
let split: Vec<&str> = array_str.split('.').collect();
@@ -184,7 +246,6 @@ fn parse_message(event_record: &Value, output: String) -> String {
for (k, v) in &hash_map {
return_message = return_message.replace(k, v);
}
return_message
}
@@ -196,20 +257,6 @@ pub fn get(time: DateTime<Utc>) -> Vec<DetectInfo> {
}
}
/// Prints every queued detection message to stdout, then a blank line and
/// the total number of detected events.
pub fn print() {
    let mut detect_count = 0;
    for multi in MESSAGES.iter() {
        let (key, detect_infos) = multi.pair();
        detect_infos
            .iter()
            .for_each(|info| println!("{} <{}> {}", key, info.alert, info.detail));
        detect_count += detect_infos.len();
    }
    println!();
    println!("Total events:{:?}", detect_count);
}
pub fn get_event_time(event_record: &Value) -> Option<DateTime<Utc>> {
let system_time = &event_record["Event"]["System"]["TimeCreated_attributes"]["SystemTime"];
return utils::str_time_to_datetime(system_time.as_str().unwrap_or(""));
@@ -365,7 +412,7 @@ mod tests {
assert_eq!(
parse_message(
&event_record,
"commandline:%CommandLine% computername:%ComputerName%".to_owned()
&"commandline:%CommandLine% computername:%ComputerName%".to_owned()
),
expected,
);
@@ -386,7 +433,7 @@ mod tests {
let event_record: Value = serde_json::from_str(json_str).unwrap();
let expected = "alias:no_alias";
assert_eq!(
parse_message(&event_record, "alias:%NoAlias%".to_owned()),
parse_message(&event_record, &"alias:%NoAlias%".to_owned()),
expected,
);
}
@@ -412,7 +459,7 @@ mod tests {
let event_record: Value = serde_json::from_str(json_str).unwrap();
let expected = "NoExistAlias:n/a";
assert_eq!(
parse_message(&event_record, "NoExistAlias:%NoAliasNoHit%".to_owned()),
parse_message(&event_record, &"NoExistAlias:%NoAliasNoHit%".to_owned()),
expected,
);
}
@@ -439,7 +486,7 @@ mod tests {
assert_eq!(
parse_message(
&event_record,
"commandline:%CommandLine% computername:%ComputerName%".to_owned()
&"commandline:%CommandLine% computername:%ComputerName%".to_owned()
),
expected,
);
@@ -472,7 +519,7 @@ mod tests {
assert_eq!(
parse_message(
&event_record,
"commandline:%CommandLine% data:%Data%".to_owned()
&"commandline:%CommandLine% data:%Data%".to_owned()
),
expected,
);
@@ -505,7 +552,7 @@ mod tests {
assert_eq!(
parse_message(
&event_record,
"commandline:%CommandLine% data:%Data[2]%".to_owned()
&"commandline:%CommandLine% data:%Data[2]%".to_owned()
),
expected,
);
@@ -538,7 +585,7 @@ mod tests {
assert_eq!(
parse_message(
&event_record,
"commandline:%CommandLine% data:%Data[0]%".to_owned()
&"commandline:%CommandLine% data:%Data[0]%".to_owned()
),
expected,
);

View File

@@ -1,5 +1,4 @@
use hashbrown::HashMap;
use hashbrown::HashSet;
use hashbrown::{HashMap, HashSet};
use lazy_static::lazy_static;
use serde_json::Value;
use std::sync::RwLock;

View File

@@ -4,9 +4,11 @@ extern crate regex;
use crate::detections::configs;
use crate::detections::configs::CURRENT_EXE_PATH;
use hashbrown::HashMap;
use std::path::Path;
use std::path::PathBuf;
use chrono::Local;
use termcolor::Color;
use tokio::runtime::Builder;
@@ -218,8 +220,8 @@ pub fn create_rec_info(data: Value, path: String, keys: &[String]) -> EvtxRecord
// この処理を高速化するため、rec.key_2_valueというhashmapに"Event.System.EventID"というキーで値を設定しておく。
// これなら、"Event.System.EventID"というキーを1回指定するだけで値を取得できるようになるので、高速化されるはず。
// あと、serde_jsonのValueからvalue["Event"]みたいな感じで値を取得する処理がなんか遅いので、そういう意味でも早くなるかも
// それと、serde_jsonでは内部的に標準ライブラリのhashmapを使用しているが、hashbrownを使った方が早くなるらしい。
let mut key_2_values = hashbrown::HashMap::new();
// それと、serde_jsonでは内部的に標準ライブラリのhashmapを使用しているが、hashbrownを使った方が早くなるらしい。標準ライブラリがhashbrownを採用したためserde_jsonについても高速化した。
let mut key_2_values = HashMap::new();
for key in keys {
let val = get_event_value(key, &data);
if val.is_none() {
@@ -386,6 +388,58 @@ pub fn check_setting_path(base_path: &Path, path: &str) -> PathBuf {
}
}
/// Formats a timestamp according to the configured timezone: UTC when the
/// `--utc` flag is set, otherwise the machine's local timezone.
pub fn format_time(time: &DateTime<Utc>, date_only: bool) -> String {
    match configs::CONFIG.read().unwrap().args.utc {
        true => format_rfc(time, date_only),
        false => format_rfc(&time.with_timezone(&Local), date_only),
    }
}
/// Returns the timestamp rendered with the strftime pattern selected by the
/// user's time-format command line flags.
///
/// Flag precedence (first match wins) is: `rfc_2822` > `rfc_3339` >
/// `us_time` > `us_military_time` > `european_time` > default ISO-like
/// format. `date_only` selects the date-only variant of each pattern.
fn format_rfc<Tz: TimeZone>(time: &DateTime<Tz>, date_only: bool) -> String
where
    Tz::Offset: std::fmt::Display,
{
    let time_args = &configs::CONFIG.read().unwrap().args;
    // Choose the pattern first, then do a single format call.
    let pattern = if time_args.rfc_2822 {
        if date_only {
            "%a, %e %b %Y"
        } else {
            "%a, %e %b %Y %H:%M:%S %:z"
        }
    } else if time_args.rfc_3339 {
        if date_only {
            "%Y-%m-%d"
        } else {
            "%Y-%m-%d %H:%M:%S%.6f%:z"
        }
    } else if time_args.us_time {
        if date_only {
            "%m-%d-%Y"
        } else {
            "%m-%d-%Y %I:%M:%S%.3f %p %:z"
        }
    } else if time_args.us_military_time {
        if date_only {
            "%m-%d-%Y"
        } else {
            "%m-%d-%Y %H:%M:%S%.3f %:z"
        }
    } else if time_args.european_time {
        if date_only {
            "%d-%m-%Y"
        } else {
            "%d-%m-%Y %H:%M:%S%.3f %:z"
        }
    } else if date_only {
        "%Y-%m-%d"
    } else {
        "%Y-%m-%d %H:%M:%S%.3f %:z"
    };
    time.format(pattern).to_string()
}
#[cfg(test)]
mod tests {
use std::path::Path;

View File

@@ -18,6 +18,7 @@ use hayabusa::detections::pivot::PivotKeyword;
use hayabusa::detections::pivot::PIVOT_KEYWORD;
use hayabusa::detections::rule::{get_detection_keys, RuleNode};
use hayabusa::omikuji::Omikuji;
use hayabusa::options::profile::PROFILES;
use hayabusa::options::{level_tuning::LevelTuning, update_rules::UpdateRules};
use hayabusa::{afterfact::after_fact, detections::utils};
use hayabusa::{detections::configs, timeline::timelines::Timeline};
@@ -84,7 +85,9 @@ impl App {
.unwrap(),
);
}
if PROFILES.is_none() {
return;
}
let analysis_start_time: DateTime<Local> = Local::now();
// Show usage when no arguments.
if std::env::args().len() == 1 {

View File

@@ -2,7 +2,7 @@ use crate::detections::utils::write_color_buffer;
use crate::detections::{configs, utils};
use crate::filter::RuleExclude;
use crate::yaml::ParseYaml;
use std::collections::HashMap;
use hashbrown::HashMap;
use std::fs::{self, File};
use std::io::Write;
use termcolor::{BufferWriter, ColorChoice};

View File

@@ -1,2 +1,3 @@
pub mod level_tuning;
pub mod profile;
pub mod update_rules;

307
src/options/profile.rs Normal file
View File

@@ -0,0 +1,307 @@
use crate::detections::configs::{self, CURRENT_EXE_PATH};
use crate::detections::message::AlertMessage;
use crate::detections::utils::check_setting_path;
use crate::yaml;
use hashbrown::HashSet;
use lazy_static::lazy_static;
use linked_hash_map::LinkedHashMap;
use regex::RegexSet;
use std::fs::OpenOptions;
use std::io::{BufWriter, Write};
use std::path::Path;
use yaml_rust::{Yaml, YamlEmitter, YamlLoader};
lazy_static! {
    // Ordered output profile (column name -> field alias such as "%Timestamp%"),
    // loaded once at startup from config/default_profile.yaml, or from
    // config/profiles.yaml when the --profile option selects a named profile.
    // None means loading failed (an alert has been printed) and the caller is
    // expected to terminate.
    pub static ref PROFILES: Option<LinkedHashMap<String, String>> = load_profile(
        check_setting_path(
            &CURRENT_EXE_PATH.to_path_buf(),
            "config/default_profile.yaml"
        )
        .to_str()
        .unwrap(),
        check_setting_path(&CURRENT_EXE_PATH.to_path_buf(), "config/profiles.yaml")
            .to_str()
            .unwrap()
    );
    // Set of field aliases actually used by the loaded profile (the map's values).
    // NOTE(review): the name is misspelled ("LOAEDED" -> "LOADED"), but it is a
    // public static, so renaming it here would break external callers.
    pub static ref LOAEDED_PROFILE_ALIAS: HashSet<String> = HashSet::from_iter(
        PROFILES
            .as_ref()
            .unwrap_or(&LinkedHashMap::default())
            .values()
            .cloned()
    );
    // Built-in aliases recognized by the output machinery. '%' is not a regex
    // metacharacter, so each entry matches literally (unanchored) when compiled
    // into PRELOAD_PROFILE_REGEX below.
    pub static ref PRELOAD_PROFILE: Vec<&'static str> = vec![
        "%Timestamp%",
        "%Computer%",
        "%Channel%",
        "%Level%",
        "%EventID%",
        "%MitreAttack%",
        "%RecordID%",
        "%RuleTitle%",
        "%RecordInformation%",
        "%RuleFile%",
        "%EvtxFile%"
    ];
    pub static ref PRELOAD_PROFILE_REGEX: RegexSet = RegexSet::new(&*PRELOAD_PROFILE).unwrap();
}
// Reads the profile file at the given path and parses it as YAML.
// Returns the parsed documents, or a human-readable error message when the
// file cannot be read or parsed.
fn read_profile_data(profile_path: &str) -> Result<Vec<Yaml>, String> {
    let reader = yaml::ParseYaml::new();
    match reader.read_file(Path::new(profile_path).to_path_buf()) {
        Ok(contents) => YamlLoader::load_from_str(&contents)
            .map_err(|e| format!("Parse error: {}. {}", profile_path, e)),
        Err(_) => Err(format!(
            "The profile file({}) does not exist. Please check your default profile.",
            profile_path
        )),
    }
}
/// Loads the output profile as an ordered map of column name -> field alias.
///
/// When the --profile option is not given, the default profile file is used
/// as-is; otherwise the named profile is looked up in the profiles file.
/// Returns None (after printing an alert) when nothing could be loaded; the
/// caller treats None as a signal to terminate the program.
pub fn load_profile(
    default_profile_path: &str,
    profile_path: &str,
) -> Option<LinkedHashMap<String, String>> {
    let conf = &configs::CONFIG.read().unwrap().args;
    // --set-default-profile is handled here as a side effect before loading.
    if conf.set_default_profile.is_some() {
        if let Err(e) = set_default_profile(default_profile_path, profile_path) {
            AlertMessage::alert(&e).ok();
        } else {
            println!("Successfully updated the default profile.");
        };
    }

    // Without --profile, read the default profile file; with --profile, read
    // the file that holds the named profiles.
    let read_result = if conf.profile.is_none() {
        read_profile_data(default_profile_path)
    } else {
        read_profile_data(profile_path)
    };
    let profile_all: Vec<Yaml> = match read_result {
        Ok(data) => data,
        Err(e) => {
            AlertMessage::alert(&e).ok();
            vec![]
        }
    };
    // An alert has already been printed when nothing was loaded, so return
    // None to let the caller terminate.
    if profile_all.is_empty() {
        return None;
    }
    let profile_data = &profile_all[0];
    if let Some(profile_name) = &conf.profile {
        let target_data = &profile_data[profile_name.as_str()];
        if !target_data.is_badvalue() {
            Some(to_profile_map(target_data))
        } else {
            // The requested profile name is not defined: list the valid names.
            let profile_names: Vec<&str> = profile_data
                .as_hash()
                .unwrap()
                .keys()
                .map(|k| k.as_str().unwrap())
                .collect();
            AlertMessage::alert(&format!(
                "Invalid profile specified: {}\nPlease specify one of the following profiles:\n {}",
                profile_name,
                profile_names.join(", ")
            ))
            .ok();
            None
        }
    } else {
        // The entire default profile file is itself the profile.
        Some(to_profile_map(profile_data))
    }
}

/// Converts a YAML hash node into an ordered column name -> alias map.
/// Panics if the node is not a hash of string pairs (assumed guaranteed by
/// the profile file format — TODO(review) confirm for hand-edited files).
fn to_profile_map(data: &Yaml) -> LinkedHashMap<String, String> {
    let mut ret: LinkedHashMap<String, String> = LinkedHashMap::new();
    data.as_hash().unwrap().into_iter().for_each(|(k, v)| {
        ret.insert(
            k.as_str().unwrap().to_string(),
            v.as_str().unwrap().to_string(),
        );
    });
    ret
}
/// Overwrites the default profile file with the profile selected via the
/// --set-default-profile option.
///
/// Reads the named profile from `profile_path` and writes it (truncating) to
/// `default_profile_path`. Returns a human-readable error message on failure.
pub fn set_default_profile(default_profile_path: &str, profile_path: &str) -> Result<(), String> {
    // Load all named profiles; abort if the profiles file cannot be read.
    let profile_data: Vec<Yaml> = match read_profile_data(profile_path) {
        Ok(data) => data,
        Err(e) => {
            AlertMessage::alert(&e).ok();
            return Err("Failed to set the default profile.".to_string());
        }
    };
    let profile_name = match &configs::CONFIG.read().unwrap().args.set_default_profile {
        Some(name) => name.clone(),
        None => return Err("Not specified: --set-default-profile".to_string()),
    };
    // Open (and truncate) the default profile file for writing.
    // BUG FIX: the error messages below previously interpolated profile_path,
    // but the file being written is default_profile_path.
    let mut buf_wtr = match OpenOptions::new()
        .write(true)
        .truncate(true)
        .open(default_profile_path)
        .map(BufWriter::new)
    {
        Ok(wtr) => wtr,
        Err(_) => {
            return Err(format!(
                "Failed to set the default profile file({}).",
                default_profile_path
            ))
        }
    };
    let prof_all_data = &profile_data[0];
    let overwrite_default_data = &prof_all_data[profile_name.as_str()];
    if overwrite_default_data.is_badvalue() {
        // The requested profile name is not defined: list the valid names.
        let profile_names: Vec<&str> = prof_all_data
            .as_hash()
            .unwrap()
            .keys()
            .map(|k| k.as_str().unwrap())
            .collect();
        return Err(format!(
            "Invalid profile specified: {}\nPlease specify one of the following profiles:\n{}",
            profile_name,
            profile_names.join(", ")
        ));
    }
    // Serialize the selected profile back to YAML text and write it out.
    let mut out_str = String::default();
    let mut yml_writer = YamlEmitter::new(&mut out_str);
    if let Err(e) = yml_writer.dump(overwrite_default_data) {
        return Err(format!(
            "Failed to set the default profile file({}). {}",
            default_profile_path, e
        ));
    }
    if let Err(e) = buf_wtr.write_all(out_str.as_bytes()) {
        return Err(format!(
            "Failed to set the default profile file({}). {}",
            default_profile_path, e
        ));
    }
    // Flush errors are intentionally ignored (matches previous behavior).
    buf_wtr.flush().ok();
    Ok(())
}
#[cfg(test)]
mod tests {
    use linked_hash_map::LinkedHashMap;
    use crate::detections::configs;
    use crate::options::profile::load_profile;
    #[test]
    /// The sub-tests run sequentially from this single entry point: each one
    /// mutates the global CONFIG options, so running them in parallel would
    /// not produce deterministic results.
    fn test_load_profile() {
        test_load_profile_without_profile_option();
        test_load_profile_no_exist_profile_files();
        test_load_profile_with_profile_option();
    }
    /// Loading when the --profile option is not set: the default profile file
    /// is loaded as-is.
    fn test_load_profile_without_profile_option() {
        configs::CONFIG.write().unwrap().args.profile = None;
        let mut expect: LinkedHashMap<String, String> = LinkedHashMap::new();
        expect.insert("Timestamp".to_owned(), "%Timestamp%".to_owned());
        expect.insert("Computer".to_owned(), "%Computer%".to_owned());
        expect.insert("Channel".to_owned(), "%Channel%".to_owned());
        expect.insert("Level".to_owned(), "%Level%".to_owned());
        expect.insert("EventID".to_owned(), "%EventID%".to_owned());
        expect.insert("MitreAttack".to_owned(), "%MitreAttack%".to_owned());
        expect.insert("RecordID".to_owned(), "%RecordID%".to_owned());
        expect.insert("RuleTitle".to_owned(), "%RuleTitle%".to_owned());
        expect.insert("Details".to_owned(), "%Details%".to_owned());
        expect.insert(
            "RecordInformation".to_owned(),
            "%RecordInformation%".to_owned(),
        );
        expect.insert("RuleFile".to_owned(), "%RuleFile%".to_owned());
        expect.insert("EvtxFile".to_owned(), "%EvtxFile%".to_owned());
        expect.insert("Tags".to_owned(), "%MitreAttack%".to_owned());
        assert_eq!(
            Some(expect),
            load_profile(
                "test_files/config/default_profile.yaml",
                "test_files/config/profiles.yaml"
            )
        );
    }
    /// Loading when the --profile option is set and the named profile exists
    /// in the profiles file.
    fn test_load_profile_with_profile_option() {
        configs::CONFIG.write().unwrap().args.profile = Some("minimal".to_string());
        let mut expect: LinkedHashMap<String, String> = LinkedHashMap::new();
        expect.insert("Timestamp".to_owned(), "%Timestamp%".to_owned());
        expect.insert("Computer".to_owned(), "%Computer%".to_owned());
        expect.insert("Channel".to_owned(), "%Channel%".to_owned());
        expect.insert("EventID".to_owned(), "%EventID%".to_owned());
        expect.insert("Level".to_owned(), "%Level%".to_owned());
        expect.insert("RuleTitle".to_owned(), "%RuleTitle%".to_owned());
        expect.insert("Details".to_owned(), "%Details%".to_owned());
        assert_eq!(
            Some(expect),
            load_profile(
                "test_files/config/default_profile.yaml",
                "test_files/config/profiles.yaml"
            )
        );
    }
    /// Loading when the --profile option is set but a profile file or the
    /// requested profile name does not exist: all cases must return None.
    fn test_load_profile_no_exist_profile_files() {
        configs::CONFIG.write().unwrap().args.profile = Some("not_exist".to_string());
        // Neither profile file exists.
        assert_eq!(
            None,
            load_profile(
                "test_files/config/no_exist_default_profile.yaml",
                "test_files/config/no_exist_profiles.yaml"
            )
        );
        // The default profile file exists, but because --profile is set the
        // (missing) profiles file is read, so loading fails.
        assert_eq!(
            None,
            load_profile(
                "test_files/config/profile/default_profile.yaml",
                "test_files/config/profile/no_exist_profiles.yaml"
            )
        );
        // The profiles file exists, but the profile name given via --profile
        // is not defined in it.
        assert_eq!(
            None,
            load_profile(
                "test_files/config/no_exist_default_profile.yaml",
                "test_files/config/profiles.yaml"
            )
        );
    }
}

View File

@@ -111,7 +111,7 @@ impl EventStatistics {
continue;
}
let username = utils::get_event_value("TargetUserName", &record.record);
let idnum = evtid.unwrap();
let idnum = evtid.unwrap().as_i64().unwrap();
let countlist: [usize; 2] = [0, 0];
if idnum == 4624 {
let count: &mut [usize; 2] = self

View File

@@ -0,0 +1,13 @@
# Default output profile: an ordered mapping of output column name -> field
# alias. Columns are printed in this order. This file is overwritten when the
# --set-default-profile option selects a profile from config/profiles.yaml.
Timestamp: "%Timestamp%"
Computer: "%Computer%"
Channel: "%Channel%"
Level: "%Level%"
EventID: "%EventID%"
MitreAttack: "%MitreAttack%"
RecordID: "%RecordID%"
RuleTitle: "%RuleTitle%"
Details: "%Details%"
RecordInformation: "%RecordInformation%"
RuleFile: "%RuleFile%"
EvtxFile: "%EvtxFile%"
# NOTE(review): Tags expands the same %MitreAttack% alias as the MitreAttack
# column above, so that information is emitted twice — confirm this is intended.
Tags: "%MitreAttack%"

View File

@@ -0,0 +1,44 @@
# Named output profiles selectable with the --profile option.
# Each profile is an ordered mapping of output column name -> field alias;
# --set-default-profile copies one of these into config/default_profile.yaml.
# Smallest column set.
minimal:
  Timestamp: "%Timestamp%"
  Computer: "%Computer%"
  Channel: "%Channel%"
  EventID: "%EventID%"
  Level: "%Level%"
  RuleTitle: "%RuleTitle%"
  Details: "%Details%"
# minimal plus MITRE ATT&CK tags and the record ID.
standard:
  Timestamp: "%Timestamp%"
  Computer: "%Computer%"
  Channel: "%Channel%"
  EventID: "%EventID%"
  Level: "%Level%"
  Tags: "%MitreAttack%"
  RecordID: "%RecordID%"
  RuleTitle: "%RuleTitle%"
  Details: "%Details%"
# standard plus the source rule and evtx file paths.
verbose-1:
  Timestamp: "%Timestamp%"
  Computer: "%Computer%"
  Channel: "%Channel%"
  EventID: "%EventID%"
  Level: "%Level%"
  Tags: "%MitreAttack%"
  RecordID: "%RecordID%"
  RuleTitle: "%RuleTitle%"
  Details: "%Details%"
  RuleFile: "%RuleFile%"
  EvtxFile: "%EvtxFile%"
# standard plus the full record field information.
verbose-2:
  Timestamp: "%Timestamp%"
  Computer: "%Computer%"
  Channel: "%Channel%"
  EventID: "%EventID%"
  Level: "%Level%"
  Tags: "%MitreAttack%"
  RecordID: "%RecordID%"
  RuleTitle: "%RuleTitle%"
  Details: "%Details%"
  AllFieldInfo: "%RecordInformation%"