Compare commits

...

397 Commits

Author SHA1 Message Date
renovate[bot]
8f804305cd Update dependency core-js to v3.25.4 (#475)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-10-02 23:45:08 +02:00
Jonas Plum
2626d156bc Change roles in user editor (#478)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 23:45:00 +02:00
Jonas Plum
b25f3f4708 Fix version info (#477)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 23:36:34 +02:00
Jonas Plum
5f4fd667a9 Update readme (#476)
* Update readme

Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 23:18:12 +02:00
Jonas Plum
fc42d4043b Fix version (#474)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 20:10:57 +02:00
Jonas Plum
e987e46cbd Remove unneeded CI commands (#472)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 19:40:12 +02:00
Jonas Plum
b085ef4f1b Fix permissions (#471)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 06:40:01 +02:00
Jonas Plum
9915a10ca2 Map userdata (#470)
* Map userdata
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-02 05:41:48 +02:00
renovate[bot]
94ebcade12 Update golang.org/x/exp digest to 540bb73 (#469)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-10-02 04:41:16 +02:00
Jonas Plum
f73e91d142 Add maut (#468)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-01 21:38:13 +02:00
Jonas Plum
4eb0658888 Configure OIDC (#467)
Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-01 03:36:39 +02:00
renovate[bot]
215e56deb1 Update golang.org/x/exp digest to ec3f013 (#435)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-10-01 03:24:48 +02:00
Jonas Plum
a50133f6fd Add auth url (#466)
* Add auth url

Co-authored-by: Jonas Plum <git@jonasplum.de>
2022-10-01 03:05:07 +02:00
renovate[bot]
5b5bba30ca Update module github.com/aws/aws-sdk-go to v1.44.109 2022-10-01 01:03:44 +00:00
Jonas Plum
2a6183b368 Enable OIDC by default (#432)
* Enable OIDC by default
2022-09-30 21:27:43 +02:00
renovate[bot]
3f6cd5b366 Update dependency @koumoul/vjsf to v2.20.3 (#463)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 14:43:01 +02:00
renovate[bot]
24eb325058 Update dependency json-schema-editor-vue to v2.1.0 (#459)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:22:04 +02:00
renovate[bot]
210ab54a8c Update dependency core-js to v3.25.3 (#457)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:21:56 +02:00
renovate[bot]
a59dc977d0 Update dependency cypress to v10.9.0 (#458)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:21:49 +02:00
renovate[bot]
88191fd7e6 Update dependency sass to v1.55.0 (#460)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:21:37 +02:00
renovate[bot]
1ecba482ef Update typescript-eslint monorepo to v5.38.1 (#462)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:21:30 +02:00
renovate[bot]
213ecd03d3 Update dependency typescript to v4.8.4 (#461)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 12:21:20 +02:00
renovate[bot]
3525582f3f Update dependency vue-router to v3.6.5 (#452)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:39:40 +02:00
renovate[bot]
b14a0c5e77 Update dependency antlr4 to v4.11.0 (#456)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:39:28 +02:00
renovate[bot]
b3f9789801 Update dependency @vue/test-utils to v2.1.0 (#455)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:39:18 +02:00
renovate[bot]
7c800ec8f2 Update dependency eslint-plugin-jest to v27 (#450)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:00:47 +02:00
renovate[bot]
654f188f11 Update dependency @types/jest to v29 (#449)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:00:25 +02:00
renovate[bot]
8338ba9f69 Update dependency vuetify to v2.6.10 (#453)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 10:00:06 +02:00
renovate[bot]
9dd8116dd8 Update dependency vue-cli-plugin-vuetify to v2.5.8 2022-09-30 05:32:06 +00:00
renovate[bot]
ba689f2482 Update dependency @vue/compiler-sfc to v3.2.40 2022-09-30 02:44:44 +00:00
renovate[bot]
255f668757 Update dependency luxon to v3.0.4 (#448) 2022-09-30 01:44:06 +02:00
renovate[bot]
29ea4145a2 Update dependency @koumoul/vjsf to v2.20.2 (#442)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 01:12:13 +02:00
Jonas Plum
efaf0ed266 Multiple updates (#445) 2022-09-30 00:50:47 +02:00
renovate[bot]
522e93c8f1 Update dependency @types/lodash to v4.14.186 (#438)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-09-30 00:43:49 +02:00
Jonas Plum
79c2450958 Merge pull request #437 from SecurityBrewery/renovate/jest-monorepo
Update dependency @types/jest to v28.1.8
2022-09-30 00:25:26 +02:00
renovate[bot]
06e57b6e4c Update dependency @types/jest to v28.1.8 2022-09-29 22:00:19 +00:00
Jonas Plum
7275202f63 Merge pull request #431 from SecurityBrewery/renovate/golang.org-x-crypto-digest
Update golang.org/x/crypto digest to eccd636
2022-09-29 23:56:03 +02:00
renovate[bot]
b2d2fe25e1 Update golang.org/x/crypto digest to eccd636 2022-09-29 21:46:20 +00:00
Jonas Plum
59fcd3ce35 Merge pull request #433 from SecurityBrewery/server-timeout
Add server timeout
2022-09-29 23:43:25 +02:00
Jonas Plum
52cebf8d0a Add server timeout 2022-09-29 23:30:22 +02:00
renovate[bot]
979212f16a Update vue monorepo to v2.7.10 2022-08-23 05:47:39 +00:00
renovate[bot]
6093019c4a Update golang.org/x/oauth2 digest to 0ebed06 2022-08-23 03:05:19 +00:00
renovate[bot]
0f45821394 Update typescript-eslint monorepo to v5.34.0 (#424)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-22 21:27:15 +02:00
renovate[bot]
c93853a2bd Update module github.com/aws/aws-sdk-go to v1.44.82 (#423)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-22 21:27:03 +02:00
renovate[bot]
dc94bc59fb Update dependency vue-router to v3.6.0 (#422)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-22 17:39:45 +02:00
renovate[bot]
4d299554c5 Update dependency @types/luxon to v3 (#353)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-22 10:26:25 +02:00
renovate[bot]
f661b548d6 Update dependency eslint-plugin-jest to v26.8.7 2022-08-22 02:29:02 +00:00
renovate[bot]
91775bd09b Update nginx Docker tag to v1.23 (#417)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 23:28:27 +02:00
renovate[bot]
1fc358a989 Update typescript-eslint monorepo to v5.33.1 (#418)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 23:28:15 +02:00
renovate[bot]
20e8285816 Update dependency eslint-plugin-jest to v26.8.6 (#415)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 23:03:29 +02:00
Jonas Plum
fd8e793361 Fix sorting on multiple ticket fields (#412) 2022-08-21 22:23:27 +02:00
Jonas Plum
2b7be7c212 Add stale.yml (#413) 2022-08-21 22:21:35 +02:00
renovate[bot]
ae913f7cd4 Update dependency cypress to v10.6.0 (#411)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 21:40:23 +02:00
renovate[bot]
f9f2c17709 Update vue monorepo to v2.7.9 (#409)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 21:40:04 +02:00
renovate[bot]
6354fd2735 Update golang.org/x/oauth2 digest to 8227340 (#407)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 21:08:05 +02:00
renovate[bot]
4e49835add Update module github.com/aws/aws-sdk-go to v1.44.81 (#405)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 21:07:58 +02:00
renovate[bot]
26246ce9f3 Update dependency @types/lodash to v4.14.184 (#401)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 21:07:52 +02:00
renovate[bot]
bcb5d2bf78 Update dependency eslint-plugin-jest to v26.8.5 (#392)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 20:38:59 +02:00
renovate[bot]
d80da3e77d Update dependency vue-cli-plugin-vuetify to v2.5.4 (#403)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 20:34:58 +02:00
renovate[bot]
8693ccd489 Update dependency @types/jest to v28.1.7 (#400)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 20:34:45 +02:00
renovate[bot]
ce77fbad47 Update golang.org/x/crypto digest to bc19a97 (#398)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 20:34:28 +02:00
Jonas Plum
c93ec52986 Add test retries (#406) 2022-08-21 20:16:56 +02:00
renovate[bot]
cee31465c7 Update github.com/icza/dyno digest to f0b6f8a (#397)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 20:05:21 +02:00
renovate[bot]
4baf1358f1 Update dependency sass to v1.54.5 (#402)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 19:35:03 +02:00
renovate[bot]
603029909d Update dependency vuetify to v2.6.9 (#404)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 19:34:50 +02:00
renovate[bot]
add7730036 Update dependency @koumoul/vjsf to v2.18.2 (#399)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 19:34:42 +02:00
renovate[bot]
cda8295c67 Update github.com/antlr/antlr4/runtime/Go/antlr digest to bc8df83 2022-08-21 15:04:16 +00:00
renovate[bot]
77cd336377 Update dependency just-kebab-case to v4.1.1 2022-08-21 12:25:53 +00:00
renovate[bot]
459fb2f5e7 Update dependency @mdi/font to v7 (#356)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 12:46:27 +02:00
renovate[bot]
e84ba818c5 Update module github.com/tidwall/sjson to v1.2.5 2022-08-21 10:35:05 +00:00
Jonas Plum
fb1de82382 Improve cypress (#395) 2022-08-21 12:23:59 +02:00
renovate[bot]
900d0e8693 Update module github.com/tidwall/gjson to v1.14.3 (#390)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-21 11:53:45 +02:00
Jonas Plum
0a172c0290 Fmt with 1.19 (#394) 2022-08-21 11:33:25 +02:00
renovate[bot]
baa74761ad Update module github.com/aws/aws-sdk-go to v1.44.70 2022-08-04 21:39:24 +00:00
renovate[bot]
c12c5fe768 Update module github.com/aws/aws-sdk-go to v1.44.69 2022-08-03 22:28:06 +00:00
renovate[bot]
e063ab0936 Update module go to 1.19 (#384)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-03 10:08:38 +02:00
renovate[bot]
b4c20f670c Update dependency sass to v1.54.1 2022-08-03 05:43:16 +00:00
renovate[bot]
b86c2b2efe Update dependency cypress to v10.4.0 (#383)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-02 20:30:00 +02:00
renovate[bot]
09800662ff Update module github.com/aws/aws-sdk-go to v1.44.67 (#380)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-01 22:50:38 +02:00
renovate[bot]
40d63d8dee Update typescript-eslint monorepo to v5.32.0 (#381)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-08-01 22:49:44 +02:00
renovate[bot]
98b36f3a58 Update dependency eslint-plugin-jest to v26.7.0 2022-07-30 04:06:11 +00:00
renovate[bot]
9c9c23fa0d Update module github.com/aws/aws-sdk-go to v1.44.66 2022-07-30 01:51:34 +00:00
renovate[bot]
44b7744bb7 Update dependency vuetify to v2.6.8 2022-07-29 23:16:51 +00:00
renovate[bot]
e490deff76 Update dependency core-js to v3.24.1 2022-07-29 20:54:31 +00:00
renovate[bot]
3fbbc2ff65 Update dependency core-js to v3.24.0 2022-07-29 03:37:12 +00:00
renovate[bot]
6b21bc72e6 Update dependency json-schema-editor-vue to v2.0.4 2022-07-29 01:53:36 +00:00
renovate[bot]
f3cfa6688d Update dependency vuetify-loader to v1.9.2 2022-07-28 23:44:22 +00:00
renovate[bot]
bc287b7600 Update typescript-eslint monorepo to v5.31.0 (#373)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-28 23:52:37 +02:00
renovate[bot]
8e5e50c85a Update module github.com/aws/aws-sdk-go to v1.44.65 (#374)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-28 23:52:25 +02:00
renovate[bot]
ca5f02d94a Update module github.com/arangodb/go-driver to v1.3.3 2022-07-28 21:14:13 +00:00
renovate[bot]
b283717321 Update module github.com/tus/tusd to v1.9.1 2022-07-28 18:31:20 +00:00
renovate[bot]
edb575c2aa Update golang.org/x/exp digest to a9213ee 2022-07-28 15:47:43 +00:00
renovate[bot]
c8f32ff0a7 Update module github.com/aws/aws-sdk-go to v1.44.64 2022-07-28 13:23:07 +00:00
renovate[bot]
e77a311340 Update vue monorepo to v2.7.8 2022-07-23 06:13:42 +00:00
renovate[bot]
b8da0734ec Update dependency sass to v1.54.0 2022-07-23 04:01:57 +00:00
renovate[bot]
9744b8ba96 Update github.com/antlr/antlr4/runtime/Go/antlr digest to 14703f2 2022-07-23 01:43:13 +00:00
renovate[bot]
3d95fece81 Update golang.org/x/oauth2 digest to 128564f 2022-07-22 23:59:45 +00:00
renovate[bot]
210fdeb88d Update dependency @koumoul/vjsf to v2.18.0 2022-07-22 22:01:16 +00:00
renovate[bot]
8914c97c57 Update golang.org/x/crypto digest to 630584e 2022-07-22 19:39:47 +00:00
renovate[bot]
d75d4efdd1 Update module github.com/aws/aws-sdk-go to v1.44.60 2022-07-22 00:00:43 +00:00
renovate[bot]
6cafad44c5 Update dependency cypress to v10.3.1 2022-07-21 01:01:13 +00:00
renovate[bot]
25d67f40af Update dependency swagger-ui to v4.13.0 2022-07-20 22:59:13 +00:00
renovate[bot]
f01e1b197f Update module github.com/aws/aws-sdk-go to v1.44.59 (#357)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-20 22:24:08 +02:00
renovate[bot]
6798427d65 Update dependency @types/luxon to v2.4.0 2022-07-20 17:59:50 +00:00
renovate[bot]
16e95df060 Update golang.org/x/oauth2 digest to c8730f7 2022-07-20 15:19:28 +00:00
renovate[bot]
664e079016 Update typescript-eslint monorepo to v5.30.7 2022-07-20 12:54:29 +00:00
renovate[bot]
5e67816351 Update module github.com/aws/aws-sdk-go to v1.44.58 2022-07-20 10:32:20 +00:00
renovate[bot]
abd6632578 Update dependency core-js to v3.23.5 2022-07-17 21:50:01 +00:00
renovate[bot]
50cc19ddf7 Update dependency @testing-library/vue to v6.6.1 (#346)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-17 00:27:13 +02:00
renovate[bot]
7cef009e48 Update vue monorepo to v2.7.7 2022-07-16 19:03:57 +00:00
renovate[bot]
bba0b82034 Update dependency @types/jest to v28.1.6 2022-07-16 03:12:44 +00:00
renovate[bot]
91d96b45e3 Update github.com/antlr/antlr4/runtime/Go/antlr digest to f1df316 2022-07-16 00:47:14 +00:00
renovate[bot]
0fe5ef0784 Update module github.com/aws/aws-sdk-go to v1.44.56 2022-07-15 22:22:19 +00:00
renovate[bot]
0652512d86 Update vue monorepo to v2.7.6 2022-07-15 20:07:06 +00:00
renovate[bot]
7d19621807 Update module github.com/aws/aws-sdk-go to v1.44.55 (#339)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-15 08:27:34 +02:00
renovate[bot]
3a18988ee6 Update dependency eslint-plugin-jest to v26.6.0 2022-07-15 03:07:19 +00:00
renovate[bot]
7fe6f779e1 Update module github.com/aws/aws-sdk-go to v1.44.54 (#338)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-14 00:06:07 +02:00
renovate[bot]
bc31882e16 Update golang.org/x/exp digest to 79cabaa 2022-07-13 18:43:54 +00:00
renovate[bot]
67339e18a4 Update vue monorepo to v2.7.5 (#336)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-13 10:31:49 +02:00
renovate[bot]
eb373c3773 Update dependency @types/jest to v28.1.5 2022-07-13 04:35:25 +00:00
renovate[bot]
42d4d68320 Update module github.com/aws/aws-sdk-go to v1.44.53 (#334)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-12 23:17:20 +02:00
renovate[bot]
8f65b99f26 Update dependency vuetify-loader to v1.9.1 2022-07-12 13:46:02 +00:00
renovate[bot]
4b19642a26 Update module github.com/aws/aws-sdk-go to v1.44.52 2022-07-11 21:24:00 +00:00
renovate[bot]
26f5e11b61 Update typescript-eslint monorepo to v5.30.6 (#331)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-11 20:48:32 +02:00
renovate[bot]
4c6f17670a Update dependency luxon to v3 (#329)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-11 14:13:24 +02:00
renovate[bot]
c85b507c28 Update dependency json-schema-editor-vue to v2.0.3 (#330)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-11 13:59:19 +02:00
renovate[bot]
736cc24f74 Update dependency luxon to v2.5.0 2022-07-09 22:55:24 +00:00
renovate[bot]
37fea143d1 Update dependency core-js to v3.23.4 2022-07-09 20:48:44 +00:00
renovate[bot]
947bb4ba34 Update module github.com/aws/aws-sdk-go to v1.44.51 2022-07-08 22:00:20 +00:00
renovate[bot]
49ad9f74f2 Update vue monorepo to v2.7.4 (#325)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-08 12:36:03 +02:00
renovate[bot]
c0b6626301 Update module github.com/aws/aws-sdk-go to v1.44.50 2022-07-07 23:05:52 +00:00
renovate[bot]
2f3acc36f2 Update module github.com/aws/aws-sdk-go to v1.44.49 2022-07-07 01:15:38 +00:00
renovate[bot]
c1b05b9775 Update golang.org/x/exp digest to b4a6d95 2022-07-06 22:16:30 +00:00
renovate[bot]
3445d4ed8e Update vue monorepo to v2.7.3 2022-07-06 12:32:36 +00:00
renovate[bot]
b9b98dad7d Update module github.com/aws/aws-sdk-go to v1.44.48 2022-07-05 23:06:43 +00:00
renovate[bot]
d5fbff5042 Update vue monorepo to v2.7.2 2022-07-05 20:31:21 +00:00
renovate[bot]
97b30b2abb Update dependency @koumoul/vjsf to v2.17.0 (#317)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-05 19:56:33 +02:00
renovate[bot]
07ae6fba2d Update dependency vuetify-loader to v1.9.0 (#318)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-05 19:56:19 +02:00
renovate[bot]
f66897289b Update typescript-eslint monorepo to v5.30.5 2022-07-04 20:34:40 +00:00
renovate[bot]
a3e33edec6 Update dependency @vue/test-utils to v2.0.2 2022-07-04 15:47:28 +00:00
renovate[bot]
10240b559c Update typescript-eslint monorepo to v5.30.4 2022-07-03 16:06:49 +00:00
renovate[bot]
c4cb632185 Update module github.com/aws/aws-sdk-go to v1.44.47 (#313)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-07-01 23:32:36 +02:00
renovate[bot]
9a0fad5f3f Update typescript-eslint monorepo to v5.30.3 2022-07-01 19:48:52 +00:00
renovate[bot]
ae5b7305c8 Update module github.com/aws/aws-sdk-go to v1.44.46 2022-07-01 02:37:50 +00:00
renovate[bot]
3d4401cdcc Update dependency @types/jest to v28.1.4 2022-06-30 23:34:24 +00:00
renovate[bot]
a4148f673b Update golang.org/x/oauth2 digest to 2104d58 2022-06-30 21:00:07 +00:00
renovate[bot]
63cca38fcb Update dependency @koumoul/vjsf to v2.16.1 2022-06-30 17:52:11 +00:00
renovate[bot]
812bd6f782 Update dependency @mdi/font to v6.9.96 (#306)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-30 08:10:11 +02:00
renovate[bot]
8d3cae82a5 Update module github.com/aws/aws-sdk-go to v1.44.45 (#305)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-30 00:23:03 +02:00
Jonas Plum
f21dde77b6 Downgrade to 10.2.0 (#304) 2022-06-29 22:52:46 +02:00
renovate[bot]
1a9215673b Update module github.com/stretchr/testify to v1.8.0 (#303)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-29 20:49:41 +02:00
renovate[bot]
d4c73b603f Update dependency cypress to v10.3.0 (#298)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-29 15:44:06 +02:00
renovate[bot]
69631c7062 Update dependency vuetify to v2.6.7 (#301)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-29 15:43:53 +02:00
renovate[bot]
7318f34cbb Update dependency @koumoul/vjsf to v2.16.0 (#302)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-29 15:43:28 +02:00
renovate[bot]
88b1776ad9 Update dependency @mdi/font to v6.8.96 (#300)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-29 15:04:37 +02:00
renovate[bot]
461e501267 Update golang.org/x/oauth2 digest to 02e64fa 2022-06-29 01:03:40 +00:00
renovate[bot]
23e85d5c9e Update module github.com/aws/aws-sdk-go to v1.44.44 2022-06-28 22:39:02 +00:00
renovate[bot]
42eb593f5a Update vue monorepo to v4.5.19 2022-06-28 10:48:35 +00:00
renovate[bot]
676ef788b1 Update module github.com/aws/aws-sdk-go to v1.44.43 2022-06-28 00:01:51 +00:00
renovate[bot]
2e341f8d55 Update typescript-eslint monorepo to v5.30.0 2022-06-27 20:42:03 +00:00
renovate[bot]
0040c1b6e1 Update github.com/antlr/antlr4/runtime/Go/antlr digest to 9abda18 (#293)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-27 00:19:08 +03:00
renovate[bot]
1c59dfdb44 Update dependency core-js to v3.23.3 2022-06-25 23:24:53 +00:00
renovate[bot]
3943474c09 Update github.com/antlr/antlr4/runtime/Go/antlr digest to e4cec20 2022-06-25 20:43:03 +00:00
Jonas Plum
e679781981 Fix static path (#289) 2022-06-25 01:04:42 +02:00
Jonas Plum
b2fde8f26a Update antlr (#287) 2022-06-24 22:30:28 +02:00
renovate[bot]
9ff10e1f34 Update module github.com/aws/aws-sdk-go to v1.44.42 (#285)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-24 21:38:14 +02:00
renovate[bot]
227c3f4f4e Update module github.com/stretchr/testify to v1.7.5 (#284)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-24 09:43:28 +03:00
renovate[bot]
c7e9957749 Update module github.com/aws/aws-sdk-go to v1.44.41 2022-06-24 05:22:56 +00:00
renovate[bot]
ba8e39e8b1 Update dependency sass to v1.53.0 (#280)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-23 08:42:33 +03:00
renovate[bot]
2895630770 Update module github.com/aws/aws-sdk-go to v1.44.40 2022-06-23 04:04:04 +00:00
renovate[bot]
ea0672d8bc Update golang.org/x/crypto digest to 0559593 2022-06-23 01:35:39 +00:00
renovate[bot]
c6000ab54c Update golang.org/x/oauth2 digest to fd043fe 2022-06-22 22:25:24 +00:00
renovate[bot]
ba7b6e685a Update dependency @koumoul/vjsf to v2.15.0 2022-06-22 19:58:12 +00:00
renovate[bot]
6be51a40d6 Update dependency cypress to v10.2.0 2022-06-22 06:34:52 +00:00
renovate[bot]
4d3a8fb857 Update module github.com/aws/aws-sdk-go to v1.44.39 2022-06-22 03:56:12 +00:00
renovate[bot]
edb462592d Update dependency @types/jest to v28.1.3 2022-06-22 00:55:38 +00:00
renovate[bot]
6de65c75f1 Update dependency vuetify-loader to v1.8.0 2022-06-21 14:48:22 +00:00
renovate[bot]
0fd4bd4919 Update typescript-eslint monorepo to v5.29.0 2022-06-21 04:49:16 +00:00
renovate[bot]
0867671a15 Update module github.com/stretchr/testify to v1.7.4 2022-06-21 01:39:11 +00:00
renovate[bot]
ee37b68604 Update dependency core-js to v3.23.2 2022-06-20 22:20:49 +00:00
renovate[bot]
5af2fa9cf2 Update module github.com/aws/aws-sdk-go to v1.44.38 2022-06-20 21:40:14 +00:00
renovate[bot]
9671eccd2b Update module github.com/stretchr/testify to v1.7.3 2022-06-20 19:05:08 +00:00
renovate[bot]
d186abe251 Update dependency typescript to v4.7.4 (#264)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-18 13:06:30 +02:00
renovate[bot]
f10eba7f90 Update dependency @types/jest to v28.1.2 (#263)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-18 13:03:51 +02:00
renovate[bot]
9b60c44e4d Update module github.com/aws/aws-sdk-go to v1.44.37 (#265)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2022-06-18 13:03:43 +02:00
Renovate Bot
1976ed403a Update module github.com/aws/aws-sdk-go to v1.44.36 2022-06-16 22:29:26 +00:00
Renovate Bot
299b094f54 Update vue monorepo to v4.5.18 2022-06-16 19:43:26 +00:00
renovate[bot]
d1f619b861 Update module github.com/aws/aws-sdk-go to v1.44.35 (#260)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-15 23:50:51 +02:00
renovate[bot]
8343f29562 Update module github.com/alecthomas/kong to v0.6.1 (#259)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-15 17:13:31 +02:00
Jonas Plum
2026cb3c6a More e2e tests (#258) 2022-06-15 04:03:22 +02:00
Renovate Bot
2ac1dd29ad Update module github.com/aws/aws-sdk-go to v1.44.34 2022-06-15 00:47:32 +00:00
renovate[bot]
ed5d3d2cf9 Update dependency @vue/test-utils to v2.0.1 (#257)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-15 02:32:10 +02:00
renovate[bot]
12ed0d0f30 Update dependency @types/jest to v28 (#252)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:46:04 +02:00
renovate[bot]
885a4e3c13 Update dependency swagger-ui to v4.12.0 (#254)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:45:56 +02:00
renovate[bot]
d5b944e00d Update typescript-eslint monorepo to v5.28.0 (#255)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:45:40 +02:00
renovate[bot]
0577e7c347 Update module github.com/alecthomas/kong to v0.6.0 (#251)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:20:30 +02:00
renovate[bot]
af690832eb Update dependency eslint-plugin-jest to v26.5.3 (#250)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:18:57 +02:00
renovate[bot]
6a5f6b3320 Update dependency core-js to v3.23.1 (#249)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:16:42 +02:00
Renovate Bot
043460d4e5 Update dependency @testing-library/vue to v6.6.0 2022-06-14 14:15:22 +00:00
Jonas Plum
4b36b8eb1f Enable renovate dashboard again 2022-06-14 16:04:01 +02:00
renovate[bot]
a9178ed44c Update module github.com/stretchr/testify to v1.7.2 (#246)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:02:00 +02:00
renovate[bot]
9bff8d2d09 Update dependency vue-cli-plugin-vuetify to v2.5.1 (#243)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:01:04 +02:00
renovate[bot]
004ff933ba Update dependency @koumoul/vjsf to v2.14.0 (#247)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 16:00:56 +02:00
Jonas Plum
7caf676571 Upgrade Cypress (#245) 2022-06-14 15:59:35 +02:00
renovate[bot]
48459b8a8b Update module github.com/aws/aws-sdk-go to v1.44.33 (#244)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 15:20:07 +02:00
renovate[bot]
2abbb482da Update dependency typescript to v4.7.3 (#242)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 15:19:47 +02:00
renovate[bot]
7ab37efc0c Update dependency sass to v1.52.3 (#241)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 15:19:37 +02:00
Renovate Bot
f304d1e492 Update dependency less to v4.1.3 2022-06-14 07:00:21 +00:00
Renovate Bot
38d3252b2e Update dependency just-kebab-case to v4.0.3 2022-06-14 03:33:34 +00:00
renovate[bot]
a91fd8cccd Update dependency @types/jest to v27.5.2 (#237)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 01:20:59 +02:00
renovate[bot]
1a3c690f79 Update dependency @vue/compiler-sfc to v3.2.37 (#238)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 01:20:48 +02:00
renovate[bot]
7e7290393c Update golang.org/x/oauth2 digest to d0670ef (#236)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-14 00:48:54 +02:00
renovate[bot]
e403cb34f9 Update dependency nginx to v1.22 (#224)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-13 23:30:39 +02:00
renovate[bot]
8c26dc72b4 Update dependency arangodb/arangodb to v3.9.1 (#56)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-13 23:30:19 +02:00
renovate[bot]
224b8c5c42 Update dependency @vue/test-utils to v2 (#206)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-13 23:29:47 +02:00
renovate[bot]
705f0cadea Update golang.org/x/crypto digest to 793ad66 (#235)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-06-13 23:29:26 +02:00
Renovate Bot
be2ab900dc Update golang.org/x/exp digest to b0d7811 2022-06-13 21:10:27 +00:00
Jonas Plum
9f1041d7ef Add simple auth (#186) 2022-06-13 18:13:31 +02:00
Renovate Bot
4883646f39 Update dependency eslint-plugin-jest to v26.4.5 2022-05-29 23:46:07 +00:00
Renovate Bot
6b381d46a8 Update dependency eslint-plugin-jest to v26.4.2 2022-05-28 23:28:29 +00:00
Renovate Bot
6a7b28c294 Update dependency eslint-plugin-jest to v26.3.0 2022-05-28 09:58:50 +00:00
Renovate Bot
2a46041f07 Update module github.com/aws/aws-sdk-go to v1.44.24 2022-05-27 20:44:24 +00:00
Renovate Bot
781f16286d Update module gopkg.in/yaml.v3 to v3.0.1 2022-05-27 13:25:11 +00:00
Renovate Bot
6ad2c83fa0 Update module github.com/aws/aws-sdk-go to v1.44.23 2022-05-26 21:52:57 +00:00
Renovate Bot
efd63a0151 Update module github.com/imdario/mergo to v0.3.13 2022-05-26 00:27:24 +00:00
Renovate Bot
cef947d2f8 Update module github.com/aws/aws-sdk-go to v1.44.22 2022-05-25 21:59:14 +00:00
Renovate Bot
baaa6c989f Update dependency @koumoul/vjsf to v2.13.1 2022-05-25 18:57:20 +00:00
Renovate Bot
6e8ed2cab6 Update dependency typescript to v4.7.2 2022-05-25 04:42:35 +00:00
Renovate Bot
f374a927e4 Update golang.org/x/oauth2 digest to 622c5d5 2022-05-25 02:24:25 +00:00
Renovate Bot
288dfeae5c Update module github.com/aws/aws-sdk-go to v1.44.21 2022-05-24 23:19:58 +00:00
Renovate Bot
86bc0b085e Update dependency core-js to v3.22.7 2022-05-24 20:58:28 +00:00
Renovate Bot
b492897999 Update typescript-eslint monorepo to v5.26.0 2022-05-24 03:59:22 +00:00
Renovate Bot
648ad78e07 Update dependency cypress to v9.7.0 2022-05-24 01:54:55 +00:00
Renovate Bot
76322cc757 Update module github.com/aws/aws-sdk-go to v1.44.20 2022-05-23 23:37:05 +00:00
Renovate Bot
8bc6c473f4 Update dependency @vue/compiler-sfc to v3.2.36 2022-05-23 10:24:22 +00:00
Renovate Bot
298906cae0 Update dependency core-js to v3.22.6 2022-05-22 22:52:56 +00:00
Renovate Bot
9a658d6206 Update dependency vue-cli-plugin-vuetify to v2.5.0 2022-05-22 17:01:38 +00:00
Renovate Bot
e819d917ac Update module gopkg.in/yaml.v3 to v3.0.0 2022-05-21 15:57:53 +00:00
Renovate Bot
529c5eb4c1 Update dependency sass to v1.52.1 2022-05-21 05:07:33 +00:00
Renovate Bot
3d150ac002 Update dependency @vue/compiler-sfc to v3.2.35 2022-05-21 02:18:42 +00:00
Renovate Bot
390318c038 Update module github.com/aws/aws-sdk-go to v1.44.19 2022-05-21 00:03:44 +00:00
Renovate Bot
bbcd66eec8 Update dependency @koumoul/vjsf to v2.13.0 2022-05-20 18:31:23 +00:00
Renovate Bot
0f427c059d Update dependency sass to v1.52.0 2022-05-20 03:08:22 +00:00
Renovate Bot
8eb8b37abd Update module github.com/aws/aws-sdk-go to v1.44.18 2022-05-19 21:45:50 +00:00
renovate[bot]
9585ec23c5 Update dependency swagger-ui to v4 (#146)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-19 08:48:03 +02:00
renovate[bot]
36d1ba049a Update dependency less-loader to v11 (#198)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-19 08:47:47 +02:00
renovate[bot]
d0df4d77f3 Update dependency @vue/compiler-sfc to v3.2.34 (#203)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-19 08:47:26 +02:00
Renovate Bot
e994907970 Update module github.com/aws/aws-sdk-go to v1.44.17 2022-05-19 01:57:53 +00:00
Renovate Bot
99134ee3ae Update golang.org/x/exp digest to 0b5c67f 2022-05-18 23:30:32 +00:00
Renovate Bot
3d27413b8f Update dependency vuetify to v2.6.6 2022-05-18 03:27:03 +00:00
Renovate Bot
7acb853e1a Update module github.com/aws/aws-sdk-go to v1.44.16 2022-05-17 22:19:01 +00:00
Renovate Bot
65bec75b6b Update typescript-eslint monorepo to v5.25.0 2022-05-17 19:18:05 +00:00
Renovate Bot
d1511db529 Update module github.com/arangodb/go-driver to v1.3.2 2022-05-17 13:57:54 +00:00
Renovate Bot
7aee08ed32 Update dependency @mdi/font to v6.7.96 2022-05-17 10:58:34 +00:00
Renovate Bot
3a984f5591 Update module github.com/aws/aws-sdk-go to v1.44.15 2022-05-17 04:47:40 +00:00
Renovate Bot
01ed124f98 Update dependency vue-router to v3.5.4 2022-05-17 02:45:13 +00:00
Renovate Bot
2ccf3ce351 Update dependency @koumoul/vjsf to v2.12.1 2022-05-16 23:55:28 +00:00
Renovate Bot
4468012c9b Update golang.org/x/exp digest to 24438e5 2022-05-16 20:12:02 +00:00
Renovate Bot
0614e146ad Update dependency eslint-plugin-jest to v26.2.2 2022-05-15 01:52:10 +00:00
Renovate Bot
0f3c8dc344 Update dependency eslint-plugin-jest to v26.2.0 2022-05-14 01:46:47 +00:00
Jonas Plum
dfb501f8b9 Remove emitter (#184)
* Remove emitter
2022-05-14 01:08:37 +02:00
Renovate Bot
894e607efb Update module github.com/aws/aws-sdk-go to v1.44.14 2022-05-13 22:52:09 +00:00
Renovate Bot
d40ee1047c Update dependency @koumoul/vjsf to v2.12.0 2022-05-13 14:00:15 +00:00
Renovate Bot
2e176374a2 Update module github.com/aws/aws-sdk-go to v1.44.13 2022-05-12 23:15:17 +00:00
Renovate Bot
3122af7263 Update dependency @types/jest to v27.5.1 2022-05-12 03:41:50 +00:00
Renovate Bot
395d730dbf Update module github.com/coreos/go-oidc/v3 to v3.2.0 2022-05-12 01:19:27 +00:00
Renovate Bot
c9fa3ef456 Update module github.com/aws/aws-sdk-go to v1.44.12 2022-05-11 23:05:42 +00:00
Renovate Bot
fe2a86ba55 Update dependency core-js to v3.22.5 2022-05-11 00:20:20 +00:00
Renovate Bot
30f963a23e Update module github.com/aws/aws-sdk-go to v1.44.11 2022-05-10 21:41:13 +00:00
Renovate Bot
51d7079534 Update typescript-eslint monorepo to v5.23.0 2022-05-10 02:06:52 +00:00
Renovate Bot
b2476b420b Update module github.com/aws/aws-sdk-go to v1.44.10 2022-05-09 23:17:47 +00:00
Renovate Bot
d423943439 Update dependency cypress to v9.6.1 2022-05-09 21:03:11 +00:00
Renovate Bot
41d5994a02 Update dependency luxon to v2.4.0 2022-05-09 10:36:19 +00:00
Renovate Bot
8956fe6033 Update dependency just-kebab-case to v4.0.2 2022-05-09 01:55:44 +00:00
renovate[bot]
246bd17228 Update docker/build-push-action action to v3 (#166)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-07 11:51:16 +02:00
renovate[bot]
2d7d6bff3d Update docker/login-action action to v2 (#167)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-07 11:51:06 +02:00
Renovate Bot
fc8646041e Update module github.com/aws/aws-sdk-go to v1.44.9 2022-05-06 22:50:02 +00:00
Renovate Bot
c6ba604d61 Update module github.com/aws/aws-sdk-go to v1.44.8 2022-05-05 22:20:51 +00:00
renovate[bot]
7916149cf2 Update docker/metadata-action action to v4 (#165)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-05 16:55:42 +02:00
Renovate Bot
d96a090538 Update module github.com/aws/aws-sdk-go to v1.44.7 2022-05-04 22:39:43 +00:00
Renovate Bot
6755602bb5 Update module github.com/aws/aws-sdk-go to v1.44.6 2022-05-04 02:08:09 +00:00
Renovate Bot
5ef778271e Update typescript-eslint monorepo to v5.22.0 2022-05-03 06:32:01 +00:00
Renovate Bot
62f0ac2f38 Update dependency @types/jest to v27.5.0 2022-05-03 03:16:20 +00:00
renovate[bot]
c222674b47 Update module github.com/aws/aws-sdk-go to v1.44.5 (#158)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 23:36:02 +02:00
renovate[bot]
67901ef8dc Update dependency core-js to v3.22.4 (#157)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 23:35:54 +02:00
renovate[bot]
8be35384e1 Update dependency vuetify to v2.6.5 (#156)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 23:35:42 +02:00
Renovate Bot
b0707c0213 Update dependency @types/luxon to v2.3.2 2022-05-02 21:01:48 +00:00
renovate[bot]
58f20c5b1a Update typescript-eslint monorepo to v5 (#151)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 16:44:17 +02:00
renovate[bot]
60c32433a4 Update dependency yaml to v2 (#150)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 16:44:07 +02:00
renovate[bot]
86bc9b779c Update dependency vue-cropperjs to v5 (#148)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 10:50:45 +02:00
renovate[bot]
5d9f790002 Update dependency less-loader to v10 (#143)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 00:35:28 +02:00
renovate[bot]
70169b70aa Update dependency luxon to v2 (#144)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 00:35:14 +02:00
renovate[bot]
65833dfd52 Update dependency just-kebab-case to v4 (#140)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-02 00:22:01 +02:00
renovate[bot]
cefa556f79 Update module github.com/aws/aws-sdk-go to v1.44.4 (#141)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-01 23:46:35 +02:00
renovate[bot]
8489a2d8ff Update dependency less to v4 (#142)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-01 23:46:08 +02:00
renovate[bot]
8a275fb6b9 Update dependency eslint-plugin-jest to v26 (#137)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-01 23:33:29 +02:00
renovate[bot]
4bfdbffbeb Update dependency json-schema-editor-vue to v2 (#139)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-05-01 23:33:09 +02:00
renovate[bot]
2b60558abb Update dependency @vue/eslint-config-typescript to v10 (#133)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 11:37:20 +02:00
renovate[bot]
24b9f54cc5 Update dependency @types/jest to v27 (#132)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 11:37:03 +02:00
renovate[bot]
11f4882ad4 Update dependency @testing-library/vue to v6 (#131)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:40:25 +02:00
renovate[bot]
5f8845d02a Update dependency typescript to v4.6.4 (#130)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:39:23 +02:00
renovate[bot]
43a136137c Update dependency vue-cli-plugin-vuetify to v2.4.8 (#127)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:12:50 +02:00
renovate[bot]
d45ffc5ec4 Update module github.com/aws/aws-sdk-go to v1.44.3 (#128)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:12:34 +02:00
renovate[bot]
6b21a283d4 Update dependency @mdi/font to v6 (#129)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:12:25 +02:00
renovate[bot]
f934516908 Update dependency splitpanes to v2.4.1 (#124)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-29 00:12:17 +02:00
Renovate Bot
1c20ed9552 Update dependency ajv to v8.11.0 2022-04-28 20:04:45 +00:00
renovate[bot]
fbcc3e1943 Update dependency antlr4 to v4.10.1 (#121)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-28 19:55:26 +02:00
renovate[bot]
1fa4b9f613 Update dependency core-js to v3.22.3 (#123)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-28 19:55:01 +02:00
renovate[bot]
48b3f877ee Update dependency typescript to v4.6.3 (#125)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-28 19:54:47 +02:00
renovate[bot]
5bd4a9db2d Update golang.org/x/exp digest to 39d4317 (#126)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-28 19:54:33 +02:00
Renovate Bot
785a47f2b9 Update dependency @vue/test-utils to v1.3.0 2022-04-28 17:02:30 +00:00
Renovate Bot
28c6136d80 Update dependency @types/prismjs to v1.26.0 2022-04-28 14:56:46 +00:00
Renovate Bot
ebcb6dc4a2 Update vue monorepo 2022-04-28 12:38:43 +00:00
Renovate Bot
48324c7d1d Update module github.com/aws/aws-sdk-go to v1.44.2 2022-04-28 11:45:47 +00:00
Renovate Bot
47303dec84 Update dependency vue-axios to v3.4.1 2022-04-28 05:54:15 +00:00
Renovate Bot
b757e5b7ed Update dependency @types/lodash to v4.14.182 2022-04-28 03:22:42 +00:00
Jonas Plum
89a33bc8f8 Remove renovate schedule 2022-04-28 02:14:25 +02:00
Jonas Plum
f0d9e43414 Automerge npm minor changes 2022-04-28 02:08:19 +02:00
renovate[bot]
c98b11d9e8 Update npm (#62)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-28 02:04:14 +02:00
Jonas Plum
41e4b091f8 Setup cypress (#112) 2022-04-28 01:04:54 +02:00
Renovate Bot
951c968694 Update module github.com/aws/aws-sdk-go to v1.44.1 2022-04-27 01:16:12 +00:00
Renovate Bot
a7dca29c19 Update golang.org/x/exp digest to 3bcf042 2022-04-26 22:41:45 +00:00
Jonas Plum
6f6c615d16 Disable renovate dependency dashboard 2022-04-26 21:25:36 +02:00
Jonas Plum
2227f85db5 Auto merge digest updates 2022-04-26 21:23:58 +02:00
Jonas Plum
c4b32e22ae Fix multiple comments (#109) 2022-04-26 01:09:02 +02:00
renovate[bot]
373cc52c05 Update module github.com/aws/aws-sdk-go to v1.44.0 (#108)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-26 00:31:24 +02:00
Renovate Bot
7c75bff214 Update module github.com/aws/aws-sdk-go to v1.43.45 2022-04-23 00:45:19 +00:00
Renovate Bot
252b4bde2d Update module github.com/aws/aws-sdk-go to v1.43.44 2022-04-21 22:04:21 +00:00
Renovate Bot
0fb30ca51f Update module github.com/aws/aws-sdk-go to v1.43.43 2022-04-21 01:24:23 +00:00
Renovate Bot
ec3774d5aa Update module github.com/tidwall/gjson to v1.14.1 2022-04-20 05:27:58 +00:00
Renovate Bot
dd259d56a4 Update module github.com/aws/aws-sdk-go to v1.43.42 2022-04-20 01:00:46 +00:00
Renovate Bot
352c4ee7a0 Update module github.com/go-chi/cors to v1.2.1 2022-04-19 21:37:44 +00:00
Renovate Bot
3b9d37bbc9 Update module github.com/aws/aws-sdk-go to v1.43.41 2022-04-15 20:55:10 +00:00
Renovate Bot
61e0d40e5a Update module github.com/aws/aws-sdk-go to v1.43.40 2022-04-15 00:00:59 +00:00
renovate[bot]
3865d760ba Update golang.org/x/exp digest to bcd2187 (#94)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-14 23:09:54 +02:00
Renovate Bot
d2e7048d02 Update module github.com/aws/aws-sdk-go to v1.43.39 2022-04-13 21:45:28 +00:00
Renovate Bot
b914b58b23 Update module github.com/aws/aws-sdk-go to v1.43.38 2022-04-12 22:01:09 +00:00
renovate[bot]
2011dae77c Update golang.org/x/oauth2 digest to 9780585 (#88)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-12 06:50:56 +02:00
Renovate Bot
3389389881 Update module github.com/aws/aws-sdk-go to v1.43.37 2022-04-12 03:16:38 +00:00
Renovate Bot
549274083b Update module github.com/aws/aws-sdk-go to v1.43.36 2022-04-08 22:46:50 +00:00
renovate[bot]
0dfe1ddf6a Update golang.org/x/exp digest to 7b9b53b (#82)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-08 17:05:18 +02:00
Renovate Bot
986860e054 Update module github.com/aws/aws-sdk-go to v1.43.35 2022-04-07 23:08:40 +00:00
Renovate Bot
9ea7c8db21 Update module github.com/aws/aws-sdk-go to v1.43.34 2022-04-06 21:57:38 +00:00
Renovate Bot
f8d30fc8a8 Update module github.com/tus/tusd to v1.9.0 2022-04-06 01:42:29 +00:00
Renovate Bot
9a7326cd0a Update module github.com/aws/aws-sdk-go to v1.43.33 2022-04-05 22:46:51 +00:00
renovate[bot]
de61f2385f Update codecov/codecov-action action to v3 (#78)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-04-05 18:19:49 +02:00
Renovate Bot
2a21492f96 Update module github.com/aws/aws-sdk-go to v1.43.32 2022-04-04 22:04:46 +00:00
Renovate Bot
4895e0e8ba Update module github.com/aws/aws-sdk-go to v1.43.31 2022-04-01 23:26:00 +00:00
Renovate Bot
77dba21d63 Update module github.com/aws/aws-sdk-go to v1.43.30 2022-03-31 21:58:29 +00:00
Jonas Plum
dee2827e3f Add gocap check (#73) 2022-03-31 21:43:18 +02:00
Renovate Bot
3c50d4608e Update module github.com/aws/aws-sdk-go to v1.43.29 2022-03-31 02:14:17 +00:00
Renovate Bot
d58182dd9f Update module github.com/blevesearch/bleve/v2 to v2.3.2 2022-03-29 22:14:34 +00:00
renovate[bot]
3c9d98b4ef Update module github.com/tidwall/gjson to v1.14.0 (#61)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:03:27 +02:00
renovate[bot]
b6bb875af4 Update actions/upload-artifact action to v3 (#67)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:02:35 +02:00
renovate[bot]
7627d187c8 Update actions/checkout action to v3 (#64)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:02:23 +02:00
renovate[bot]
a55bba4d0c Update actions/download-artifact action to v3 (#65)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:02:11 +02:00
renovate[bot]
187d3eb8fd Update actions/setup-node action to v3 (#66)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:02:03 +02:00
renovate[bot]
4eb0e95032 Update actions/cache action to v3 (#63)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 22:01:45 +02:00
Renovate Bot
84eb1b07cd Update module github.com/aws/aws-sdk-go to v1.43.28 2022-03-29 19:48:42 +00:00
Renovate Bot
4dcb2e5c7b Update module github.com/arangodb/go-driver to v1.3.1 2022-03-29 19:38:26 +00:00
Renovate Bot
86f6cd72aa Update module github.com/alecthomas/kong to v0.5.0 2022-03-29 19:26:25 +00:00
renovate[bot]
3da58b6eee Update golang.org/x/oauth2 digest to 6242fa9 (#54)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 16:42:31 +02:00
Renovate Bot
19adc38247 Update module github.com/stretchr/testify to v1.7.1 2022-03-29 14:23:01 +00:00
renovate[bot]
f15ce29d7e Update github.com/antlr/antlr4/runtime/Go/antlr digest to 97c793e (#51)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 15:58:03 +02:00
renovate[bot]
7bf9a03eec Update golang.org/x/exp digest to 053ad81 (#52)
Co-authored-by: Renovate Bot <bot@renovateapp.com>
2022-03-29 15:57:48 +02:00
Jonas Plum
3fe863a735 Add codecov test (#50) 2022-03-29 15:55:53 +02:00
Jonas Plum
9c8ed2a089 Setup renovate (#49) 2022-03-29 15:49:14 +02:00
Jonas Plum
2158899983 Improve user info (#47) 2022-03-20 12:57:15 +01:00
Jonas Plum
68618d2bdb Setup CI cache (#46) 2022-03-20 03:40:42 +01:00
Jonas Plum
2bad1f5f28 Migrate to Go 1.18 (#45)
* Migrate to Go 1.18 and add linters
2022-03-20 03:17:18 +01:00
Jonas Plum
03a4806d45 Fix timeformat (#44) 2022-03-19 14:26:36 +01:00
Jonas Plum
e6baead486 Fix routing (#43) 2022-03-19 13:41:34 +01:00
Jonas Plum
3618f9784d Fix home screen (#42) 2022-03-16 00:27:46 +01:00
Jonas Plum
02c7da91da Add Dashboards (#41) 2022-03-14 00:23:29 +01:00
Jonas Plum
18a4dc54e7 Add global settings (#40) 2022-03-13 13:45:10 +01:00
Jonas Plum
86daadc73d Cleanup content (#39) 2022-03-12 22:45:58 +01:00
Jonas Plum
687be41994 Fix artifact enrichment (#38) 2022-03-12 22:08:51 +01:00
Jonas Plum
d353268cf2 Update generator (#37) 2022-03-12 21:09:10 +01:00
Jonas Plum
eced5df7c8 Add admin user config (#36) 2022-03-01 01:01:59 +01:00
Jonas Plum
ffba7b4f5f Add test for jobs (#34) 2022-02-27 18:33:50 +01:00
Jonas Plum
fd18458f3d Remove TheHive automation (#35) 2022-02-27 15:31:17 +01:00
Jonas Plum
338aba8342 Check input schema (#33) 2022-02-27 12:25:41 +01:00
Jonas Plum
54312893a2 Fix dashboard redirect (#32) 2022-02-17 21:51:54 +01:00
Jonas Plum
599b9d3c6e Set network (#31)
* Set network for automation docker container
2022-02-13 20:44:16 +01:00
Jonas Plum
2d817318f2 Better file api (#30)
* Better file api
2022-01-23 04:27:31 +01:00
Jonas Plum
8ca57c14d9 Add handbook links (#29)
* Add handbook links
2022-01-20 11:12:29 +01:00
Jonas Plum
ad02113d99 Improve Job UI (#28) 2022-01-20 10:59:57 +01:00
Jonas Plum
6fbc6f9837 Autocreate links to other tickets (#27) 2022-01-20 10:18:45 +01:00
Jonas Plum
8e8a7ec2a2 Create three panel view (#26)
* Create three panel view
2022-01-20 10:08:37 +01:00
Jonas Plum
a152b6663b Add breadcrumb menu (#25)
* Add breadcrumbs
2022-01-19 12:05:06 +01:00
Jonas Plum
5653ecb5b0 Add timeformat edit (#6) 2022-01-19 01:30:27 +01:00
Jonas Plum
27da64a470 Change license (#5) 2022-01-19 01:00:50 +01:00
Jonas Plum
8333ea88a8 Change code generator (#4)
* Change code generator
* Remove gin
2022-01-08 00:48:44 +01:00
Jonas Plum
b5dd0cfacd Improve bus (#3)
* Improve bus
* Add ticket log
2021-12-27 19:08:07 +01:00
Jonas Plum
1fade14ba5 Mock time (#2) 2021-12-27 00:17:44 +01:00
Jonas Plum
0286574692 Add backup and restore test (#1)
* Add backup and restore test
* Update arango binaries
2021-12-24 00:22:08 +01:00
Jonas Plum
c27e61b875 Add social media links 2021-12-20 17:18:23 +01:00
278 changed files with 20750 additions and 39720 deletions

36
.github/renovate.json vendored Normal file
View File

@@ -0,0 +1,36 @@
{
"extends": [
"config:base"
],
"packageRules": [
{
"datasources": [
"go"
],
"extends": [
":automergeDigest",
":automergeMinor",
":automergePr"
],
"postUpdateOptions": [
"gomodTidy"
]
},
{
"datasources": [
"npm"
],
"extends": [
":automergeDigest",
":automergeMinor",
":automergePr"
]
}
],
"assignees": [
"cugu"
],
"ignoreDeps": [
"sass-loader"
]
}

18
.github/stale.yml vendored Normal file
View File

@@ -0,0 +1,18 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- feature
- bug
- enhancement
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@@ -9,69 +9,139 @@ env:
IMAGE_NAME: ${{ github.repository }}
jobs:
test:
name: Test
lint:
name: Lint
runs-on: ubuntu-latest
env: { GIN_MODE: test }
steps:
- uses: actions/setup-go@v2
with: { go-version: '1.17' }
- uses: actions/setup-node@v2
with: { node-version: '14' }
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with: { go-version: '1.19', cache: true }
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- run: docker-compose up -d
- uses: golangci/golangci-lint-action@v3
test:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with: { node-version: '14', cache: 'yarn', cache-dependency-path: 'ui/yarn.lock' }
- uses: actions/setup-go@v3
with: { go-version: '1.19', cache: true }
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- run: docker compose -f docker-compose-with-keycloak.yml up --quiet-pull --detach
working-directory: dev
- name: Install ArangoDB
run: |
curl -OL https://download.arangodb.com/arangodb34/DEBIAN/Release.key
curl -OL https://download.arangodb.com/arangodb38/DEBIAN/Release.key
sudo apt-key add Release.key
sudo apt-add-repository 'deb https://download.arangodb.com/arangodb34/DEBIAN/ /'
sudo apt-add-repository 'deb https://download.arangodb.com/arangodb38/DEBIAN/ /'
sudo apt-get update -y && sudo apt-get -y install arangodb3
- run: go test -coverprofile=cover.out -coverpkg=./... ./...
- run: go tool cover -func=cover.out
- uses: codecov/codecov-action@v3
cypress:
strategy:
matrix:
test: [ tickets, templates, playbooks ]
auth: [ keycloak ] # simple
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with: { go-version: '1.18' }
- uses: actions/setup-node@v3
with: { node-version: '14' }
# run UI
- run: |
yarn install
yarn serve &
working-directory: ui
- run: curl --head -X GET --retry 60 --retry-connrefused --retry-delay 10 http://localhost:8080
# run containers
- run: |
sed -i 's/host.docker.internal/172.17.0.1/g' dev/nginx.conf
sed -i 's/host.docker.internal/172.17.0.1/g' dev/nginx-with-keycloak.conf
- run: docker compose up --quiet-pull --detach
working-directory: dev
if: matrix.auth == 'simple'
- run: docker compose -f docker-compose-with-keycloak.yml up --quiet-pull --detach
working-directory: dev
if: matrix.auth == 'keycloak'
- run: curl --head -X GET --retry 60 --retry-connrefused --retry-delay 10 http://localhost:9002/auth/realms/catalyst
if: matrix.auth == 'keycloak'
# run catalyst
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- run: go mod download
- run: bash start_dev.sh &
working-directory: dev
if: matrix.auth == 'simple'
- run: bash start_dev_with_keycloak.sh &
working-directory: dev
if: matrix.auth == 'keycloak'
- run: curl --head -X GET --retry 60 --retry-connrefused --retry-delay 10 http://localhost:8000
# run cypress
- uses: cypress-io/github-action@v4
env:
CYPRESS_AUTH: ${{ matrix.auth }}
CYPRESS_TEST: ${{ matrix.test }}
with:
browser: chrome
working-directory: ui
- uses: actions/upload-artifact@v3
if: always() && matrix.auth == 'simple'
with:
name: cypress-videos
path: ui/cypress/videos
retention-days: 1
build-npm:
name: Build npm
runs-on: ubuntu-latest
steps:
- uses: actions/setup-node@v2
with: { node-version: '14' }
- uses: actions/checkout@v2
- uses: actions/checkout@v3
- uses: actions/setup-node@v3
with: { node-version: '14', cache: 'yarn', cache-dependency-path: 'ui/yarn.lock' }
- run: yarn install && yarn build
working-directory: ui
- uses: actions/upload-artifact@v2
- uses: actions/upload-artifact@v3
with: { name: ui, path: ui/dist, retention-days: 1 }
build:
if: github.event_name != 'pull_request'
name: Build
runs-on: ubuntu-latest
needs: [ build-npm, test ]
steps:
- uses: actions/setup-go@v2
with: { go-version: '1.17' }
- uses: actions/checkout@v2
- uses: actions/download-artifact@v2
- uses: actions/checkout@v3
- uses: actions/setup-go@v3
with: { go-version: '1.19', cache: true }
- uses: actions/download-artifact@v3
with: { name: ui, path: ui/dist }
- name: Version
if: github.ref_type == 'tag' && github.ref_name != ''
run: |
echo ${{ github.ref_name }}
echo ${{ github.ref_name }} > VERSION
- run: go build -o catalyst ./cmd/catalyst/.
- uses: docker/login-action@v1
- uses: docker/login-action@v2
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Version
if: ${{ github.ref != '' }}
run: |
echo ${{ github.ref_name }}
echo ${{ github.ref_name }} > VERSION
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v3
uses: docker/metadata-action@v4
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- uses: docker/build-push-action@v2
- uses: docker/build-push-action@v3
with:
context: .
push: true

121
.golangci.yml Normal file
View File

@@ -0,0 +1,121 @@
run:
go: "1.19"
timeout: 5m
skip-dirs:
- generated
- internal
linters:
enable:
- asciicheck
- containedctx
- decorder
- depguard
- dogsled
- durationcheck
- errchkjson
- errname
- errorlint
- exhaustive
- exportloopref
- forbidigo
- forcetypeassert
- gci
- gocritic
- godot
- gofmt
- gofumpt
- goheader
- goimports
- gomodguard
- goprintffuncname
- gosec
- grouper
- importas
- ireturn
- misspell
- nakedret
- nilnil
- nlreturn
- nolintlint
- paralleltest
- predeclared
- promlinter
- revive
- tenv
- thelper
- unconvert
- whitespace
disable:
# go 1.18
- bodyclose
- contextcheck
- gosimple
- ifshort
- nilerr
- noctx
- rowserrcheck
- sqlclosecheck
- staticcheck
- stylecheck
- tparallel
- unparam
- unused
- wastedassign
# complexity
- cyclop
- gocognit
- gocyclo
- maintidx
- nestif
# disable
- dupl
- exhaustivestruct
- funlen
- gochecknoglobals
- gochecknoinits
- goconst
- godox
- goerr113
- gomnd
- gomoddirectives
- lll
- makezero
- prealloc
- structcheck
- tagliatelle
- testpackage
- varnamelen
- wrapcheck
- wsl
linters-settings:
gci:
sections:
- standard
- default
- prefix(github.com/SecurityBrewery/catalyst)
ireturn:
allow:
- error
- context.Context
- go-driver.Cursor
- go-driver.Collection
- go-driver.Database
- go-driver.Client
- chi.Router
issues:
exclude-rules:
- path: caql
text: "var-naming: don't use underscores"
- path: database/user.go
text: "G404"
linters: [ gosec ]
- path: caql/function.go
text: "G404"
linters: [ gosec ]
- path: caql
linters: [ forcetypeassert ]
- text: github.com/go-chi/chi/v5.Router
linters: [ ireturn ]

661
LICENSE Normal file
View File

@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

View File

@@ -1,104 +0,0 @@
Copyright (c) 2021-present Jonas Plum
Portions of this software are licensed as follows:
* All third party components incorporated into Catalyst are licensed under the
original license provided by the owner of the applicable component. Those
files contain a license notice on top of the file and are listed in the
[NOTICE](NOTICE) file.
* Content outside of the above mentioned files above is
available under the "Elastic License 2.0" license as defined below.
# Elastic License 2.0
URL: https://www.elastic.co/licensing/elastic-license
## Acceptance
By using the software, you agree to all of the terms and conditions below.
## Copyright License
The licensor grants you a non-exclusive, royalty-free, worldwide,
non-sublicensable, non-transferable license to use, copy, distribute, make
available, and prepare derivative works of the software, in each case subject to
the limitations and conditions below.
## Limitations
You may not provide the software to third parties as a hosted or managed
service, where the service provides users with access to any substantial set of
the features or functionality of the software.
You may not move, change, disable, or circumvent the license key functionality
in the software, and you may not remove or obscure any functionality in the
software that is protected by the license key.
You may not alter, remove, or obscure any licensing, copyright, or other notices
of the licensor in the software. Any use of the licensors trademarks is subject
to applicable law.
## Patents
The licensor grants you a license, under any patent claims the licensor can
license, or becomes able to license, to make, have made, use, sell, offer for
sale, import and have imported the software, in each case subject to the
limitations and conditions in this license. This license does not cover any
patent claims that you cause to be infringed by modifications or additions to
the software. If you or your company make any written claim that the software
infringes or contributes to infringement of any patent, your patent license for
the software granted under these terms ends immediately. If your company makes
such a claim, your patent license ends immediately for work on behalf of your
company.
## Notices
You must ensure that anyone who gets a copy of any part of the software from you
also gets a copy of these terms.
If you modify the software, you must include in any modified copies of the
software prominent notices stating that you have modified the software.
## No Other Rights
These terms do not imply any licenses other than those expressly granted in
these terms.
## Termination
If you use the software in violation of these terms, such use is not licensed,
and your licenses will automatically terminate. If the licensor provides you
with a notice of your violation, and you cease all violation of this license no
later than 30 days after you receive that notice, your licenses will be
reinstated retroactively. However, if you violate these terms after such
reinstatement, any additional violation of these terms will cause your licenses
to terminate automatically and permanently.
## No Liability
*As far as the law allows, the software comes as is, without any warranty or
condition, and the licensor will not be liable to you for any damages arising
out of these terms or the use or nature of the software, under any kind of
legal claim.*
## Definitions
The **licensor** is the entity offering these terms, and the **software** is the
software the licensor makes available under these terms, including any portion
of it.
**you** refers to the individual or entity agreeing to these terms.
**your company** is any legal entity, sole proprietorship, or other kind of
organization that you work for, plus all organizations that have control over,
are under the control of, or are under common control with that
organization. **control** means ownership of substantially all the assets of an
entity, or the power to direct its management and policies by vote, contract, or
otherwise. Control can be direct or indirect.
**your licenses** are all the licenses granted to you for the software under
these terms.
**use** means anything you do with the software requiring one of your licenses.
**trademark** means trademarks, service marks, and similar rights.

View File

@@ -7,8 +7,14 @@
-
<a href="https://catalyst-soar.com/docs/category/catalyst-handbook">The Catalyst Handbook (Documentation)</a>
-
<a href="https://try.catalyst-soar.com">Try online</a> (user: bob, password: bob)
<a href="https://try.catalyst-soar.com">Try online</a>
</h4>
<h4 align="center">
<a href="https://twitter.com/securitybrewery">Twitter</a>
-
<a href="https://discord.gg/nrmpveWvZX">Discord</a>
</h4>
Catalyst is an incident response platform or SOAR (Security Orchestration, Automation and Response) system. It can help
you to automate your alert handling and incident response procedures.
@@ -23,7 +29,7 @@ Tickets are the core of Catalyst. They represent alerts, incidents, forensics
investigations, threat hunts or any other event you want to handle in your
organisation.
## Ticket Templates
### Ticket Templates
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/details.png" />
@@ -34,7 +40,7 @@ tickets like title, creation date or closing status is kept quite minimal and ot
information like criticality, description or MITRE ATT&CK information can be
added individually.
## Conditional Custom Fields
### Conditional Custom Fields
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/conditional_custom_field_a.png" />
@@ -46,7 +52,7 @@ Custom Fields can be dependent on each other. So if you, for example choose
ransomware, worm, etc. which a "phishing" incident would ask for the number
of received mails in that campaign.
## Playbooks
### Playbooks
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/phishing_playbook.png" />
@@ -56,7 +62,7 @@ Playbooks represent processes that can be attached to tickets. Playbooks can
contain manual and automated tasks. Complex workflows with different workflow
branches, parallel tasks and task dependencies can be modeled.
## Automations
### Automations
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/script.png" />
@@ -66,13 +72,31 @@ Automations are scripts that automate tasks or enrich artifacts. Automations are
run in their own Docker containers. This enables them to be created in different
scripting languages and run securely in their own environment.
## Users
### Dashboards
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/roles.png" />
<img alt="Screenshot of the dashboard editor" src="docs/screenshots/dashboard.png" />
</center>
Catalyst comes with a dashboard editor that allows you to create custom dashboards
for your organisation. Dashboards can be created with line, bar, and pie charts.
### Users
Catalyst has two different types of users: normal users accessing the platform
via OIDC authentication, and API keys for external scripts. A
fine-grained access model is available for both types and allows defining the
possible actions for each user.
## License
Copyright (c) 2021-present Jonas Plum
Portions of this software are licensed as follows:
* All third party components incorporated into Catalyst are licensed under the
original license provided by the owner of the applicable component. Those
files contain a license notice on top of the file and are listed in the
[NOTICE](NOTICE) file.
* Content outside the above-mentioned files is
available under the [GNU Affero General Public License v3.0](LICENSE).

435
auth.go
View File

@@ -3,401 +3,106 @@ package catalyst
import (
"context"
"crypto/sha256"
"encoding/base64"
"errors"
"fmt"
"log"
"math/rand"
"net/http"
"strings"
"time"
"github.com/coreos/go-oidc/v3/oidc"
"github.com/gin-contrib/sessions"
"github.com/gin-gonic/gin"
"golang.org/x/oauth2"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/generated/model"
)
type AuthConfig struct {
OIDCIssuer string
OAuth2 *oauth2.Config
OIDCClaimUsername string
OIDCClaimEmail string
// OIDCClaimGroups string
OIDCClaimName string
AuthBlockNew bool
AuthDefaultRoles []role.Role
provider *oidc.Provider
type catalystResolver struct {
database *database.Database
}
func (c *AuthConfig) Verifier(ctx context.Context) (*oidc.IDTokenVerifier, error) {
if c.provider == nil {
err := c.Load(ctx)
func newCatalystResolver(db *database.Database) *catalystResolver {
return &catalystResolver{
database: db,
}
}
func (c *catalystResolver) UserCreateIfNotExists(ctx context.Context, user *maut.User, password string) (err error) {
if user != nil {
if _, err := c.database.UserGet(ctx, user.ID); err == nil {
return nil
}
}
if user == nil || user.APIKey {
_, err = c.database.UserCreateSetupAPIKey(ctx, password)
} else {
_, err = c.database.UserCreate(ctx, &model.UserForm{
Apikey: user.APIKey,
Blocked: user.Blocked,
ID: user.ID,
Password: &password,
Roles: user.Roles,
})
if err != nil {
return nil, err
return err
}
err = c.database.UserDataCreate(ctx, user.ID, &model.UserData{
Email: user.Email,
Image: nil,
Name: user.Name,
Timeformat: nil,
})
}
return c.provider.Verifier(&oidc.Config{SkipClientIDCheck: true}), nil
return err
}
func (c *AuthConfig) Load(ctx context.Context) error {
provider, err := oidc.NewProvider(ctx, c.OIDCIssuer)
func (c *catalystResolver) User(ctx context.Context, userID string) (*maut.User, error) {
user, err := c.database.UserGet(ctx, userID)
if err != nil {
return err
return nil, err
}
c.provider = provider
c.OAuth2.Endpoint = provider.Endpoint()
return nil
return mapMautUser(user), nil
}
const (
SessionName = "catalyst-session"
stateSession = "state"
userSession = "user"
)
// Authenticate returns a gin middleware that resolves the requesting user via
// one of three mechanisms, in order of precedence: a PRIVATE-TOKEN header
// (API key), a bearer token in the "User" header, or the OIDC session cookie.
func Authenticate(db *database.Database, config *AuthConfig) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		iss := config.OIDCIssuer

		// API keys are sent in a PRIVATE-TOKEN header.
		keyHeader := ctx.Request.Header.Get("PRIVATE-TOKEN")
		if keyHeader != "" {
			keyAuth(db, keyHeader)(ctx)

			return
		}

		// NOTE(review): the bearer token is read from a header named "User",
		// not the conventional "Authorization" header — confirm this matches
		// the clients.
		authHeader := ctx.Request.Header.Get("User")
		if authHeader != "" {
			bearerAuth(db, authHeader, iss, config)(ctx)

			return
		}

		// Fall back to the OIDC session; sessionAuth redirects to login when
		// no session exists.
		sessionAuth(db, config)(ctx)
	}
}
// oidcCtx derives a context with a one-minute timeout for calls to the OIDC
// provider (discovery, token verification).
func oidcCtx(ctx *gin.Context) (context.Context, context.CancelFunc) {
	// Disabled TLS client configuration kept for reference; it would allow
	// OIDC providers with custom certificates.
	/*
		if config.TLSCertFile != "" && config.TLSKeyFile != "" {
			cert, err := tls.LoadX509KeyPair(config.TLSCertFile, config.TLSKeyFile)
			if err != nil {
				return nil, err
			}

			rootCAs, _ := x509.SystemCertPool()
			if rootCAs == nil {
				rootCAs = x509.NewCertPool()
			}

			for _, c := range cert.Certificate {
				rootCAs.AppendCertsFromPEM(c)
			}

			return oidc.ClientContext(ctx, &http.Client{
				Transport: &http.Transport{
					TLSClientConfig: &tls.Config{
						RootCAs:            rootCAs,
						InsecureSkipVerify: true,
					},
				},
			}), nil
		}
	*/
	cctx, cancel := context.WithTimeout(ctx, time.Minute)

	return cctx, cancel
}
// bearerAuth returns a handler that verifies an OIDC bearer token, caches its
// claims in the session, and loads the corresponding user into the request
// context via setContextClaims.
func bearerAuth(db *database.Database, authHeader string, iss string, config *AuthConfig) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		if !strings.HasPrefix(authHeader, "Bearer ") {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "no bearer token"})

			return
		}

		oidcCtx, cancel := oidcCtx(ctx)
		defer cancel()

		verifier, err := config.Verifier(oidcCtx)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "could not verify: " + err.Error()})

			return
		}

		// Strip the "Bearer " prefix (7 bytes) and verify the raw token.
		authToken, err := verifier.Verify(oidcCtx, authHeader[7:])
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not verify bearer token: %v", err)})

			return
		}

		var claims map[string]interface{}
		if err := authToken.Claims(&claims); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to parse claims: %v", err)})

			return
		}

		// Issuer check is disabled; kept for reference (the iss parameter is
		// currently unused beyond this).
		// if claims.Iss != iss {
		// 	ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "wrong issuer"})
		// 	return
		// }

		// Cache the claims in the session so later requests can authenticate
		// via sessionAuth without re-verifying the token.
		session := sessions.Default(ctx)
		session.Set(userSession, claims)
		if err = session.Save(); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, fmt.Sprintf("could not set session: %v", err))

			return
		}

		if err = setContextClaims(ctx, db, claims, config); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not load user: %s", err)})

			return
		}

		ctx.Next()
	}
}
// keyAuth returns a handler that authenticates a request by its PRIVATE-TOKEN
// API key and places the resolved user in the request context.
func keyAuth(db *database.Database, keyHeader string) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		// Look the key up by its SHA-256 hex digest.
		h := fmt.Sprintf("%x", sha256.Sum256([]byte(keyHeader)))

		key, err := db.UserByHash(ctx, h)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not verify private token: %v", err)})

			return
		}

		setContextUser(ctx, key, db.Hooks)

		ctx.Next()
	}
}
// sessionAuth returns a handler that authenticates a request from the OIDC
// claims previously stored in the session; when no session exists the client
// is redirected to the OIDC login.
func sessionAuth(db *database.Database, config *AuthConfig) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		session := sessions.Default(ctx)

		user := session.Get(userSession)
		if user == nil {
			redirectToLogin(ctx, session, config.OAuth2)

			return
		}

		// The session stores claims as a generic map (see bearerAuth and
		// callback); anything else indicates a corrupt session.
		claims, ok := user.(map[string]interface{})
		if !ok {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "claims not in session"})

			return
		}

		if err := setContextClaims(ctx, db, claims, config); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not load user: %s", err)})

			return
		}

		ctx.Next()
	}
}
func setContextClaims(ctx *gin.Context, db *database.Database, claims map[string]interface{}, config *AuthConfig) error {
newUser, newSetting, err := mapUserAndSettings(claims, config)
func (c *catalystResolver) UserAPIKeyByHash(ctx context.Context, key string) (*maut.User, error) {
sha256Hash := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
user, err := c.database.UserAPIKeyByHash(ctx, sha256Hash)
if err != nil {
return err
return nil, err
}
if _, ok := busdb.UserFromContext(ctx); !ok {
busdb.SetContext(ctx, &models.UserResponse{ID: "auth", Roles: []string{role.Admin}, Apikey: false, Blocked: false})
}
return mapMautUser(user), nil
}
user, err := db.UserGetOrCreate(ctx, newUser)
func (c *catalystResolver) UserByIDAndPassword(ctx context.Context, username string, password string) (*maut.User, error) {
user, err := c.database.UserByIDAndPassword(ctx, username, password)
if err != nil {
return err
return nil, err
}
_, err = db.UserDataGetOrCreate(ctx, newUser.ID, newSetting)
if err != nil {
return err
}
setContextUser(ctx, user, db.Hooks)
return nil
return mapMautUser(user), nil
}
func setContextUser(ctx *gin.Context, user *models.UserResponse, hooks *hooks.Hooks) {
groups, err := hooks.GetGroups(ctx, user.ID)
if err == nil {
busdb.SetGroupContext(ctx, groups)
func (c *catalystResolver) Role(ctx context.Context, roleID string) (r *maut.Role, err error) {
switch roleID {
case "admin":
return Admin, nil
case "engineer":
return engineer, nil
case "analyst":
return analyst, nil
}
busdb.SetContext(ctx, user)
return nil, errors.New("role not found")
}
func mapUserAndSettings(claims map[string]interface{}, config *AuthConfig) (*models.UserForm, *models.UserData, error) {
// handle Bearer tokens
// if typ, ok := claims["typ"]; ok && typ == "Bearer" {
// return &models.User{
// Username: "bot",
// Blocked: false,
// Email: pointer.String("bot@example.org"),
// Roles: []string{"user:read", "settings:read", "ticket", "backup:read", "backup:restore"},
// Name: pointer.String("Bot"),
// }, nil
// }
username, err := getString(claims, config.OIDCClaimUsername)
if err != nil {
return nil, nil, err
}
email, err := getString(claims, config.OIDCClaimEmail)
if err != nil {
email = ""
}
name, err := getString(claims, config.OIDCClaimName)
if err != nil {
name = ""
}
return &models.UserForm{
ID: username,
Blocked: config.AuthBlockNew,
Roles: role.Strings(config.AuthDefaultRoles),
}, &models.UserData{
Email: &email,
Name: &name,
}, nil
}
// getString fetches the value stored under key in m and asserts that it is a
// string. It returns an error when the key is absent or the value has a
// different type.
func getString(m map[string]interface{}, key string) (string, error) {
	v, ok := m[key]
	if !ok {
		return "", fmt.Errorf("mapping of %s failed, missing value", key)
	}

	s, ok := v.(string)
	if !ok {
		return "", fmt.Errorf("mapping of %s failed, wrong type (%T)", key, v)
	}

	return s, nil
}
// redirectToLogin starts the OAuth2 authorization code flow: it stores a
// fresh random state value in the session (CSRF protection, checked again in
// callback) and redirects the client to the provider's authorization URL.
func redirectToLogin(ctx *gin.Context, session sessions.Session, oauth2Config *oauth2.Config) {
	state, err := state()
	if err != nil {
		ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "generating state failed"})

		return
	}

	session.Set(stateSession, state)

	// A failed session save is only logged; the redirect still happens, but
	// the callback would then fail its state check.
	err = session.Save()
	if err != nil {
		log.Println(err)
	}

	ctx.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
	log.Println("abort", ctx.Request.URL.String())
	ctx.Abort()
}
// AuthorizeBlockedUser is a gin middleware that rejects requests from blocked
// users with 403. It expects a user in the request context (set by the
// Authenticate middleware); a missing user is treated as a server error.
func AuthorizeBlockedUser(ctx *gin.Context) {
	user, ok := busdb.UserFromContext(ctx)
	if !ok {
		ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "no user in context"})

		return
	}

	if user.Blocked {
		ctx.AbortWithStatusJSON(http.StatusForbidden, gin.H{"error": "user is blocked"})

		return
	}

	ctx.Next()
}
func AuthorizeRole(roles []role.Role) gin.HandlerFunc {
return func(ctx *gin.Context) {
user, ok := busdb.UserFromContext(ctx)
if !ok {
ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "no user in context"})
return
}
if !role.UserHasRoles(user, roles) {
ctx.AbortWithStatusJSON(http.StatusForbidden, gin.H{"error": fmt.Sprintf("missing role %s has %s", roles, user.Roles)})
return
}
ctx.Next()
// mapMautUser converts a catalyst user record into the maut user model used
// by the authorization library. Email, groups and name are not mapped yet.
func mapMautUser(user *model.UserResponse) *maut.User {
	return &maut.User{
		ID:      user.ID,
		APIKey:  user.Apikey,
		Blocked: user.Blocked,
		// Email: user.Email, // TODO
		// Groups: user.Groups, // TODO
		// Name: user.Name, // TODO
		Roles: user.Roles,
	}
}
// callback returns the OAuth2/OIDC redirect handler: it checks the state
// value against the session, exchanges the authorization code for a token,
// verifies the contained ID token and stores its claims in the session
// before redirecting the client to the application root.
func callback(config *AuthConfig) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		session := sessions.Default(ctx)

		// The state must match the value generated in redirectToLogin,
		// otherwise this request did not originate from our login redirect.
		state := session.Get(stateSession)
		if state == "" {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "state missing"})

			return
		}

		if state != ctx.Query("state") {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "state mismatch"})

			return
		}

		oauth2Token, err := config.OAuth2.Exchange(ctx, ctx.Query("code"))
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": gin.H{"error": fmt.Sprintf("oauth2 exchange failed: %s", err)}})

			return
		}

		// Extract the ID Token from OAuth2 token.
		rawIDToken, ok := oauth2Token.Extra("id_token").(string)
		if !ok {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "missing id token"})

			return
		}

		oidcCtx, cancel := oidcCtx(ctx)
		defer cancel()

		verifier, err := config.Verifier(oidcCtx)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "could not verify: " + err.Error()})

			return
		}

		// Parse and verify ID Token payload.
		idToken, err := verifier.Verify(oidcCtx, rawIDToken)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "token verification failed: " + err.Error()})

			return
		}

		// Extract custom claims.
		var claims map[string]interface{}
		if err := idToken.Claims(&claims); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "claim extraction failed"})

			return
		}

		// Persist the claims so subsequent requests authenticate via
		// sessionAuth without another round trip to the provider.
		session.Set(userSession, claims)
		err = session.Save()
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not save session %s", err)})

			return
		}

		ctx.Redirect(http.StatusFound, "/")
	}
}
// state returns a random, URL-safe string used as the OAuth2 state parameter
// to bind an authorization request to this session (CSRF protection).
//
// NOTE(review): rand here is math/rand (see imports), which is not a
// cryptographically secure source; crypto/rand would be the safer choice for
// an OAuth2 state value — confirm and consider switching.
func state() (string, error) {
	rnd := make([]byte, 32)

	if _, err := rand.Read(rnd); err != nil {
		return "", err
	}

	return base64.URLEncoding.EncodeToString(rnd), nil
}

View File

@@ -1,26 +0,0 @@
package automation
import (
"context"
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/role"
)
// New starts the automation service: it subscribes to job messages (running
// automation scripts) and to result messages (feeding results back into
// tickets and artifacts). It returns the first subscription error.
func New(apiurl, apikey string, bus *bus.Bus, db *database.Database) error {
	// Return subscription errors to the caller instead of terminating the
	// process; log.Fatal inside library code would prevent callers from
	// handling the failure even though this function already returns error.
	if err := jobAutomation(jobContext(), apiurl, apikey, bus, db); err != nil {
		return err
	}

	return resultAutomation(bus, db)
}
// jobContext returns a background context carrying the internal "bot" user,
// which performs automation database writes with the admin role.
func jobContext() context.Context {
	// TODO: change roles?
	bot := &models.UserResponse{ID: "bot", Roles: []string{role.Admin}}

	return busdb.UserContext(context.Background(), bot)
}

View File

@@ -1,116 +0,0 @@
package automation
import (
"encoding/json"
"fmt"
"log"
"golang.org/x/net/context"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// jobAutomation subscribes to job messages on the bus. For each message it
// records the job, runs the referenced automation script inside a Docker
// container, publishes the script's output as a result message and marks the
// job as complete. Errors inside the handler are logged, not returned.
func jobAutomation(ctx context.Context, apiurl, apikey string, catalystBus *bus.Bus, db *database.Database) error {
	return catalystBus.SubscribeJob(func(automationMsg *bus.JobMsg) {
		// Persist the job before running it.
		job, err := db.JobCreate(ctx, automationMsg.ID, &models.JobForm{
			Automation: automationMsg.Automation,
			Payload:    automationMsg.Message.Payload,
			Origin:     automationMsg.Origin,
		})
		if err != nil {
			log.Println(err)

			return
		}

		automation, err := db.AutomationGet(ctx, automationMsg.Automation)
		if err != nil {
			log.Println(err)

			return
		}

		if automation.Script == "" {
			log.Println("automation is empty")

			return
		}

		// Inject the catalyst API credentials as secrets so the script can
		// call back into the platform.
		if automationMsg.Message.Secrets == nil {
			automationMsg.Message.Secrets = map[string]string{}
		}
		automationMsg.Message.Secrets["catalyst_apikey"] = apikey
		automationMsg.Message.Secrets["catalyst_apiurl"] = apiurl

		// NOTE(review): the marshal error is discarded; on failure the
		// script would receive an empty argument — confirm this is intended.
		scriptMessage, _ := json.Marshal(automationMsg.Message)

		containerID, logs, err := createContainer(ctx, automation.Image, automation.Script, string(scriptMessage))
		if err != nil {
			log.Println(err)

			return
		}

		// Attach the container ID and creation logs to the stored job.
		if _, err := db.JobUpdate(ctx, automationMsg.ID, &models.Job{
			Automation: job.Automation,
			Container:  &containerID,
			Origin:     job.Origin,
			Output:     job.Output,
			Log:        &logs,
			Payload:    job.Payload,
			Status:     job.Status,
		}); err != nil {
			log.Println(err)

			return
		}

		// Run the container; stdout is expected to be a JSON object. A run
		// failure or non-JSON output is wrapped into an "error" result.
		var result map[string]interface{}

		stdout, _, err := runDocker(ctx, automationMsg.ID, containerID, db)
		if err != nil {
			result = map[string]interface{}{"error": fmt.Sprintf("error running script %s %s", err, string(stdout))}
		} else {
			var data map[string]interface{}
			if err := json.Unmarshal(stdout, &data); err != nil {
				result = map[string]interface{}{"error": string(stdout)}
			} else {
				result = data
			}
		}

		// Publish the result for routing back to its origin, then close out
		// the job record.
		if err := catalystBus.PublishResult(automationMsg.Automation, result, automationMsg.Origin); err != nil {
			log.Println(err)
		}

		if err := db.JobComplete(ctx, automationMsg.ID, result); err != nil {
			log.Println(err)

			return
		}
	})
}
/*
func getAutomation(automationID string, config *Config) (*models.AutomationResponse, error) {
req, err := http.NewRequest(http.MethodGet, config.CatalystAPIUrl+"/automations/"+automationID, nil)
if err != nil {
return nil, err
}
req.Header.Set("PRIVATE-TOKEN", config.CatalystAPIKey)
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
b, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var automation models.AutomationResponse
if err := json.Unmarshal(b, &automation); err != nil {
return nil, err
}
return &automation, nil
}
*/

View File

@@ -1,38 +0,0 @@
package automation
import (
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// resultAutomation subscribes to result messages on the bus and routes each
// result back to its origin: either completing the task that triggered the
// job or attaching the result to an artifact as an enrichment. Results
// without a target are ignored; handler errors are logged, not returned.
func resultAutomation(catalystBus *bus.Bus, db *database.Database) error {
	return catalystBus.SubscribeResult(func(resultMsg *bus.ResultMsg) {
		if resultMsg.Target != nil {
			ctx := jobContext()
			switch {
			case resultMsg.Target.TaskOrigin != nil:
				// The job was started by a playbook task: mark that task as
				// complete with the result data.
				if _, err := db.TaskComplete(
					ctx,
					resultMsg.Target.TaskOrigin.TicketId,
					resultMsg.Target.TaskOrigin.PlaybookId,
					resultMsg.Target.TaskOrigin.TaskId,
					resultMsg.Data,
				); err != nil {
					log.Println(err)
				}
			case resultMsg.Target.ArtifactOrigin != nil:
				// The job was an artifact enrichment: store the result under
				// the automation's name.
				enrichment := &models.EnrichmentForm{
					Data: resultMsg.Data,
					Name: resultMsg.Automation,
				}

				_, err := db.EnrichArtifact(ctx, resultMsg.Target.ArtifactOrigin.TicketId, resultMsg.Target.ArtifactOrigin.Artifact, enrichment)
				if err != nil {
					log.Println(err)
				}
			}
		}
	})
}

View File

@@ -5,7 +5,6 @@ import (
"bytes"
"io"
"io/fs"
"log"
"net/http"
"os"
"os/exec"
@@ -13,20 +12,19 @@ import (
"strings"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/storage"
)
func BackupHandler(catalystStorage *storage.Storage, c *database.Config) gin.HandlerFunc {
return func(context *gin.Context) {
context.Header("Content-Disposition", "attachment; filename=backup.zip")
context.Header("Content-Type", "application/zip")
err := Backup(catalystStorage, c, context.Writer)
func backupHandler(catalystStorage *storage.Storage, c *database.Config) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Disposition", "attachment; filename=backup.zip")
w.Header().Set("Content-Type", "application/zip")
err := Backup(catalystStorage, c, w)
if err != nil {
log.Println(err)
context.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
api.JSONError(w, err)
}
}
}
@@ -43,7 +41,10 @@ func Backup(catalystStorage *storage.Storage, c *database.Config, writer io.Writ
archive := zip.NewWriter(writer)
defer archive.Close()
archive.SetComment(GetVersion())
err := archive.SetComment(GetVersion())
if err != nil {
return err
}
// S3
if err := backupS3(catalystStorage, archive); err != nil {
@@ -59,6 +60,7 @@ func backupS3(catalystStorage *storage.Storage, archive *zip.Writer) error {
if err != nil {
return err
}
for _, bucket := range buckets.Buckets {
objects, err := catalystStorage.S3().ListObjectsV2(&s3.ListObjectsV2Input{
Bucket: bucket.Name,
@@ -87,6 +89,7 @@ func backupS3(catalystStorage *storage.Storage, archive *zip.Writer) error {
}
}
}
return nil
}
@@ -106,6 +109,7 @@ func backupArango(c *database.Config, archive *zip.Writer) error {
func zipDump(dir string, archive *zip.Writer) error {
fsys := os.DirFS(dir)
return fs.WalkDir(fsys, ".", func(p string, d fs.DirEntry, err error) error {
if err != nil {
return err
@@ -128,6 +132,7 @@ func zipDump(dir string, archive *zip.Writer) error {
if _, err := io.Copy(a, f); err != nil {
return err
}
return nil
})
}
@@ -145,5 +150,6 @@ func arangodump(dir string, config *database.Config) error {
"--server.database", name,
}
cmd := exec.Command("arangodump", args...)
return cmd.Run()
}

View File

@@ -1,139 +1,69 @@
package bus
import (
"encoding/json"
"log"
"github.com/arangodb/go-driver"
emitter "github.com/emitter-io/go/v2"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
const (
channelUpdate = "data"
channelJob = "job"
channelResult = "result"
)
type Bus struct {
config *Config
client *emitter.Client
type ResultMsg struct {
Automation string `json:"automation"`
Data map[string]any `json:"data,omitempty"`
Target *model.Origin `json:"target"`
}
type Config struct {
Host string
Key string
resultBusKey string
jobBusKey string
dataBusKey string
APIUrl string
type RequestMsg struct {
IDs []driver.DocumentID `json:"ids"`
Function string `json:"function"`
User string `json:"user"`
}
type JobMsg struct {
ID string `json:"id"`
Automation string `json:"automation"`
Origin *models.Origin `json:"origin"`
Message *models.Message `json:"message"`
ID string `json:"id"`
Automation string `json:"automation"`
Origin *model.Origin `json:"origin"`
Message *model.Message `json:"message"`
}
type ResultMsg struct {
Automation string `json:"automation"`
Data map[string]interface{} `json:"data,omitempty"`
Target *models.Origin `json:"target"`
type DatabaseUpdateType string
const (
DatabaseEntryRead DatabaseUpdateType = "read"
DatabaseEntryCreated DatabaseUpdateType = "created"
DatabaseEntryUpdated DatabaseUpdateType = "updated"
)
type DatabaseUpdateMsg struct {
IDs []driver.DocumentID `json:"ids"`
Type DatabaseUpdateType `json:"type"`
}
func New(c *Config) (*Bus, error) {
client, err := emitter.Connect(c.Host, func(_ *emitter.Client, msg emitter.Message) {
log.Printf("received: '%s' topic: '%s'\n", msg.Payload(), msg.Topic())
})
if err != nil {
return nil, err
type Bus struct {
ResultChannel *Channel[*ResultMsg]
RequestChannel *Channel[*RequestMsg]
JobChannel *Channel[*JobMsg]
DatabaseChannel *Channel[*DatabaseUpdateMsg]
}
func New() *Bus {
return &Bus{
ResultChannel: &Channel[*ResultMsg]{},
RequestChannel: &Channel[*RequestMsg]{},
JobChannel: &Channel[*JobMsg]{},
DatabaseChannel: &Channel[*DatabaseUpdateMsg]{},
}
}
c.dataBusKey, err = client.GenerateKey(c.Key, channelUpdate+"/", "rwls", 0)
if err != nil {
return nil, err
type Channel[T any] struct {
Subscriber []func(T)
}
func (c *Channel[T]) Publish(msg T) {
for _, s := range c.Subscriber {
go s(msg)
}
c.jobBusKey, err = client.GenerateKey(c.Key, channelJob+"/", "rwls", 0)
if err != nil {
return nil, err
}
c.resultBusKey, err = client.GenerateKey(c.Key, channelResult+"/", "rwls", 0)
if err != nil {
return nil, err
}
return &Bus{config: c, client: client}, err
}
func (b *Bus) PublishUpdate(ids []driver.DocumentID) error {
return b.jsonPublish(ids, channelUpdate, b.config.dataBusKey)
}
func (b *Bus) PublishJob(id, automation string, payload interface{}, context *models.Context, origin *models.Origin) error {
return b.jsonPublish(&JobMsg{
ID: id,
Automation: automation,
Origin: origin,
Message: &models.Message{
Context: context,
Payload: payload,
},
}, channelJob, b.config.jobBusKey)
}
func (b *Bus) PublishResult(automation string, data map[string]interface{}, target *models.Origin) error {
return b.jsonPublish(&ResultMsg{Automation: automation, Data: data, Target: target}, channelResult, b.config.resultBusKey)
}
func (b *Bus) jsonPublish(msg interface{}, channel, key string) error {
payload, err := json.Marshal(msg)
if err != nil {
return err
}
return b.client.Publish(key, channel, payload)
}
func (b *Bus) SubscribeUpdate(f func(ids []driver.DocumentID)) error {
return b.safeSubscribe(b.config.dataBusKey, channelUpdate, func(c *emitter.Client, m emitter.Message) {
var msg []driver.DocumentID
if err := json.Unmarshal(m.Payload(), &msg); err != nil {
log.Println(err)
return
}
go f(msg)
})
}
func (b *Bus) SubscribeJob(f func(msg *JobMsg)) error {
return b.safeSubscribe(b.config.jobBusKey, channelJob, func(c *emitter.Client, m emitter.Message) {
var msg JobMsg
if err := json.Unmarshal(m.Payload(), &msg); err != nil {
log.Println(err)
return
}
go f(&msg)
})
}
func (b *Bus) SubscribeResult(f func(msg *ResultMsg)) error {
return b.safeSubscribe(b.config.resultBusKey, channelResult, func(c *emitter.Client, m emitter.Message) {
var msg ResultMsg
if err := json.Unmarshal(m.Payload(), &msg); err != nil {
log.Println(err)
return
}
go f(&msg)
})
}
func (b *Bus) safeSubscribe(key, channel string, handler func(c *emitter.Client, m emitter.Message)) error {
defer func() {
if r := recover(); r != nil {
log.Printf("Recovered %s in channel %s\n", r, channel)
}
}()
return b.client.Subscribe(key, channel, handler)
func (c *Channel[T]) Subscribe(handler func(T)) {
c.Subscriber = append(c.Subscriber, handler)
}

53
busservice/busservice.go Normal file
View File

@@ -0,0 +1,53 @@
package busservice
import (
"context"
"log"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// busService subscribes to the catalyst bus and executes or persists the
// messages it receives (requests, jobs, results).
type busService struct {
	db          *database.Database // target database for jobs, logs and results
	apiURL      string             // catalyst API URL injected into automation scripts as a secret
	apiKey      string             // catalyst API key injected into automation scripts as a secret
	catalystBus *bus.Bus           // bus used to publish automation results
	network     string             // docker network the automation containers are attached to
}
// New wires a busService into the given bus so that incoming requests are
// logged and incoming jobs and results are handled against the database.
func New(apiURL, apikey, network string, catalystBus *bus.Bus, db *database.Database) {
	svc := &busService{
		db:          db,
		apiURL:      apiURL,
		apiKey:      apikey,
		network:     network,
		catalystBus: catalystBus,
	}

	catalystBus.RequestChannel.Subscribe(svc.logRequest)
	catalystBus.ResultChannel.Subscribe(svc.handleResult)
	catalystBus.JobChannel.Subscribe(svc.handleJob)
}
// busContext returns a background context carrying the internal "bot" user,
// which performs bus-triggered database writes with the admin role.
func busContext() context.Context {
	// TODO: change roles?
	bot := maut.User{ID: "bot", Roles: []string{maut.AdminRole}}

	return maut.UserContext(context.Background(), &bot, nil) // TODO add permissions ?
}
// logRequest writes one "request" log entry per document ID referenced by the
// incoming bus message, recording which user called which function and when.
// Failures are logged, not returned, since bus handlers have no caller.
func (h *busService) logRequest(msg *bus.RequestMsg) {
	// Pre-size the batch: exactly one entry per referenced document ID.
	logEntries := make([]*model.LogEntry, 0, len(msg.IDs))

	for _, id := range msg.IDs {
		logEntries = append(logEntries, &model.LogEntry{
			Type:      "request",
			Reference: id.String(),
			Creator:   msg.User,
			Message:   msg.Function,
			Created:   time.Now().UTC(),
		})
	}

	if err := h.db.LogBatchCreate(busContext(), logEntries); err != nil {
		log.Println(err)
	}
}

View File

@@ -1,4 +1,4 @@
package automation
package busservice
import (
"archive/tar"
@@ -17,7 +17,7 @@ import (
"github.com/SecurityBrewery/catalyst/database"
)
func createContainer(ctx context.Context, image, script, data string) (string, string, error) {
func createContainer(ctx context.Context, image, script, data, network string) (string, string, error) {
cli, err := client.NewClientWithOpts(client.FromEnv)
if err != nil {
return "", "", err
@@ -29,10 +29,16 @@ func createContainer(ctx context.Context, image, script, data string) (string, s
}
config := &container.Config{
Image: image, Cmd: []string{"/script", data}, WorkingDir: "/home",
AttachStderr: true, AttachStdout: true,
Image: image,
Cmd: []string{"/script", data},
WorkingDir: "/home",
AttachStderr: true,
AttachStdout: true,
}
resp, err := cli.ContainerCreate(ctx, config, nil, nil, "")
hostConfig := &container.HostConfig{
NetworkMode: container.NetworkMode(network),
}
resp, err := cli.ContainerCreate(ctx, config, hostConfig, nil, "")
if err != nil {
return "", logs, err
}
@@ -53,13 +59,15 @@ func pullImage(ctx context.Context, cli *client.Client, image string) (string, e
buf := &bytes.Buffer{}
_, err = io.Copy(buf, reader)
return buf.String(), err
}
func copyFile(ctx context.Context, cli *client.Client, path string, contentString string, id string) error {
tarBuf := &bytes.Buffer{}
tw := tar.NewWriter(tarBuf)
if err := tw.WriteHeader(&tar.Header{Name: path, Mode: 0755, Size: int64(len(contentString))}); err != nil {
header := &tar.Header{Name: path, Mode: 0o755, Size: int64(len(contentString))}
if err := tw.WriteHeader(header); err != nil {
return err
}
@@ -84,7 +92,12 @@ func runDocker(ctx context.Context, jobID, containerID string, db *database.Data
return nil, nil, err
}
defer cli.ContainerRemove(ctx, containerID, types.ContainerRemoveOptions{Force: true})
defer func(cli *client.Client, ctx context.Context, containerID string, options types.ContainerRemoveOptions) {
err := cli.ContainerRemove(ctx, containerID, options)
if err != nil {
log.Println(err)
}
}(cli, ctx, containerID, types.ContainerRemoveOptions{Force: true})
if err := cli.ContainerStart(ctx, containerID, types.ContainerStartOptions{}); err != nil {
return nil, nil, err
@@ -117,13 +130,16 @@ func streamStdErr(ctx context.Context, cli *client.Client, jobID, containerID st
err := scanLines(ctx, jobID, containerLogs, stderrBuf, db)
if err != nil {
log.Println(err)
return
}
if err := containerLogs.Close(); err != nil {
log.Println(err)
return
}
}()
return stderrBuf, nil
}
@@ -133,24 +149,28 @@ func scanLines(ctx context.Context, jobID string, input io.ReadCloser, output io
_, err := stdcopy.StdCopy(w, w, input)
if err != nil {
log.Println(err)
return
}
if err := w.Close(); err != nil {
log.Println(err)
return
}
}()
s := bufio.NewScanner(r)
for s.Scan() {
b := s.Bytes()
output.Write(b)
output.Write([]byte("\n"))
_, _ = output.Write(b)
_, _ = output.Write([]byte("\n"))
if err := db.JobLogAppend(ctx, jobID, string(b)+"\n"); err != nil {
log.Println(err)
continue
}
}
return s.Err()
}
@@ -166,6 +186,7 @@ func waitForContainer(ctx context.Context, cli *client.Client, containerID strin
return fmt.Errorf("container returned status code %d: stderr: %s", exitStatus.StatusCode, stderrBuf.String())
}
}
return nil
}

120
busservice/job.go Normal file
View File

@@ -0,0 +1,120 @@
package busservice
import (
"encoding/json"
"fmt"
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// handleJob consumes a job message from the bus: it records the job, runs the
// referenced automation script inside a Docker container, publishes the
// script's output as a result message and marks the job as complete. Errors
// are logged, not returned, since bus handlers have no caller.
func (h *busService) handleJob(automationMsg *bus.JobMsg) {
	ctx := busContext()

	// Persist the job before running it.
	job, err := h.db.JobCreate(ctx, automationMsg.ID, &model.JobForm{
		Automation: automationMsg.Automation,
		Payload:    automationMsg.Message.Payload,
		Origin:     automationMsg.Origin,
	})
	if err != nil {
		log.Println(err)

		return
	}

	automation, err := h.db.AutomationGet(ctx, automationMsg.Automation)
	if err != nil {
		log.Println(err)

		return
	}

	if automation.Script == "" {
		log.Println("automation is empty")

		return
	}

	// Inject the catalyst API credentials as secrets so the script can call
	// back into the platform.
	if automationMsg.Message.Secrets == nil {
		automationMsg.Message.Secrets = map[string]string{}
	}
	automationMsg.Message.Secrets["catalyst_apikey"] = h.apiKey
	automationMsg.Message.Secrets["catalyst_apiurl"] = h.apiURL

	scriptMessage, err := json.Marshal(automationMsg.Message)
	if err != nil {
		log.Println(err)

		return
	}

	// Create the container for the script on the configured Docker network.
	containerID, logs, err := createContainer(ctx, automation.Image, automation.Script, string(scriptMessage), h.network)
	if err != nil {
		log.Println(err)

		return
	}

	// Attach the container ID and creation logs to the stored job.
	if _, err := h.db.JobUpdate(ctx, automationMsg.ID, &model.JobUpdate{
		Container: &containerID,
		Running:   true,
		Output:    job.Output,
		Log:       &logs,
		Status:    job.Status,
	}); err != nil {
		log.Println(err)

		return
	}

	// Run the container; stdout is expected to be a JSON object. A run
	// failure or non-JSON output is wrapped into an "error" result instead.
	var result map[string]any

	stdout, _, err := runDocker(ctx, automationMsg.ID, containerID, h.db)
	if err != nil {
		result = map[string]any{"error": fmt.Sprintf("error running script %s %s", err, string(stdout))}
	} else {
		var data map[string]any
		if err := json.Unmarshal(stdout, &data); err != nil {
			result = map[string]any{"error": string(stdout)}
		} else {
			result = data
		}
	}

	// Publish the result for routing back to its origin (see handleResult),
	// then close out the job record.
	h.catalystBus.ResultChannel.Publish(&bus.ResultMsg{Automation: automationMsg.Automation, Data: result, Target: automationMsg.Origin})

	if err := h.db.JobComplete(ctx, automationMsg.ID, result); err != nil {
		log.Println(err)

		return
	}
}
/*
func getAutomation(automationID string, config *Config) (*model.AutomationResponse, error) {
req, err := http.NewRequest(http.MethodGet, config.CatalystAPIUrl+"/automations/"+automationID, nil)
if err != nil {
return nil, err
}
req.Header.Set("PRIVATE-TOKEN", config.CatalystAPIKey)
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
b, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var automation model.AutomationResponse
if err := json.Unmarshal(b, &automation); err != nil {
return nil, err
}
return &automation, nil
}
*/

35
busservice/result.go Normal file
View File

@@ -0,0 +1,35 @@
package busservice
import (
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// handleResult routes an automation result back to the place that
// triggered it. A result carrying a TaskOrigin completes the matching
// playbook task; one carrying an ArtifactOrigin is attached to the
// ticket artifact as an enrichment. Results without a target are
// silently dropped; database errors are logged, not propagated.
func (h *busService) handleResult(resultMsg *bus.ResultMsg) {
	target := resultMsg.Target
	if target == nil {
		return
	}

	ctx := busContext()

	switch {
	case target.TaskOrigin != nil:
		origin := target.TaskOrigin
		_, err := h.db.TaskComplete(ctx, origin.TicketId, origin.PlaybookId, origin.TaskId, resultMsg.Data)
		if err != nil {
			log.Println(err)
		}
	case target.ArtifactOrigin != nil:
		origin := target.ArtifactOrigin
		// The enrichment is stored under the automation's name so the
		// UI can show which automation produced it.
		form := &model.EnrichmentForm{
			Data: resultMsg.Data,
			Name: resultMsg.Automation,
		}
		if _, err := h.db.EnrichArtifact(ctx, origin.TicketId, origin.Artifact, form); err != nil {
			log.Println(err)
		}
	}
}

View File

@@ -8,7 +8,7 @@ import (
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
var TooComplexError = errors.New("unsupported features for index queries, use advanced search instead")
var ErrTooComplex = errors.New("unsupported features for index queries, use advanced search instead")
type bleveBuilder struct {
*parser.BaseCAQLParserListener
@@ -35,8 +35,9 @@ func (s *bleveBuilder) pop() (n string) {
return
}
func (s *bleveBuilder) binaryPop() (interface{}, interface{}) {
func (s *bleveBuilder) binaryPop() (any, any) {
right, left := s.pop(), s.pop()
return left, right
}
@@ -48,9 +49,7 @@ func (s *bleveBuilder) ExitExpression(ctx *parser.ExpressionContext) {
case ctx.Reference() != nil:
// pass
case ctx.Operator_unary() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_PLUS() != nil:
fallthrough
case ctx.T_MINUS() != nil:
@@ -60,13 +59,9 @@ func (s *bleveBuilder) ExitExpression(ctx *parser.ExpressionContext) {
case ctx.T_DIV() != nil:
fallthrough
case ctx.T_MOD() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_RANGE() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
left, right := s.binaryPop()
s.push(fmt.Sprintf("%s:<%s", left, right))
@@ -79,64 +74,46 @@ func (s *bleveBuilder) ExitExpression(ctx *parser.ExpressionContext) {
case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
left, right := s.binaryPop()
s.push(fmt.Sprintf("%s:>=%s", left, right))
case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
left, right := s.binaryPop()
s.push(fmt.Sprintf("%s:%s", left, right))
case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
left, right := s.binaryPop()
s.push(fmt.Sprintf("-%s:%s", left, right))
case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
fallthrough
case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
fallthrough
case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
fallthrough
case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
fallthrough
case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_LIKE() != nil:
s.err = errors.New("index queries are like queries by default")
return
case ctx.T_REGEX_MATCH() != nil:
left, right := s.binaryPop()
if ctx.T_NOT() != nil {
s.err = TooComplexError
return
s.err = ErrTooComplex
} else {
s.push(fmt.Sprintf("%s:/%s/", left, right))
}
case ctx.T_REGEX_NON_MATCH() != nil:
s.err = errors.New("index query cannot contain regex non matches, use advanced search instead")
return
case ctx.T_AND() != nil:
left, right := s.binaryPop()
s.push(fmt.Sprintf("%s %s", left, right))
case ctx.T_OR() != nil:
s.err = errors.New("index query cannot contain OR, use advanced search instead")
return
case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
return
case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
return
default:
panic("unknown expression")
}
@@ -152,17 +129,13 @@ func (s *bleveBuilder) ExitReference(ctx *parser.ReferenceContext) {
case ctx.T_STRING() != nil:
s.push(ctx.T_STRING().GetText())
case ctx.Compound_value() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.Function_call() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_OPEN() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
case ctx.T_ARRAY_OPEN() != nil:
s.err = TooComplexError
return
s.err = ErrTooComplex
default:
panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
}

View File

@@ -1,10 +1,14 @@
package caql
package caql_test
import (
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
func TestBleveBuilder(t *testing.T) {
t.Parallel()
tests := []struct {
name string
saql string
@@ -18,15 +22,20 @@ func TestBleveBuilder(t *testing.T) {
{name: "Search 4", saql: `title == 'malware' AND 'wannacry'`, wantBleve: `title:"malware" "wannacry"`},
}
for _, tt := range tests {
parser := &Parser{}
tt := tt
parser := &caql.Parser{}
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
@@ -37,6 +46,7 @@ func TestBleveBuilder(t *testing.T) {
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {

View File

@@ -5,6 +5,8 @@ import (
"strconv"
"strings"
"golang.org/x/exp/slices"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
@@ -40,6 +42,7 @@ func (s *aqlBuilder) pop() (n string) {
func (s *aqlBuilder) binaryPop() (string, string) {
right, left := s.pop(), s.pop()
return left, right
}
@@ -181,8 +184,10 @@ func (s *aqlBuilder) toBoolString(v string) string {
if err != nil {
panic("invalid search " + err.Error())
}
return fmt.Sprintf(`d._key IN ["%s"]`, strings.Join(ids, `","`))
}
return v
}
@@ -246,7 +251,7 @@ func (s *aqlBuilder) ExitFunction_call(ctx *parser.Function_callContext) {
}
parameter := strings.Join(array, ", ")
if !stringSliceContains(functionNames, strings.ToUpper(ctx.T_STRING().GetText())) {
if !slices.Contains(functionNames, strings.ToUpper(ctx.T_STRING().GetText())) {
panic("unknown function")
}

View File

@@ -16,7 +16,6 @@ import (
func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
switch strings.ToUpper(ctx.T_STRING().GetText()) {
default:
s.appendErrors(errors.New("unknown function"))
@@ -26,8 +25,8 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
seen := map[interface{}]bool{}
values, anyArray := s.pop().([]interface{}), s.pop().([]interface{})
seen := map[any]bool{}
values, anyArray := s.pop().([]any), s.pop().([]any)
if u {
for _, e := range anyArray {
@@ -45,18 +44,18 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
s.push(anyArray)
case "COUNT_DISTINCT", "COUNT_UNIQUE":
count := 0
seen := map[interface{}]bool{}
array := s.pop().([]interface{})
seen := map[any]bool{}
array := s.pop().([]any)
for _, e := range array {
_, ok := seen[e]
if !ok {
seen[e] = true
count += 1
count++
}
}
s.push(float64(count))
case "FIRST":
array := s.pop().([]interface{})
array := s.pop().([]any)
if len(array) == 0 {
s.push(nil)
} else {
@@ -65,16 +64,16 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
// case "FLATTEN":
// case "INTERLEAVE":
case "INTERSECTION":
iset := New(s.pop().([]interface{})...)
iset := NewSet(s.pop().([]any)...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Intersection(New(s.pop().([]interface{})...))
iset = iset.Intersection(NewSet(s.pop().([]any)...))
}
s.push(iset.Values())
// case "JACCARD":
case "LAST":
array := s.pop().([]interface{})
array := s.pop().([]any)
if len(array) == 0 {
s.push(nil)
} else {
@@ -94,9 +93,9 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
s.push(float64(len(fmt.Sprint(v))))
case string:
s.push(float64(utf8.RuneCountInString(v)))
case []interface{}:
case []any:
s.push(float64(len(v)))
case map[string]interface{}:
case map[string]any:
s.push(float64(len(v)))
default:
panic("unknown type")
@@ -104,7 +103,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
case "MINUS":
var sets []*Set
for i := 0; i < len(ctx.AllExpression()); i++ {
sets = append(sets, New(s.pop().([]interface{})...))
sets = append(sets, NewSet(s.pop().([]any)...))
}
iset := sets[len(sets)-1]
@@ -116,7 +115,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
s.push(iset.Values())
case "NTH":
pos := s.pop().(float64)
array := s.pop().([]interface{})
array := s.pop().([]any)
if int(pos) >= len(array) || pos < 0 {
s.push(nil)
} else {
@@ -124,16 +123,16 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
// case "OUTERSECTION":
// array := s.pop().([]interface{})
// union := New(array...)
// intersection := New(s.pop().([]interface{})...)
// union := NewSet(array...)
// intersection := NewSet(s.pop().([]interface{})...)
// for i := 1; i < len(ctx.AllExpression()); i++ {
// array = s.pop().([]interface{})
// union = union.Union(New(array...))
// intersection = intersection.Intersection(New(array...))
// union = union.Union(NewSet(array...))
// intersection = intersection.Intersection(NewSet(array...))
// }
// s.push(union.Minus(intersection).Values())
case "POP":
array := s.pop().([]interface{})
array := s.pop().([]any)
s.push(array[:len(array)-1])
case "POSITION", "CONTAINS_ARRAY":
returnIndex := false
@@ -141,7 +140,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
returnIndex = s.pop().(bool)
}
search := s.pop()
array := s.pop().([]interface{})
array := s.pop().([]any)
for idx, e := range array {
if e == search {
@@ -164,7 +163,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]interface{})
array := s.pop().([]any)
if u && contains(array, element) {
s.push(array)
@@ -173,13 +172,13 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
case "REMOVE_NTH":
position := s.pop().(float64)
anyArray := s.pop().([]interface{})
anyArray := s.pop().([]any)
if position < 0 {
position = float64(len(anyArray) + int(position))
}
result := []interface{}{}
result := []any{}
for idx, e := range anyArray {
if idx != int(position) {
result = append(result, e)
@@ -193,7 +192,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
replaceValue := s.pop().(string)
position := s.pop().(float64)
anyArray := s.pop().([]interface{})
anyArray := s.pop().([]any)
if position < 0 {
position = float64(len(anyArray) + int(position))
@@ -224,8 +223,8 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
limit = s.pop().(float64)
}
value := s.pop()
array := s.pop().([]interface{})
result := []interface{}{}
array := s.pop().([]any)
result := []any{}
for idx, e := range array {
if e != value || float64(idx) > limit {
result = append(result, e)
@@ -233,9 +232,9 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
s.push(result)
case "REMOVE_VALUES":
values := s.pop().([]interface{})
array := s.pop().([]interface{})
result := []interface{}{}
values := s.pop().([]any)
array := s.pop().([]any)
result := []any{}
for _, e := range array {
if !contains(values, e) {
result = append(result, e)
@@ -243,14 +242,14 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
s.push(result)
case "REVERSE":
array := s.pop().([]interface{})
var reverse []interface{}
array := s.pop().([]any)
var reverse []any
for _, e := range array {
reverse = append([]interface{}{e}, reverse...)
reverse = append([]any{e}, reverse...)
}
s.push(reverse)
case "SHIFT":
s.push(s.pop().([]interface{})[1:])
s.push(s.pop().([]any)[1:])
case "SLICE":
length := float64(-1)
full := true
@@ -259,7 +258,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
full = false
}
start := int64(s.pop().(float64))
array := s.pop().([]interface{})
array := s.pop().([]any)
if start < 0 {
start = int64(len(array)) + start
@@ -276,43 +275,43 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
s.push(array[start:end])
case "SORTED":
array := s.pop().([]interface{})
array := s.pop().([]any)
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "SORTED_UNIQUE":
array := s.pop().([]interface{})
array := s.pop().([]any)
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(unique(array))
case "UNION":
array := s.pop().([]interface{})
array := s.pop().([]any)
for i := 1; i < len(ctx.AllExpression()); i++ {
array = append(array, s.pop().([]interface{})...)
array = append(array, s.pop().([]any)...)
}
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "UNION_DISTINCT":
iset := New(s.pop().([]interface{})...)
iset := NewSet(s.pop().([]any)...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Union(New(s.pop().([]interface{})...))
iset = iset.Union(NewSet(s.pop().([]any)...))
}
s.push(unique(iset.Values()))
case "UNIQUE":
s.push(unique(s.pop().([]interface{})))
s.push(unique(s.pop().([]any)))
case "UNSHIFT":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]interface{})
array := s.pop().([]any)
if u && contains(array, element) {
s.push(array)
} else {
s.push(append([]interface{}{element}, array...))
s.push(append([]any{element}, array...))
}
// Bit https://www.arangodb.com/docs/stable/aql/functions-bit.html
@@ -367,8 +366,8 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
if len(ctx.AllExpression()) >= 2 {
removeInternal = s.pop().(bool)
}
var keys []interface{}
for k := range s.pop().(map[string]interface{}) {
var keys []any
for k := range s.pop().(map[string]any) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
keys = append(keys, k)
@@ -379,20 +378,20 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
// case "COUNT":
case "HAS":
right, left := s.pop(), s.pop()
_, ok := left.(map[string]interface{})[right.(string)]
_, ok := left.(map[string]any)[right.(string)]
s.push(ok)
// case "KEEP":
// case "LENGTH":
// case "MATCHES":
case "MERGE":
var docs []map[string]interface{}
var docs []map[string]any
if len(ctx.AllExpression()) == 1 {
for _, doc := range s.pop().([]interface{}) {
docs = append([]map[string]interface{}{doc.(map[string]interface{})}, docs...)
for _, doc := range s.pop().([]any) {
docs = append([]map[string]any{doc.(map[string]any)}, docs...)
}
} else {
for i := 0; i < len(ctx.AllExpression()); i++ {
docs = append(docs, s.pop().(map[string]interface{}))
docs = append(docs, s.pop().(map[string]any))
}
}
@@ -404,9 +403,9 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
}
s.push(doc)
case "MERGE_RECURSIVE":
var doc map[string]interface{}
var doc map[string]any
for i := 0; i < len(ctx.AllExpression()); i++ {
err := mergo.Merge(&doc, s.pop().(map[string]interface{}))
err := mergo.Merge(&doc, s.pop().(map[string]any))
if err != nil {
panic(err)
}
@@ -421,8 +420,8 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
if len(ctx.AllExpression()) == 2 {
removeInternal = s.pop().(bool)
}
var values []interface{}
for k, v := range s.pop().(map[string]interface{}) {
var values []any
for k, v := range s.pop().(map[string]any) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
values = append(values, v)
@@ -458,10 +457,10 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
case "AVERAGE", "AVG":
count := 0
sum := float64(0)
array := s.pop().([]interface{})
array := s.pop().([]any)
for _, element := range array {
if element != nil {
count += 1
count++
sum += toNumber(element)
}
}
@@ -506,7 +505,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
case "MAX":
var set bool
var max float64
array := s.pop().([]interface{})
array := s.pop().([]any)
for _, element := range array {
if element != nil {
if !set || toNumber(element) > max {
@@ -521,7 +520,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
s.push(nil)
}
case "MEDIAN":
array := s.pop().([]interface{})
array := s.pop().([]any)
var numbers []float64
for _, element := range array {
if f, ok := element.(float64); ok {
@@ -544,7 +543,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
case "MIN":
var set bool
var min float64
array := s.pop().([]interface{})
array := s.pop().([]any)
for _, element := range array {
if element != nil {
if !set || toNumber(element) < min {
@@ -566,7 +565,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
s.push(math.Pow(left.(float64), right.(float64)))
case "PRODUCT":
product := float64(1)
array := s.pop().([]interface{})
array := s.pop().([]any)
for _, element := range array {
if element != nil {
product *= toNumber(element)
@@ -578,7 +577,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
case "RAND":
s.push(rand.Float64())
case "RANGE":
var array []interface{}
var array []any
var start, end, step float64
if len(ctx.AllExpression()) == 2 {
right, left := s.pop(), s.pop()
@@ -612,7 +611,7 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
// case "STDDEV":
case "SUM":
sum := float64(0)
array := s.pop().([]interface{})
array := s.pop().([]any)
for _, element := range array {
sum += toNumber(element)
}
@@ -691,7 +690,6 @@ func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
// case "IS_IPV4":
// case "IS_KEY":
// case "TYPENAME":
}
}
@@ -705,6 +703,7 @@ func unique(array []interface{}) []interface{} {
filtered = append(filtered, e)
}
}
return filtered
}
@@ -714,15 +713,7 @@ func contains(values []interface{}, e interface{}) bool {
return true
}
}
return false
}
func stringSliceContains(values []string, e string) bool {
for _, v := range values {
if e == v {
return true
}
}
return false
}
@@ -747,4 +738,5 @@ var functionNames = []string{
"REGEX_REPLACE", "REVERSE", "RIGHT", "RTRIM", "SHA1", "SHA512", "SOUNDEX", "SPLIT", "STARTS_WITH", "SUBSTITUTE",
"SUBSTRING", "TOKENS", "TO_BASE64", "TO_HEX", "TRIM", "UPPER", "UUID", "TO_BOOL", "TO_NUMBER", "TO_STRING",
"TO_ARRAY", "TO_LIST", "IS_NULL", "IS_BOOL", "IS_NUMBER", "IS_STRING", "IS_ARRAY", "IS_LIST", "IS_OBJECT",
"IS_DOCUMENT", "IS_DATESTRING", "IS_IPV4", "IS_KEY", "TYPENAME"}
"IS_DOCUMENT", "IS_DATESTRING", "IS_IPV4", "IS_KEY", "TYPENAME",
}

View File

@@ -1,18 +1,22 @@
package caql
package caql_test
import (
"encoding/json"
"math"
"reflect"
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
func TestFunctions(t *testing.T) {
t.Parallel()
tests := []struct {
name string
saql string
wantRebuild string
wantValue interface{}
wantValue any
wantParseErr bool
wantRebuildErr bool
wantEvalErr bool
@@ -266,13 +270,13 @@ func TestFunctions(t *testing.T) {
{name: "RADIANS", saql: `RADIANS(0)`, wantRebuild: `RADIANS(0)`, wantValue: 0},
// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.3503170117504508},
// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.6138226173882478},
{name: "RANGE", saql: `RANGE(1, 4)`, wantRebuild: `RANGE(1, 4)`, wantValue: []interface{}{float64(1), float64(2), float64(3), float64(4)}},
{name: "RANGE", saql: `RANGE(1, 4, 2)`, wantRebuild: `RANGE(1, 4, 2)`, wantValue: []interface{}{float64(1), float64(3)}},
{name: "RANGE", saql: `RANGE(1, 4, 3)`, wantRebuild: `RANGE(1, 4, 3)`, wantValue: []interface{}{float64(1), float64(4)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5)`, wantRebuild: `RANGE(1.5, 2.5)`, wantValue: []interface{}{float64(1), float64(2)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 1)`, wantRebuild: `RANGE(1.5, 2.5, 1)`, wantValue: []interface{}{1.5, 2.5}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 0.5)`, wantRebuild: `RANGE(1.5, 2.5, 0.5)`, wantValue: []interface{}{1.5, 2.0, 2.5}},
{name: "RANGE", saql: `RANGE(-0.75, 1.1, 0.5)`, wantRebuild: `RANGE(-0.75, 1.1, 0.5)`, wantValue: []interface{}{-0.75, -0.25, 0.25, 0.75}},
{name: "RANGE", saql: `RANGE(1, 4)`, wantRebuild: `RANGE(1, 4)`, wantValue: []any{float64(1), float64(2), float64(3), float64(4)}},
{name: "RANGE", saql: `RANGE(1, 4, 2)`, wantRebuild: `RANGE(1, 4, 2)`, wantValue: []any{float64(1), float64(3)}},
{name: "RANGE", saql: `RANGE(1, 4, 3)`, wantRebuild: `RANGE(1, 4, 3)`, wantValue: []any{float64(1), float64(4)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5)`, wantRebuild: `RANGE(1.5, 2.5)`, wantValue: []any{float64(1), float64(2)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 1)`, wantRebuild: `RANGE(1.5, 2.5, 1)`, wantValue: []any{1.5, 2.5}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 0.5)`, wantRebuild: `RANGE(1.5, 2.5, 0.5)`, wantValue: []any{1.5, 2.0, 2.5}},
{name: "RANGE", saql: `RANGE(-0.75, 1.1, 0.5)`, wantRebuild: `RANGE(-0.75, 1.1, 0.5)`, wantValue: []any{-0.75, -0.25, 0.25, 0.75}},
{name: "ROUND", saql: `ROUND(2.49)`, wantRebuild: `ROUND(2.49)`, wantValue: 2},
{name: "ROUND", saql: `ROUND(2.50)`, wantRebuild: `ROUND(2.50)`, wantValue: 3},
{name: "ROUND", saql: `ROUND(-2.50)`, wantRebuild: `ROUND(-2.50)`, wantValue: -2},
@@ -299,15 +303,20 @@ func TestFunctions(t *testing.T) {
{name: "Function Error 3", saql: `ABS("abs")`, wantRebuild: `ABS("abs")`, wantEvalErr: true},
}
for _, tt := range tests {
parser := &Parser{}
tt := tt
parser := &caql.Parser{}
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
@@ -318,6 +327,7 @@ func TestFunctions(t *testing.T) {
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
@@ -327,18 +337,19 @@ func TestFunctions(t *testing.T) {
t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
}
var myJson map[string]interface{}
var myJSON map[string]any
if tt.values != "" {
err = json.Unmarshal([]byte(tt.values), &myJson)
err = json.Unmarshal([]byte(tt.values), &myJSON)
if err != nil {
t.Fatal(err)
}
}
value, err := expr.Eval(myJson)
value, err := expr.Eval(myJSON)
if (err != nil) != tt.wantEvalErr {
t.Error(expr.String())
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
@@ -367,14 +378,15 @@ func TestFunctions(t *testing.T) {
}
}
func jsonParse(s string) interface{} {
func jsonParse(s string) any {
if s == "" {
return nil
}
var j interface{}
var j any
err := json.Unmarshal([]byte(s), &j)
if err != nil {
panic(s + err.Error())
}
return j
}

View File

@@ -10,22 +10,23 @@ import (
type aqlInterpreter struct {
*parser.BaseCAQLParserListener
values map[string]interface{}
stack []interface{}
values map[string]any
stack []any
errs []error
}
// push is a helper function for pushing new node to the listener Stack.
func (s *aqlInterpreter) push(i interface{}) {
func (s *aqlInterpreter) push(i any) {
s.stack = append(s.stack, i)
}
// pop is a helper function for poping a node from the listener Stack.
func (s *aqlInterpreter) pop() (n interface{}) {
func (s *aqlInterpreter) pop() (n any) {
// Check that we have nodes in the stack.
size := len(s.stack)
if size < 1 {
s.appendErrors(ErrStack)
return
}
@@ -35,8 +36,9 @@ func (s *aqlInterpreter) pop() (n interface{}) {
return
}
func (s *aqlInterpreter) binaryPop() (interface{}, interface{}) {
func (s *aqlInterpreter) binaryPop() (any, any) {
right, left := s.pop(), s.pop()
return left, right
}
@@ -54,17 +56,14 @@ func (s *aqlInterpreter) ExitExpression(ctx *parser.ExpressionContext) {
s.push(plus(s.binaryPop()))
case ctx.T_MINUS() != nil:
s.push(minus(s.binaryPop()))
case ctx.T_TIMES() != nil:
s.push(times(s.binaryPop()))
case ctx.T_DIV() != nil:
s.push(div(s.binaryPop()))
case ctx.T_MOD() != nil:
s.push(mod(s.binaryPop()))
case ctx.T_RANGE() != nil:
s.push(aqlrange(s.binaryPop()))
case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
s.push(lt(s.binaryPop()))
case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
@@ -73,35 +72,30 @@ func (s *aqlInterpreter) ExitExpression(ctx *parser.ExpressionContext) {
s.push(le(s.binaryPop()))
case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
s.push(ge(s.binaryPop()))
case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
s.push(maybeNot(ctx, in(s.binaryPop())))
case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
s.push(eq(s.binaryPop()))
case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
s.push(ne(s.binaryPop()))
case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
right, left := s.pop(), s.pop()
s.push(all(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
s.push(all(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
right, left := s.pop(), s.pop()
s.push(any(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
s.push(anyElement(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
right, left := s.pop(), s.pop()
s.push(none(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
s.push(none(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
right, left := s.pop(), s.pop()
s.push(all(left.([]interface{}), in, right))
s.push(all(left.([]any), in, right))
case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
right, left := s.pop(), s.pop()
s.push(any(left.([]interface{}), in, right))
s.push(anyElement(left.([]any), in, right))
case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
right, left := s.pop(), s.pop()
s.push(none(left.([]interface{}), in, right))
s.push(none(left.([]any), in, right))
case ctx.T_LIKE() != nil:
m, err := like(s.binaryPop())
s.appendErrors(err)
@@ -114,21 +108,18 @@ func (s *aqlInterpreter) ExitExpression(ctx *parser.ExpressionContext) {
m, err := regexNonMatch(s.binaryPop())
s.appendErrors(err)
s.push(maybeNot(ctx, m))
case ctx.T_AND() != nil:
s.push(and(s.binaryPop()))
case ctx.T_OR() != nil:
s.push(or(s.binaryPop()))
case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
right, middle, left := s.pop(), s.pop(), s.pop()
s.push(ternary(left, middle, right))
case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
right, left := s.pop(), s.pop()
s.push(ternary(left, nil, right))
default:
panic("unkown expression")
panic("unknown expression")
}
}
@@ -159,7 +150,7 @@ func (s *aqlInterpreter) ExitReference(ctx *parser.ReferenceContext) {
case ctx.DOT() != nil:
reference := s.pop()
s.push(reference.(map[string]interface{})[ctx.T_STRING().GetText()])
s.push(reference.(map[string]any)[ctx.T_STRING().GetText()])
case ctx.T_STRING() != nil:
s.push(s.getVar(ctx.T_STRING().GetText()))
case ctx.Compound_value() != nil:
@@ -175,14 +166,15 @@ func (s *aqlInterpreter) ExitReference(ctx *parser.ReferenceContext) {
if f, ok := key.(float64); ok {
index := int(f)
if index < 0 {
index = len(reference.([]interface{})) + index
index = len(reference.([]any)) + index
}
s.push(reference.([]interface{})[index])
s.push(reference.([]any)[index])
return
}
s.push(reference.(map[string]interface{})[key.(string)])
s.push(reference.(map[string]any)[key.(string)])
default:
panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
}
@@ -239,17 +231,17 @@ func (s *aqlInterpreter) ExitValue_literal(ctx *parser.Value_literalContext) {
// ExitArray is called when production array is exited.
func (s *aqlInterpreter) ExitArray(ctx *parser.ArrayContext) {
array := []interface{}{}
array := []any{}
for range ctx.AllExpression() {
// prepend element
array = append([]interface{}{s.pop()}, array...)
array = append([]any{s.pop()}, array...)
}
s.push(array)
}
// ExitObject is called when production object is exited.
func (s *aqlInterpreter) ExitObject(ctx *parser.ObjectContext) {
object := map[string]interface{}{}
object := map[string]any{}
for range ctx.AllObject_element() {
key, value := s.pop(), s.pop()
@@ -290,7 +282,7 @@ func (s *aqlInterpreter) ExitObject_element_name(ctx *parser.Object_element_name
}
}
func (s *aqlInterpreter) getVar(identifier string) interface{} {
func (s *aqlInterpreter) getVar(identifier string) any {
v, ok := s.values[identifier]
if !ok {
s.appendErrors(ErrUndefined)
@@ -303,10 +295,11 @@ func maybeNot(ctx *parser.ExpressionContext, m bool) bool {
if ctx.T_NOT() != nil {
return !m
}
return m
}
func getOp(tokenType int) func(left, right interface{}) bool {
func getOp(tokenType int) func(left, right any) bool {
switch tokenType {
case parser.CAQLLexerT_EQ:
return eq
@@ -323,33 +316,36 @@ func getOp(tokenType int) func(left, right interface{}) bool {
case parser.CAQLLexerT_IN:
return in
default:
panic("unkown token type")
panic("unknown token type")
}
}
func all(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
func all(slice []any, op func(any, any) bool, expr any) bool {
for _, e := range slice {
if !op(e, expr) {
return false
}
}
return true
}
func any(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
func anyElement(slice []any, op func(any, any) bool, expr any) bool {
for _, e := range slice {
if op(e, expr) {
return true
}
}
return false
}
func none(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
func none(slice []any, op func(any, any) bool, expr any) bool {
for _, e := range slice {
if op(e, expr) {
return false
}
}
return true
}

View File

@@ -10,21 +10,23 @@ import (
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
func or(left, right interface{}) interface{} {
func or(left, right any) any {
if toBool(left) {
return left
}
return right
}
func and(left, right interface{}) interface{} {
func and(left, right any) any {
if !toBool(left) {
return left
}
return right
}
func toBool(i interface{}) bool {
func toBool(i any) bool {
switch v := i.(type) {
case nil:
return false
@@ -36,9 +38,9 @@ func toBool(i interface{}) bool {
return v != 0
case string:
return v != ""
case []interface{}:
case []any:
return true
case map[string]interface{}:
case map[string]any:
return true
default:
panic("bool conversion failed")
@@ -47,15 +49,15 @@ func toBool(i interface{}) bool {
// Arithmetic operators https://www.arangodb.com/docs/3.7/aql/operators.html#arithmetic-operators
func plus(left, right interface{}) float64 {
func plus(left, right any) float64 {
return toNumber(left) + toNumber(right)
}
func minus(left, right interface{}) float64 {
func minus(left, right any) float64 {
return toNumber(left) - toNumber(right)
}
func times(left, right interface{}) float64 {
func times(left, right any) float64 {
return round(toNumber(left) * toNumber(right))
}
@@ -63,19 +65,20 @@ func round(r float64) float64 {
return math.Round(r*100000) / 100000
}
func div(left, right interface{}) float64 {
func div(left, right any) float64 {
b := toNumber(right)
if b == 0 {
return 0
}
return round(toNumber(left) / b)
}
func mod(left, right interface{}) float64 {
func mod(left, right any) float64 {
return math.Mod(toNumber(left), toNumber(right))
}
func toNumber(i interface{}) float64 {
func toNumber(i any) float64 {
switch v := i.(type) {
case nil:
return 0
@@ -83,6 +86,7 @@ func toNumber(i interface{}) float64 {
if v {
return 1
}
return 0
case float64:
switch {
@@ -91,22 +95,25 @@ func toNumber(i interface{}) float64 {
case math.IsInf(v, 0):
return 0
}
return v
case string:
f, err := strconv.ParseFloat(strings.TrimSpace(v), 64)
if err != nil {
return 0
}
return f
case []interface{}:
case []any:
if len(v) == 0 {
return 0
}
if len(v) == 1 {
return toNumber(v[0])
}
return 0
case map[string]interface{}:
case map[string]any:
return 0
default:
panic("number conversion error")
@@ -116,7 +123,7 @@ func toNumber(i interface{}) float64 {
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
// Order https://www.arangodb.com/docs/3.7/aql/fundamentals-type-value-order.html
func eq(left, right interface{}) bool {
func eq(left, right any) bool {
leftV, rightV := typeValue(left), typeValue(right)
if leftV != rightV {
return false
@@ -126,15 +133,15 @@ func eq(left, right interface{}) bool {
return true
case bool, float64, string:
return left == right
case []interface{}:
ra := right.([]interface{})
case []any:
ra := right.([]any)
max := len(l)
if len(ra) > max {
max = len(ra)
}
for i := 0; i < max; i++ {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if len(l) > i {
li = l[i]
}
@@ -146,13 +153,14 @@ func eq(left, right interface{}) bool {
return false
}
}
return true
case map[string]interface{}:
ro := right.(map[string]interface{})
case map[string]any:
ro := right.(map[string]any)
for _, key := range keys(l, ro) {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if lv, ok := l[key]; ok {
li = lv
}
@@ -164,17 +172,18 @@ func eq(left, right interface{}) bool {
return false
}
}
return true
default:
panic("unknown type")
}
}
func ne(left, right interface{}) bool {
func ne(left, right any) bool {
return !eq(left, right)
}
func lt(left, right interface{}) bool {
func lt(left, right any) bool {
leftV, rightV := typeValue(left), typeValue(right)
if leftV != rightV {
return leftV < rightV
@@ -190,15 +199,15 @@ func lt(left, right interface{}) bool {
return l < right.(float64)
case string:
return l < right.(string)
case []interface{}:
ra := right.([]interface{})
case []any:
ra := right.([]any)
max := len(l)
if len(ra) > max {
max = len(ra)
}
for i := 0; i < max; i++ {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if len(l) > i {
li = l[i]
}
@@ -210,13 +219,14 @@ func lt(left, right interface{}) bool {
return lt(li, rai)
}
}
return false
case map[string]interface{}:
ro := right.(map[string]interface{})
case map[string]any:
ro := right.(map[string]any)
for _, key := range keys(l, ro) {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if lv, ok := l[key]; ok {
li = lv
}
@@ -228,16 +238,17 @@ func lt(left, right interface{}) bool {
return lt(li, rai)
}
}
return false
default:
panic("unknown type")
}
}
func keys(l map[string]interface{}, ro map[string]interface{}) []string {
func keys(l map[string]any, ro map[string]any) []string {
var keys []string
seen := map[string]bool{}
for _, a := range []map[string]interface{}{l, ro} {
for _, a := range []map[string]any{l, ro} {
for k := range a {
if _, ok := seen[k]; !ok {
seen[k] = true
@@ -246,10 +257,11 @@ func keys(l map[string]interface{}, ro map[string]interface{}) []string {
}
}
sort.Strings(keys)
return keys
}
func gt(left, right interface{}) bool {
func gt(left, right any) bool {
leftV, rightV := typeValue(left), typeValue(right)
if leftV != rightV {
return leftV > rightV
@@ -265,15 +277,15 @@ func gt(left, right interface{}) bool {
return l > right.(float64)
case string:
return l > right.(string)
case []interface{}:
ra := right.([]interface{})
case []any:
ra := right.([]any)
max := len(l)
if len(ra) > max {
max = len(ra)
}
for i := 0; i < max; i++ {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if len(l) > i {
li = l[i]
}
@@ -285,13 +297,14 @@ func gt(left, right interface{}) bool {
return gt(li, rai)
}
}
return false
case map[string]interface{}:
ro := right.(map[string]interface{})
case map[string]any:
ro := right.(map[string]any)
for _, key := range keys(l, ro) {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if lv, ok := l[key]; ok {
li = lv
}
@@ -303,13 +316,14 @@ func gt(left, right interface{}) bool {
return gt(li, rai)
}
}
return false
default:
panic("unknown type")
}
}
func le(left, right interface{}) bool {
func le(left, right any) bool {
leftV, rightV := typeValue(left), typeValue(right)
if leftV != rightV {
return leftV <= rightV
@@ -325,15 +339,15 @@ func le(left, right interface{}) bool {
return l <= right.(float64)
case string:
return l <= right.(string)
case []interface{}:
ra := right.([]interface{})
case []any:
ra := right.([]any)
max := len(l)
if len(ra) > max {
max = len(ra)
}
for i := 0; i < max; i++ {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if len(l) > i {
li = l[i]
}
@@ -345,13 +359,14 @@ func le(left, right interface{}) bool {
return le(li, rai)
}
}
return true
case map[string]interface{}:
ro := right.(map[string]interface{})
case map[string]any:
ro := right.(map[string]any)
for _, key := range keys(l, ro) {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if lv, ok := l[key]; ok {
li = lv
}
@@ -363,13 +378,14 @@ func le(left, right interface{}) bool {
return lt(li, rai)
}
}
return true
default:
panic("unknown type")
}
}
func ge(left, right interface{}) bool {
func ge(left, right any) bool {
leftV, rightV := typeValue(left), typeValue(right)
if leftV != rightV {
return leftV >= rightV
@@ -385,15 +401,15 @@ func ge(left, right interface{}) bool {
return l >= right.(float64)
case string:
return l >= right.(string)
case []interface{}:
ra := right.([]interface{})
case []any:
ra := right.([]any)
max := len(l)
if len(ra) > max {
max = len(ra)
}
for i := 0; i < max; i++ {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if len(l) > i {
li = l[i]
}
@@ -405,13 +421,14 @@ func ge(left, right interface{}) bool {
return ge(li, rai)
}
}
return true
case map[string]interface{}:
ro := right.(map[string]interface{})
case map[string]any:
ro := right.(map[string]any)
for _, key := range keys(l, ro) {
var li interface{} = nil
var rai interface{} = nil
var li any
var rai any
if lv, ok := l[key]; ok {
li = lv
}
@@ -423,14 +440,15 @@ func ge(left, right interface{}) bool {
return gt(li, rai)
}
}
return true
default:
panic("unknown type")
}
}
func in(left, right interface{}) bool {
a, ok := right.([]interface{})
func in(left, right any) bool {
a, ok := right.([]any)
if !ok {
return false
}
@@ -439,23 +457,25 @@ func in(left, right interface{}) bool {
return true
}
}
return false
}
func like(left, right interface{}) (bool, error) {
func like(left, right any) (bool, error) {
return match(right.(string), left.(string))
}
func regexMatch(left, right interface{}) (bool, error) {
func regexMatch(left, right any) (bool, error) {
return regexp.Match(right.(string), []byte(left.(string)))
}
func regexNonMatch(left, right interface{}) (bool, error) {
func regexNonMatch(left, right any) (bool, error) {
m, err := regexp.Match(right.(string), []byte(left.(string)))
return !m, err
}
func typeValue(v interface{}) int {
func typeValue(v any) int {
switch v.(type) {
case nil:
return 0
@@ -465,9 +485,9 @@ func typeValue(v interface{}) int {
return 2
case string:
return 3
case []interface{}:
case []any:
return 4
case map[string]interface{}:
case map[string]any:
return 5
default:
panic("unknown type")
@@ -476,22 +496,25 @@ func typeValue(v interface{}) int {
// Ternary operator https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator
func ternary(left, middle, right interface{}) interface{} {
// ternary implements the AQL ternary operator. A truthy left yields
// middle when middle is non-nil (the "a ? b : c" form), or left itself
// when middle is nil (the shorthand "a ?: c" form); a falsy left yields
// right.
func ternary(left, middle, right any) any {
	if !toBool(left) {
		return right
	}
	if middle == nil {
		return left
	}
	return middle
}
// Range operators https://www.arangodb.com/docs/3.7/aql/operators.html#range-operator
func aqlrange(left, right interface{}) []float64 {
// aqlrange implements the AQL range operator (a..b): it returns the
// inclusive sequence of numbers from left to right, both asserted to be
// float64 and truncated to integers. When left exceeds right the result
// is a nil slice.
func aqlrange(left, right any) []float64 {
	lo, hi := int(left.(float64)), int(right.(float64))

	var out []float64
	for i := lo; i <= hi; i++ {
		out = append(out, float64(i))
	}

	return out
}

View File

@@ -21,7 +21,7 @@ func (p *Parser) Parse(aql string) (t *Tree, err error) {
err = fmt.Errorf("%s", r)
}
}()
// Setup the input
// Set up the input
inputStream := antlr.NewInputStream(aql)
errorListener := &errorListener{}
@@ -52,7 +52,7 @@ type Tree struct {
prefix string
}
func (t *Tree) Eval(values map[string]interface{}) (i interface{}, err error) {
func (t *Tree) Eval(values map[string]any) (i any, err error) {
defer func() {
if r := recover(); r != nil {
err = fmt.Errorf("%s", r)
@@ -65,6 +65,7 @@ func (t *Tree) Eval(values map[string]interface{}) (i interface{}, err error) {
if interpreter.errs != nil {
return nil, interpreter.errs[0]
}
return interpreter.stack[0], nil
}
@@ -103,7 +104,7 @@ type errorListener struct {
errs []error
}
func (el *errorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol interface{}, line, column int, msg string, e antlr.RecognitionException) {
func (el *errorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, line, column int, msg string, e antlr.RecognitionException) {
el.errs = append(el.errs, fmt.Errorf("line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg))
}

View File

@@ -1,9 +1,11 @@
package caql
package caql_test
import (
"encoding/json"
"reflect"
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
type MockSearcher struct{}
@@ -13,11 +15,13 @@ func (m MockSearcher) Search(_ string) (ids []string, err error) {
}
func TestParseSAQLEval(t *testing.T) {
t.Parallel()
tests := []struct {
name string
saql string
wantRebuild string
wantValue interface{}
wantValue any
wantParseErr bool
wantRebuildErr bool
wantEvalErr bool
@@ -89,15 +93,15 @@ func TestParseSAQLEval(t *testing.T) {
// {name: "String 9", saql: `'this is a longer string.'`, wantRebuild: `"this is a longer string."`, wantValue: "this is a longer string."},
// {name: "String 10", saql: `'the path separator on Windows is \\'`, wantRebuild: `"the path separator on Windows is \\"`, wantValue: `the path separator on Windows is \`},
{name: "Array 1", saql: "[]", wantRebuild: "[]", wantValue: []interface{}{}},
{name: "Array 2", saql: `[true]`, wantRebuild: `[true]`, wantValue: []interface{}{true}},
{name: "Array 3", saql: `[1, 2, 3]`, wantRebuild: `[1, 2, 3]`, wantValue: []interface{}{float64(1), float64(2), float64(3)}},
{name: "Array 1", saql: "[]", wantRebuild: "[]", wantValue: []any{}},
{name: "Array 2", saql: `[true]`, wantRebuild: `[true]`, wantValue: []any{true}},
{name: "Array 3", saql: `[1, 2, 3]`, wantRebuild: `[1, 2, 3]`, wantValue: []any{float64(1), float64(2), float64(3)}},
{
name: "Array 4", saql: `[-99, "yikes!", [false, ["no"], []], 1]`, wantRebuild: `[-99, "yikes!", [false, ["no"], []], 1]`,
wantValue: []interface{}{-99.0, "yikes!", []interface{}{false, []interface{}{"no"}, []interface{}{}}, float64(1)},
wantValue: []any{-99.0, "yikes!", []any{false, []any{"no"}, []any{}}, float64(1)},
},
{name: "Array 5", saql: `[["fox", "marshal"]]`, wantRebuild: `[["fox", "marshal"]]`, wantValue: []interface{}{[]interface{}{"fox", "marshal"}}},
{name: "Array 6", saql: `[1, 2, 3,]`, wantRebuild: `[1, 2, 3]`, wantValue: []interface{}{float64(1), float64(2), float64(3)}},
{name: "Array 5", saql: `[["fox", "marshal"]]`, wantRebuild: `[["fox", "marshal"]]`, wantValue: []any{[]any{"fox", "marshal"}}},
{name: "Array 6", saql: `[1, 2, 3,]`, wantRebuild: `[1, 2, 3]`, wantValue: []any{float64(1), float64(2), float64(3)}},
{name: "Array Error 1", saql: "(1,2,3)", wantParseErr: true},
{name: "Array Access 1", saql: "u.friends[0]", wantRebuild: "u.friends[0]", wantValue: 7, values: `{"u": {"friends": [7,8,9]}}`},
@@ -105,14 +109,14 @@ func TestParseSAQLEval(t *testing.T) {
{name: "Array Access 3", saql: "u.friends[-1]", wantRebuild: "u.friends[-1]", wantValue: 9, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 4", saql: "u.friends[-2]", wantRebuild: "u.friends[-2]", wantValue: 8, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Object 1", saql: "{}", wantRebuild: "{}", wantValue: map[string]interface{}{}},
{name: "Object 2", saql: `{a: 1}`, wantRebuild: "{a: 1}", wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 3", saql: `{'a': 1}`, wantRebuild: `{'a': 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 4", saql: `{"a": 1}`, wantRebuild: `{"a": 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 5", saql: `{'return': 1}`, wantRebuild: `{'return': 1}`, wantValue: map[string]interface{}{"return": float64(1)}},
{name: "Object 6", saql: `{"return": 1}`, wantRebuild: `{"return": 1}`, wantValue: map[string]interface{}{"return": float64(1)}},
{name: "Object 9", saql: `{a: 1,}`, wantRebuild: "{a: 1}", wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 10", saql: `{"a": 1,}`, wantRebuild: `{"a": 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 1", saql: "{}", wantRebuild: "{}", wantValue: map[string]any{}},
{name: "Object 2", saql: `{a: 1}`, wantRebuild: "{a: 1}", wantValue: map[string]any{"a": float64(1)}},
{name: "Object 3", saql: `{'a': 1}`, wantRebuild: `{'a': 1}`, wantValue: map[string]any{"a": float64(1)}},
{name: "Object 4", saql: `{"a": 1}`, wantRebuild: `{"a": 1}`, wantValue: map[string]any{"a": float64(1)}},
{name: "Object 5", saql: `{'return': 1}`, wantRebuild: `{'return': 1}`, wantValue: map[string]any{"return": float64(1)}},
{name: "Object 6", saql: `{"return": 1}`, wantRebuild: `{"return": 1}`, wantValue: map[string]any{"return": float64(1)}},
{name: "Object 9", saql: `{a: 1,}`, wantRebuild: "{a: 1}", wantValue: map[string]any{"a": float64(1)}},
{name: "Object 10", saql: `{"a": 1,}`, wantRebuild: `{"a": 1}`, wantValue: map[string]any{"a": float64(1)}},
// {"Object 8", "{`return`: 1}", `{"return": 1}`, true},
// {"Object 7", "{´return´: 1}", `{"return": 1}`, true},
{name: "Object Error 1: return is a keyword", saql: `{like: 1}`, wantParseErr: true},
@@ -272,7 +276,7 @@ func TestParseSAQLEval(t *testing.T) {
{name: "Arithmetic 17", saql: `23 * {}`, wantRebuild: `23 * {}`, wantValue: 0},
{name: "Arithmetic 18", saql: `5 * [7]`, wantRebuild: `5 * [7]`, wantValue: 35},
{name: "Arithmetic 19", saql: `24 / "12"`, wantRebuild: `24 / "12"`, wantValue: 2},
{name: "Arithmetic Error 1: Divison by zero", saql: `1 / 0`, wantRebuild: `1 / 0`, wantValue: 0},
{name: "Arithmetic Error 1: Division by zero", saql: `1 / 0`, wantRebuild: `1 / 0`, wantValue: 0},
// https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator
{name: "Ternary 1", saql: `u.age > 15 || u.active == true ? u.userId : null`, wantRebuild: `u.age > 15 OR u.active == true ? u.userId : null`, wantValue: 45, values: `{"u": {"active": true, "age": 2, "userId": 45}}`},
@@ -287,20 +291,24 @@ func TestParseSAQLEval(t *testing.T) {
{name: "Security 2", saql: `doc.value == 1 || true INSERT {foo: "bar"} IN collection //`, wantParseErr: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#operator-precedence
{name: "Precendence", saql: `2 > 15 && "a" != ""`, wantRebuild: `2 > 15 AND "a" != ""`, wantValue: false},
{name: "Precedence", saql: `2 > 15 && "a" != ""`, wantRebuild: `2 > 15 AND "a" != ""`, wantValue: false},
}
for _, tt := range tests {
parser := &Parser{
tt := tt
parser := &caql.Parser{
Searcher: &MockSearcher{},
}
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
@@ -311,6 +319,7 @@ func TestParseSAQLEval(t *testing.T) {
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
@@ -320,18 +329,19 @@ func TestParseSAQLEval(t *testing.T) {
t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
}
var myJson map[string]interface{}
var myJSON map[string]any
if tt.values != "" {
err = json.Unmarshal([]byte(tt.values), &myJson)
err = json.Unmarshal([]byte(tt.values), &myJSON)
if err != nil {
t.Fatal(err)
}
}
value, err := expr.Eval(myJson)
value, err := expr.Eval(myJSON)
if (err != nil) != tt.wantEvalErr {
t.Error(expr.String())
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {

View File

@@ -22,19 +22,18 @@
package caql
import "sort"
type (
Set struct {
hash map[interface{}]nothing
}
nothing struct{}
import (
"sort"
)
// Create a new set
func New(initial ...interface{}) *Set {
s := &Set{make(map[interface{}]nothing)}
type Set struct {
hash map[any]nothing
}
type nothing struct{}
func NewSet(initial ...any) *Set {
s := &Set{make(map[any]nothing)}
for _, v := range initial {
s.Insert(v)
@@ -43,9 +42,8 @@ func New(initial ...interface{}) *Set {
return s
}
// Find the difference between two sets
func (s *Set) Difference(set *Set) *Set {
n := make(map[interface{}]nothing)
n := make(map[any]nothing)
for k := range s.hash {
if _, exists := set.hash[k]; !exists {
@@ -56,27 +54,18 @@ func (s *Set) Difference(set *Set) *Set {
return &Set{n}
}
// Call f for each item in the set
func (s *Set) Do(f func(interface{})) {
for k := range s.hash {
f(k)
}
}
// Test to see whether or not the element is in the set
func (s *Set) Has(element interface{}) bool {
func (s *Set) Has(element any) bool {
_, exists := s.hash[element]
return exists
}
// Add an element to the set
func (s *Set) Insert(element interface{}) {
func (s *Set) Insert(element any) {
s.hash[element] = nothing{}
}
// Find the intersection of two sets
func (s *Set) Intersection(set *Set) *Set {
n := make(map[interface{}]nothing)
n := make(map[any]nothing)
for k := range s.hash {
if _, exists := set.hash[k]; exists {
@@ -87,23 +76,20 @@ func (s *Set) Intersection(set *Set) *Set {
return &Set{n}
}
// Return the number of items in the set
func (s *Set) Len() int {
return len(s.hash)
}
// Test whether or not this set is a proper subset of "set"
func (s *Set) ProperSubsetOf(set *Set) bool {
return s.SubsetOf(set) && s.Len() < set.Len()
}
// Remove an element from the set
func (s *Set) Remove(element interface{}) {
func (s *Set) Remove(element any) {
delete(s.hash, element)
}
func (s *Set) Minus(set *Set) *Set {
n := make(map[interface{}]nothing)
n := make(map[any]nothing)
for k := range s.hash {
n[k] = nothing{}
}
@@ -115,7 +101,6 @@ func (s *Set) Minus(set *Set) *Set {
return &Set{n}
}
// Test whether or not this set is a subset of "set"
func (s *Set) SubsetOf(set *Set) bool {
if s.Len() > set.Len() {
return false
@@ -125,12 +110,12 @@ func (s *Set) SubsetOf(set *Set) bool {
return false
}
}
return true
}
// Find the union of two sets
func (s *Set) Union(set *Set) *Set {
n := make(map[interface{}]nothing)
n := make(map[any]nothing)
for k := range s.hash {
n[k] = nothing{}
@@ -142,8 +127,8 @@ func (s *Set) Union(set *Set) *Set {
return &Set{n}
}
func (s *Set) Values() []interface{} {
values := []interface{}{}
func (s *Set) Values() []any {
values := []any{}
for k := range s.hash {
values = append(values, k)

View File

@@ -27,7 +27,9 @@ import (
)
func Test(t *testing.T) {
s := New()
t.Parallel()
s := NewSet()
s.Insert(5)
@@ -50,8 +52,8 @@ func Test(t *testing.T) {
}
// Difference
s1 := New(1, 2, 3, 4, 5, 6)
s2 := New(4, 5, 6)
s1 := NewSet(1, 2, 3, 4, 5, 6)
s2 := NewSet(4, 5, 6)
s3 := s1.Difference(s2)
if s3.Len() != 3 {
@@ -73,7 +75,7 @@ func Test(t *testing.T) {
}
// Union
s4 := New(7, 8, 9)
s4 := NewSet(7, 8, 9)
s3 = s2.Union(s4)
if s3.Len() != 6 {
@@ -92,5 +94,4 @@ func Test(t *testing.T) {
if s1.ProperSubsetOf(s1) {
t.Errorf("set should not be a subset of itself")
}
}

View File

@@ -39,8 +39,10 @@ func unquote(s string) (string, error) {
buf = append(buf, s[i])
}
}
return string(buf), nil
}
return s, nil
}
if quote != '"' && quote != '\'' {
@@ -75,5 +77,6 @@ func unquote(s string) (string, error) {
buf = append(buf, runeTmp[:n]...)
}
}
return string(buf), nil
}

View File

@@ -8,26 +8,25 @@
package caql
import (
"errors"
"strconv"
"testing"
)
type quoteTest struct {
in string
out string
ascii string
graphic string
in string
out string
}
var quotetests = []quoteTest{
{in: "\a\b\f\r\n\t\v", out: `"\a\b\f\r\n\t\v"`, ascii: `"\a\b\f\r\n\t\v"`, graphic: `"\a\b\f\r\n\t\v"`},
{"\\", `"\\"`, `"\\"`, `"\\"`},
{"abc\xffdef", `"abc\xffdef"`, `"abc\xffdef"`, `"abc\xffdef"`},
{"\u263a", `"☺"`, `"\u263a"`, `"☺"`},
{"\U0010ffff", `"\U0010ffff"`, `"\U0010ffff"`, `"\U0010ffff"`},
{"\x04", `"\x04"`, `"\x04"`, `"\x04"`},
{in: "\a\b\f\r\n\t\v", out: `"\a\b\f\r\n\t\v"`},
{"\\", `"\\"`},
{"abc\xffdef", `"abc\xffdef"`},
{"\u263a", `"☺"`},
{"\U0010ffff", `"\U0010ffff"`},
{"\x04", `"\x04"`},
// Some non-printable but graphic runes. Final column is double-quoted.
{"!\u00a0!\u2000!\u3000!", `"!\u00a0!\u2000!\u3000!"`, `"!\u00a0!\u2000!\u3000!"`, "\"!\u00a0!\u2000!\u3000!\""},
{"!\u00a0!\u2000!\u3000!", `"!\u00a0!\u2000!\u3000!"`},
}
type unQuoteTest struct {
@@ -104,6 +103,8 @@ var misquoted = []string{
}
func TestUnquote(t *testing.T) {
t.Parallel()
for _, tt := range unquotetests {
if out, err := unquote(tt.in); err != nil || out != tt.out {
t.Errorf("unquote(%#q) = %q, %v want %q, nil", tt.in, out, err, tt.out)
@@ -118,7 +119,7 @@ func TestUnquote(t *testing.T) {
}
for _, s := range misquoted {
if out, err := unquote(s); out != "" || err != strconv.ErrSyntax {
if out, err := unquote(s); out != "" || !errors.Is(err, strconv.ErrSyntax) {
t.Errorf("unquote(%#q) = %q, %v want %q, %v", s, out, err, "", strconv.ErrSyntax)
}
}

View File

@@ -30,7 +30,6 @@ var ErrBadPattern = errors.New("syntax error in pattern")
// match requires pattern to match all of name, not just a substring.
// The only possible returned error is ErrBadPattern, when pattern
// is malformed.
//
func match(pattern, name string) (matched bool, err error) {
Pattern:
for len(pattern) > 0 {
@@ -48,6 +47,7 @@ Pattern:
// using the star
if ok && (len(t) == 0 || len(pattern) > 0) {
name = t
continue
}
if err != nil {
@@ -64,6 +64,7 @@ Pattern:
continue
}
name = t
continue Pattern
}
if err != nil {
@@ -79,8 +80,10 @@ Pattern:
return false, err
}
}
return false, nil
}
return len(name) == 0, nil
}
@@ -104,6 +107,7 @@ Scan:
break Scan
}
}
return star, pattern[0:i], pattern[i:]
}
@@ -120,7 +124,6 @@ func matchChunk(chunk, s string) (rest string, ok bool, err error) {
failed = true
}
switch chunk[0] {
case '_':
if !failed {
if s[0] == '/' {
@@ -130,14 +133,13 @@ func matchChunk(chunk, s string) (rest string, ok bool, err error) {
s = s[n:]
}
chunk = chunk[1:]
case '\\':
chunk = chunk[1:]
if len(chunk) == 0 {
return "", false, ErrBadPattern
}
fallthrough
fallthrough
default:
if !failed {
if chunk[0] != s[0] {
@@ -151,5 +153,6 @@ func matchChunk(chunk, s string) (rest string, ok bool, err error) {
if failed {
return "", false, nil
}
return s, true, nil
}

View File

@@ -7,7 +7,10 @@
package caql
import "testing"
import (
"errors"
"testing"
)
type MatchTest struct {
pattern, s string
@@ -41,9 +44,11 @@ var matchTests = []MatchTest{
}
func TestMatch(t *testing.T) {
t.Parallel()
for _, tt := range matchTests {
ok, err := match(tt.pattern, tt.s)
if ok != tt.match || err != tt.err {
if ok != tt.match || !errors.Is(err, tt.err) {
t.Errorf("match(%#q, %#q) = %v, %v want %v, %v", tt.pattern, tt.s, ok, err, tt.match, tt.err)
}
}

File diff suppressed because one or more lines are too long

View File

@@ -2,21 +2,20 @@ package main
import (
"context"
"fmt"
"log"
"net/http/httputil"
"net/url"
"net/http"
"time"
"github.com/arangodb/go-driver"
"github.com/gin-contrib/sessions"
"github.com/gin-contrib/sessions/cookie"
"github.com/gin-gonic/gin"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/test"
)
@@ -36,30 +35,45 @@ func main() {
log.Fatal(err)
}
demoUser := &models.UserResponse{ID: "demo", Roles: []string{role.Admin}}
ctx := busdb.UserContext(context.Background(), demoUser)
demoUser := &maut.User{ID: "demo", Roles: []string{maut.AdminRole}}
ctx := maut.UserContext(context.Background(), demoUser, catalyst.Admin.Permissions)
if err := test.SetupTestData(ctx, theCatalyst.DB); err != nil {
log.Fatal(err)
}
// proxy static requests
theCatalyst.Server.NoRoute(
sessions.Sessions(catalyst.SessionName, cookie.NewStore(config.Secret)),
catalyst.Authenticate(theCatalyst.DB, config.Auth),
catalyst.AuthorizeBlockedUser,
proxy,
)
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "eve", Roles: []string{"admin"}, Password: pointer.String("eve")})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "eve", &model.UserData{
Name: pointer.String("Eve"),
Email: pointer.String("eve@example.com"),
Image: &avatarEve,
})
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "kevin", Roles: []string{"admin"}, Password: pointer.String("kevin")})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "kevin", &model.UserData{
Name: pointer.String("Kevin"),
Email: pointer.String("kevin@example.com"),
Image: &avatarKevin,
})
if err = theCatalyst.Server.RunWithSigHandler(); err != nil {
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "tom", Roles: []string{"admin"}, Password: pointer.String("tom")})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "tom", &model.UserData{
Name: pointer.String("tom"),
Email: pointer.String("tom@example.com"),
Image: &avatarKevin,
})
// proxy static requests
theCatalyst.Server.Get("/ui/*", func(writer http.ResponseWriter, request *http.Request) {
log.Println("proxy request", request.URL.Path)
api.Proxy("http://localhost:8080/")(writer, request)
})
server := &http.Server{
Addr: fmt.Sprintf(":%d", config.Port),
ReadHeaderTimeout: 3 * time.Second,
Handler: theCatalyst.Server,
}
if err := server.ListenAndServe(); err != nil {
log.Fatal(err)
}
}
// proxy forwards the incoming request to http://localhost:8080 via a
// single-host reverse proxy — presumably the local UI dev server, per the
// "proxy static requests" note at the call site; confirm against caller.
func proxy(ctx *gin.Context) {
	// Constant, well-formed URL; the Parse error is deliberately ignored.
	u, _ := url.Parse("http://localhost:8080")
	proxy := httputil.NewSingleHostReverseProxy(u)
	// Rewrite the Host header so the upstream sees the target host.
	ctx.Request.Host = ctx.Request.URL.Host
	proxy.ServeHTTP(ctx.Writer, ctx.Request)
}

View File

@@ -1,11 +1,17 @@
package main
import (
"fmt"
"io/fs"
"log"
"net/http"
"time"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/ui"
)
func main() {
@@ -21,7 +27,16 @@ func main() {
log.Fatal(err)
}
if err = theCatalyst.Server.RunWithSigHandler(); err != nil {
fsys, _ := fs.Sub(ui.UI, "dist")
staticHandlerFunc := http.HandlerFunc(api.VueStatic(fsys))
theCatalyst.Server.Get("/ui/*", http.StripPrefix("/ui", staticHandlerFunc).ServeHTTP)
server := &http.Server{
Addr: fmt.Sprintf(":%d", config.Port),
ReadHeaderTimeout: 3 * time.Second,
Handler: theCatalyst.Server,
}
if err := server.ListenAndServe(); err != nil {
log.Fatal(err)
}
}

View File

@@ -1,19 +1,17 @@
package cmd
import (
"fmt"
"errors"
"github.com/alecthomas/kong"
kongyaml "github.com/alecthomas/kong-yaml"
"github.com/coreos/go-oidc/v3/oidc"
maut "github.com/jonas-plum/maut/auth"
"golang.org/x/exp/slices"
"golang.org/x/oauth2"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/storage"
)
@@ -21,16 +19,26 @@ type CLI struct {
Secret string `env:"SECRET" required:"" help:"A random secret value (can be created with 'openssl rand -hex 32')"`
ExternalAddress string `env:"EXTERNAL_ADDRESS" required:""`
CatalystAddress string `env:"CATALYST_ADDRESS" default:"http://catalyst:8000"`
Network string `env:"CATALYST_NETWORK" default:"catalyst"`
Port int `env:"PORT" default:"8000"`
OIDCIssuer string `env:"OIDC_ISSUER" required:""`
AuthBlockNew bool `env:"AUTH_BLOCK_NEW" default:"true" help:"Block newly created users"`
AuthDefaultRoles []string `env:"AUTH_DEFAULT_ROLES" help:"Default roles for new users"`
AuthAdminUsers []string `env:"AUTH_ADMIN_USERS" help:"Username of admins"`
InitialAPIKey string `env:"INITIAL_API_KEY"`
// SimpleAuthEnable bool `env:"SIMPLE_AUTH_ENABLE" default:"true"`
APIKeyAuthEnable bool `env:"API_KEY_AUTH_ENABLE" default:"true"`
OIDCEnable bool `env:"OIDC_ENABLE" default:"true"`
OIDCIssuer string `env:"OIDC_ISSUER"`
AuthURL string `env:"OIDC_AUTH_URL"`
OIDCClientID string `env:"OIDC_CLIENT_ID" default:"catalyst"`
OIDCClientSecret string `env:"OIDC_CLIENT_SECRET" required:""`
OIDCClientSecret string `env:"OIDC_CLIENT_SECRET"`
OIDCScopes []string `env:"OIDC_SCOPES" help:"Additional scopes, ['oidc', 'profile', 'email'] are always added." placeholder:"customscopes"`
OIDCClaimUsername string `env:"OIDC_CLAIM_USERNAME" default:"preferred_username" help:"username field in the OIDC claim"`
OIDCClaimEmail string `env:"OIDC_CLAIM_EMAIL" default:"email" help:"email field in the OIDC claim"`
OIDCClaimName string `env:"OIDC_CLAIM_NAME" default:"name" help:"name field in the OIDC claim"`
AuthBlockNew bool `env:"AUTH_BLOCK_NEW" default:"true" help:"Block newly created users"`
AuthDefaultRoles []string `env:"AUTH_DEFAULT_ROLES" help:"Default roles for new users"`
IndexPath string `env:"INDEX_PATH" default:"index.bleve" help:"Path for the bleve index"`
@@ -41,13 +49,6 @@ type CLI struct {
S3Host string `env:"S3_HOST" default:"http://minio:9000" name:"s3-host"`
S3User string `env:"S3_USER" default:"minio" name:"s3-user"`
S3Password string `env:"S3_PASSWORD" required:"" name:"s3-password"`
EmitterIOHost string `env:"EMITTER_IO_HOST" default:"tcp://emitter:8080"`
EmitterIORKey string `env:"EMITTER_IO_KEY" required:""`
Timeformat string `env:"TIMEFORMAT" default:"yyyy-MM-dd HH:mm:ss" help:""`
ArtifactStates []map[string]string `env:"ARTIFACT_STATES"`
InitialAPIKey string `env:"INITIAL_API_KEY"`
}
func ParseCatalystConfig() (*catalyst.Config, error) {
@@ -58,94 +59,58 @@ func ParseCatalystConfig() (*catalyst.Config, error) {
kong.Configuration(kongyaml.Loader, "/etc/catalyst.yaml", ".catalyst.yaml"),
)
if cli.OIDCEnable {
if cli.OIDCIssuer == "" {
return nil, errors.New("OIDC issuer not set")
}
if cli.OIDCClientSecret == "" {
return nil, errors.New("OIDC client secret is required")
}
}
return MapConfig(cli)
}
// MapConfig translates the parsed CLI/environment options into the
// catalyst server configuration.
//
// NOTE(review): this block appears to contain merged old/new lines from
// a diff render — there are two `scopes :=` declarations and duplicate
// `IndexPath`/`DB` fields in the struct literal below. As shown it does
// not compile; reconcile against the original file before relying on it.
func MapConfig(cli CLI) (*catalyst.Config, error) {
// Every user receives at least the exploded analyst role set; roles
// passed on the CLI are exploded and merged in as well.
roles := role.Explode(role.Analyst)
roles = append(roles, role.Explodes(cli.AuthDefaultRoles)...)
roles = role.Explodes(role.Strings(roles))
artifactStates, err := toTypes(cli.ArtifactStates)
if err != nil {
return nil, err
}
// Fall back to the built-in unknown/malicious/clean states when the
// CLI supplied none.
if len(artifactStates) == 0 {
artifactStates = []*models.Type{
{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(models.TypeColorInfo)},
{Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(models.TypeColorError)},
{Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(models.TypeColorSuccess)},
}
}
// NOTE(review): duplicate declaration (old vs. new diff line). Also note
// slices.Compact only removes *consecutive* duplicates, so it is not a
// drop-in replacement for unique() unless the slice is sorted — verify.
scopes := unique(append([]string{oidc.ScopeOpenID, "profile", "email"}, cli.OIDCScopes...))
scopes := slices.Compact(append([]string{oidc.ScopeOpenID, "profile", "email"}, cli.OIDCScopes...))
config := &catalyst.Config{
// NOTE(review): IndexPath and DB appear twice in this literal
// (diff overlay); duplicate keys are a compile error in Go.
IndexPath: cli.IndexPath,
DB: &database.Config{Host: cli.ArangoDBHost, User: cli.ArangoDBUser, Password: cli.ArangoDBPassword},
IndexPath: cli.IndexPath,
Network: cli.Network,
DB: &database.Config{
Host: cli.ArangoDBHost,
User: cli.ArangoDBUser,
Password: cli.ArangoDBPassword,
},
Storage: &storage.Config{Host: cli.S3Host, User: cli.S3User, Password: cli.S3Password},
Secret: []byte(cli.Secret),
ExternalAddress: cli.ExternalAddress,
// NOTE(review): old-style catalyst.AuthConfig block (pre-maut);
// superseded by the maut.Config block below in the new revision.
Auth: &catalyst.AuthConfig{
OIDCIssuer: cli.OIDCIssuer,
OAuth2: &oauth2.Config{ClientID: cli.OIDCClientID, ClientSecret: cli.OIDCClientSecret, RedirectURL: cli.ExternalAddress + "/callback", Scopes: scopes},
OIDCClaimUsername: cli.OIDCClaimUsername,
OIDCClaimEmail: cli.OIDCClaimEmail,
OIDCClaimName: cli.OIDCClaimName,
AuthBlockNew: cli.AuthBlockNew,
AuthDefaultRoles: roles,
InternalAddress: cli.CatalystAddress,
Port: cli.Port,
// maut-based authentication configuration (new revision).
Auth: &maut.Config{
CookieSecret: []byte(cli.Secret),
SimpleAuthEnable: false, // cli.SimpleAuthEnable,
APIKeyAuthEnable: cli.APIKeyAuthEnable,
OIDCAuthEnable: cli.OIDCEnable,
// InitialUser: "",
// InitialPassword: "",
InitialAPIKey: cli.InitialAPIKey,
OIDCIssuer: cli.OIDCIssuer,
AuthURL: cli.AuthURL,
OAuth2: &oauth2.Config{
ClientID: cli.OIDCClientID,
ClientSecret: cli.OIDCClientSecret,
RedirectURL: cli.ExternalAddress + "/auth/callback",
Scopes: scopes,
},
UserCreateConfig: &maut.UserCreateConfig{
AuthBlockNew: cli.AuthBlockNew,
AuthDefaultRoles: cli.AuthDefaultRoles,
AuthAdminUsers: cli.AuthAdminUsers,
OIDCClaimUsername: cli.OIDCClaimUsername,
OIDCClaimEmail: cli.OIDCClaimEmail,
OIDCClaimName: cli.OIDCClaimName,
},
},
Bus: &bus.Config{Host: cli.EmitterIOHost, Key: cli.EmitterIORKey, APIUrl: cli.CatalystAddress + "/api"},
UISettings: &models.Settings{
ArtifactStates: artifactStates,
Timeformat: cli.Timeformat,
Version: catalyst.GetVersion(),
Tier: models.SettingsTierCommunity,
},
InitialAPIKey: cli.InitialAPIKey,
}
return config, nil
}
// toTypes converts raw CLI type definitions (maps with "icon", "id",
// "name" and optional "color" keys) into model types.
//
// The first entry missing any of the three required keys aborts the
// conversion. The error names the offending entry; previously the whole
// params slice was interpolated with %s, which produced a misleading
// message.
func toTypes(params []map[string]string) ([]*models.Type, error) {
	var types []*models.Type
	for _, param := range params {
		icon, iconOK := param["icon"]
		id, idOK := param["id"]
		name, nameOK := param["name"]
		if !iconOK || !idOK || !nameOK {
			return nil, fmt.Errorf("incomplete type: icon, id and name need to be provided (%v)", param)
		}
		t := &models.Type{Icon: icon, ID: id, Name: name}
		// Color is optional; leave it nil when absent.
		if color, ok := param["color"]; ok {
			t.Color = pointer.String(color)
		}
		types = append(types, t)
	}
	return types, nil
}
// unique returns l with duplicate entries removed, keeping the first
// occurrence of each value and preserving the original order.
func unique(l []string) []string {
	seen := make(map[string]struct{}, len(l))
	var out []string
	for _, s := range l {
		if _, dup := seen[s]; dup {
			continue
		}
		seen[s] = struct{}{}
		out = append(out, s)
	}
	return out
}

View File

@@ -25,6 +25,9 @@ package dag
import (
"errors"
"sort"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)
type Graph struct {
@@ -52,6 +55,7 @@ func (g *Graph) AddNode(name string) error {
}
g.outputs[name] = make(map[string]struct{})
g.inputs[name] = 0
return nil
}
@@ -61,6 +65,7 @@ func (g *Graph) AddNodes(names ...string) error {
return err
}
}
return nil
}
@@ -101,7 +106,9 @@ func (g *Graph) Toposort() ([]string, error) {
L = append(L, n)
ms := make([]string, len(outputs[n]))
for _, k := range keys(outputs[n]) {
keys := maps.Keys(outputs[n])
slices.Sort(keys)
for _, k := range keys {
m := k
// i := outputs[n][m]
// ms[i-1] = m
@@ -130,15 +137,6 @@ func (g *Graph) Toposort() ([]string, error) {
return L, nil
}
// keys returns the keys of m in ascending lexical order.
func keys(m map[string]struct{}) []string {
	var ks []string
	for k := range m {
		ks = append(ks, k)
	}
	sort.Strings(ks)
	return ks
}
func (g *Graph) GetParents(id string) []string {
var parents []string
for node, targets := range g.outputs {
@@ -147,6 +145,7 @@ func (g *Graph) GetParents(id string) []string {
}
}
sort.Strings(parents)
return parents
}
@@ -160,5 +159,6 @@ func (g *Graph) GetRoot() (string, error) {
if len(roots) != 1 {
return "", errors.New("more than one root")
}
return roots[0], nil
}

View File

@@ -20,23 +20,17 @@
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package dag
package dag_test
import (
"reflect"
"testing"
"github.com/stretchr/testify/assert"
)
"golang.org/x/exp/slices"
// index returns the position of v within s, or -1 when v is absent.
func index(s []string, v string) int {
	for i := range s {
		if s[i] == v {
			return i
		}
	}
	return -1
}
"github.com/SecurityBrewery/catalyst/dag"
)
type Edge struct {
From string
@@ -44,13 +38,17 @@ type Edge struct {
}
func TestDuplicatedNode(t *testing.T) {
graph := NewGraph()
t.Parallel()
graph := dag.NewGraph()
assert.NoError(t, graph.AddNode("a"))
assert.Error(t, graph.AddNode("a"))
}
func TestWikipedia(t *testing.T) {
graph := NewGraph()
t.Parallel()
graph := dag.NewGraph()
assert.NoError(t, graph.AddNodes("2", "3", "5", "7", "8", "9", "10", "11"))
edges := []Edge{
@@ -79,27 +77,30 @@ func TestWikipedia(t *testing.T) {
}
for _, e := range edges {
if i, j := index(result, e.From), index(result, e.To); i > j {
if i, j := slices.Index(result, e.From), slices.Index(result, e.To); i > j {
t.Errorf("dependency failed: not satisfy %v(%v) > %v(%v)", e.From, i, e.To, j)
}
}
}
func TestCycle(t *testing.T) {
graph := NewGraph()
t.Parallel()
graph := dag.NewGraph()
assert.NoError(t, graph.AddNodes("1", "2", "3"))
assert.NoError(t, graph.AddEdge("1", "2"))
assert.NoError(t, graph.AddEdge("2", "3"))
assert.NoError(t, graph.AddEdge("3", "1"))
_, err := graph.Toposort()
if err == nil {
if _, err := graph.Toposort(); err == nil {
t.Errorf("closed path not detected in closed pathed graph")
}
}
func TestGraph_GetParents(t *testing.T) {
t.Parallel()
type fields struct {
nodes []string
edges map[string]string
@@ -117,8 +118,11 @@ func TestGraph_GetParents(t *testing.T) {
{"parents 3", fields{nodes: []string{"1", "2", "3"}, edges: map[string]string{"1": "3", "2": "3"}}, args{id: "3"}, []string{"1", "2"}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
g := NewGraph()
t.Parallel()
g := dag.NewGraph()
for _, node := range tt.fields.nodes {
assert.NoError(t, g.AddNode(node))
}
@@ -134,7 +138,9 @@ func TestGraph_GetParents(t *testing.T) {
}
func TestDAG_AddNode(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
v := "1"
assert.NoError(t, dag.AddNode(v))
@@ -143,7 +149,9 @@ func TestDAG_AddNode(t *testing.T) {
}
func TestDAG_AddEdge(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNode("0"))
assert.NoError(t, dag.AddNode("1"))
assert.NoError(t, dag.AddNode("2"))
@@ -162,7 +170,9 @@ func TestDAG_AddEdge(t *testing.T) {
}
func TestDAG_GetParents(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNode("1"))
assert.NoError(t, dag.AddNode("2"))
assert.NoError(t, dag.AddNode("3"))
@@ -176,7 +186,9 @@ func TestDAG_GetParents(t *testing.T) {
}
func TestDAG_GetDescendants(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNode("1"))
assert.NoError(t, dag.AddNode("2"))
assert.NoError(t, dag.AddNode("3"))
@@ -188,7 +200,9 @@ func TestDAG_GetDescendants(t *testing.T) {
}
func TestDAG_Topsort(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNode("1"))
assert.NoError(t, dag.AddNode("2"))
assert.NoError(t, dag.AddNode("3"))
@@ -203,7 +217,9 @@ func TestDAG_Topsort(t *testing.T) {
}
func TestDAG_TopsortStable(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNode("1"))
assert.NoError(t, dag.AddNode("2"))
assert.NoError(t, dag.AddNode("3"))
@@ -216,7 +232,9 @@ func TestDAG_TopsortStable(t *testing.T) {
}
func TestDAG_TopsortStable2(t *testing.T) {
dag := NewGraph()
t.Parallel()
dag := dag.NewGraph()
assert.NoError(t, dag.AddNodes("block-ioc", "block-iocs", "block-sender", "board", "fetch-iocs", "escalate", "extract-iocs", "mail-available", "search-email-gateway"))
assert.NoError(t, dag.AddEdge("block-iocs", "block-ioc"))

View File

@@ -3,15 +3,16 @@ package database
import (
"context"
"fmt"
"time"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*models.Artifact, error) {
func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*model.Artifact, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -22,7 +23,7 @@ func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*mo
FOR a in NOT_NULL(d.artifacts, [])
FILTER a.name == @name
RETURN a`
cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]interface{}{
cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]any{
"@collection": TicketCollectionName,
"ID": fmt.Sprint(id),
"name": name,
@@ -32,7 +33,7 @@ func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*mo
}
defer cursor.Close()
var doc models.Artifact
var doc model.Artifact
_, err = cursor.ReadDocument(ctx, &doc)
if err != nil {
return nil, err
@@ -41,7 +42,7 @@ func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*mo
return &doc, nil
}
func (db *Database) ArtifactUpdate(ctx context.Context, id int64, name string, artifact *models.Artifact) (*models.TicketWithTickets, error) {
func (db *Database) ArtifactUpdate(ctx context.Context, id int64, name string, artifact *model.Artifact) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -54,22 +55,22 @@ func (db *Database) ArtifactUpdate(ctx context.Context, id int64, name string, a
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), @artifact)
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"@collection": TicketCollectionName,
"ID": id,
"name": name,
"artifact": artifact,
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: fmt.Sprintf("Update artifact %s", name),
})
}
func (db *Database) EnrichArtifact(ctx context.Context, id int64, name string, enrichmentForm *models.EnrichmentForm) (*models.TicketWithTickets, error) {
enrichment := models.Enrichment{time.Now().UTC(), enrichmentForm.Data, enrichmentForm.Name}
func (db *Database) EnrichArtifact(ctx context.Context, id int64, name string, enrichmentForm *model.EnrichmentForm) (*model.TicketWithTickets, error) {
enrichment := model.Enrichment{Created: time.Now().UTC(), Data: enrichmentForm.Data, Name: enrichmentForm.Name}
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
@@ -78,24 +79,24 @@ func (db *Database) EnrichArtifact(ctx context.Context, id int64, name string, e
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN d.artifacts
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET enrichments = NOT_NULL(a.enrichments, {})
LET newenrichments = MERGE(enrichments, ZIP( [@enrichmentname], [@enrichment]) )
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), MERGE(a, { "enrichments": newenrichments }))
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"@collection": TicketCollectionName,
"ID": id,
"name": name,
"enrichmentname": enrichment.Name,
"enrichment": enrichment,
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: fmt.Sprintf("Run %s on artifact", enrichment.Name),
})
}

View File

@@ -7,11 +7,11 @@ import (
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toAutomation(doc *models.AutomationForm) interface{} {
return &models.Automation{
func toAutomation(doc *model.AutomationForm) *model.Automation {
return &model.Automation{
Image: doc.Image,
Script: doc.Script,
Schema: doc.Schema,
@@ -19,8 +19,8 @@ func toAutomation(doc *models.AutomationForm) interface{} {
}
}
func toAutomationResponse(id string, doc models.Automation) *models.AutomationResponse {
return &models.AutomationResponse{
func toAutomationResponse(id string, doc model.Automation) *model.AutomationResponse {
return &model.AutomationResponse{
ID: id,
Image: doc.Image,
Script: doc.Script,
@@ -29,7 +29,7 @@ func toAutomationResponse(id string, doc models.Automation) *models.AutomationRe
}
}
func (db *Database) AutomationCreate(ctx context.Context, automation *models.AutomationForm) (*models.AutomationResponse, error) {
func (db *Database) AutomationCreate(ctx context.Context, automation *model.AutomationForm) (*model.AutomationResponse, error) {
if automation == nil {
return nil, errors.New("requires automation")
}
@@ -37,7 +37,7 @@ func (db *Database) AutomationCreate(ctx context.Context, automation *models.Aut
return nil, errors.New("requires automation ID")
}
var doc models.Automation
var doc model.Automation
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.automationCollection.CreateDocument(ctx, newctx, automation.ID, toAutomation(automation))
@@ -48,8 +48,8 @@ func (db *Database) AutomationCreate(ctx context.Context, automation *models.Aut
return toAutomationResponse(meta.Key, doc), nil
}
func (db *Database) AutomationGet(ctx context.Context, id string) (*models.AutomationResponse, error) {
var doc models.Automation
func (db *Database) AutomationGet(ctx context.Context, id string) (*model.AutomationResponse, error) {
var doc model.Automation
meta, err := db.automationCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -58,8 +58,8 @@ func (db *Database) AutomationGet(ctx context.Context, id string) (*models.Autom
return toAutomationResponse(meta.Key, doc), nil
}
func (db *Database) AutomationUpdate(ctx context.Context, id string, automation *models.AutomationForm) (*models.AutomationResponse, error) {
var doc models.Automation
func (db *Database) AutomationUpdate(ctx context.Context, id string, automation *model.AutomationForm) (*model.AutomationResponse, error) {
var doc model.Automation
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.automationCollection.ReplaceDocument(ctx, id, toAutomation(automation))
@@ -72,19 +72,20 @@ func (db *Database) AutomationUpdate(ctx context.Context, id string, automation
func (db *Database) AutomationDelete(ctx context.Context, id string) error {
_, err := db.automationCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) AutomationList(ctx context.Context) ([]*models.AutomationResponse, error) {
func (db *Database) AutomationList(ctx context.Context) ([]*model.AutomationResponse, error) {
query := "FOR d IN @@collection SORT d._key ASC RETURN UNSET(d, 'script')"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": AutomationCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": AutomationCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.AutomationResponse
var docs []*model.AutomationResponse
for {
var doc models.Automation
var doc model.Automation
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break

View File

@@ -2,18 +2,15 @@ package busdb
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/generated/model"
)
type Hook interface {
PublishAction(action string, context, msg map[string]interface{}) error
PublishUpdate(col, id string) error
}
// BusDatabase
// 1. Save entry to log
// 2. Send update ticket to bus
@@ -38,98 +35,82 @@ func NewDatabase(ctx context.Context, internal driver.Database, b *bus.Bus) (*Bu
}, nil
}
type OperationType int
const (
Create OperationType = iota
Read = iota
Update = iota
)
type Operation struct {
OperationType OperationType
Ids []driver.DocumentID
Msg string
Type bus.DatabaseUpdateType
Ids []driver.DocumentID
}
var CreateOperation = &Operation{OperationType: Create}
var ReadOperation = &Operation{OperationType: Read}
var (
CreateOperation = &Operation{Type: bus.DatabaseEntryCreated}
ReadOperation = &Operation{Type: bus.DatabaseEntryRead}
)
func (db BusDatabase) Query(ctx context.Context, query string, vars map[string]interface{}, operation *Operation) (driver.Cursor, *models.LogEntry, error) {
cur, err := db.internal.Query(ctx, query, vars)
func (db *BusDatabase) Query(ctx context.Context, query string, vars map[string]any, operation *Operation) (cur driver.Cursor, logs *model.LogEntry, err error) {
defer func() { err = toHTTPErr(err) }()
cur, err = db.internal.Query(ctx, query, vars)
if err != nil {
return nil, nil, err
}
var logs *models.LogEntry
switch {
case operation.OperationType == Update:
if err := db.LogAndNotify(ctx, operation.Ids, operation.Msg); err != nil {
return nil, nil, err
}
case operation.Type == bus.DatabaseEntryCreated, operation.Type == bus.DatabaseEntryUpdated:
db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: operation.Ids, Type: operation.Type})
}
return cur, logs, err
}
func (db BusDatabase) LogAndNotify(ctx context.Context, ids []driver.DocumentID, msg string) error {
var logEntries []*models.LogEntry
for _, i := range ids {
logEntries = append(logEntries, &models.LogEntry{Reference: i.String(), Message: msg})
}
func (db *BusDatabase) Remove(ctx context.Context) (err error) {
defer func() { err = toHTTPErr(err) }()
if err := db.LogBatchCreate(ctx, logEntries); err != nil {
return err
}
return db.bus.PublishUpdate(ids)
}
func (db BusDatabase) Remove(ctx context.Context) error {
return db.internal.Remove(ctx)
}
func (db BusDatabase) Collection(ctx context.Context, name string) (driver.Collection, error) {
func (db *BusDatabase) Collection(ctx context.Context, name string) (col driver.Collection, err error) {
defer func() { err = toHTTPErr(err) }()
return db.internal.Collection(ctx, name)
}
type Collection struct {
type Collection[T any] struct {
internal driver.Collection
db *BusDatabase
}
func NewCollection(internal driver.Collection, db *BusDatabase) *Collection {
return &Collection{internal: internal, db: db}
func NewCollection[T any](internal driver.Collection, db *BusDatabase) *Collection[T] {
return &Collection[T]{internal: internal, db: db}
}
func (c Collection) CreateDocument(ctx, newctx context.Context, key string, document interface{}) (driver.DocumentMeta, error) {
meta, err := c.internal.CreateDocument(newctx, &Keyed{Key: key, Doc: document})
func (c *Collection[T]) CreateDocument(ctx, newctx context.Context, key string, document *T) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
meta, err = c.internal.CreateDocument(newctx, &Keyed[T]{Key: key, Doc: document})
if err != nil {
return meta, err
}
err = c.db.LogAndNotify(ctx, []driver.DocumentID{meta.ID}, "Document created")
if err != nil {
return meta, err
}
c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryCreated})
return meta, nil
}
func (c Collection) CreateEdge(ctx, newctx context.Context, edge *driver.EdgeDocument) (driver.DocumentMeta, error) {
meta, err := c.internal.CreateDocument(newctx, edge)
func (c *Collection[T]) CreateEdge(ctx, newctx context.Context, edge *driver.EdgeDocument) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
meta, err = c.internal.CreateDocument(newctx, edge)
if err != nil {
return meta, err
}
err = c.db.LogAndNotify(ctx, []driver.DocumentID{meta.ID}, "Document created")
if err != nil {
return meta, err
}
c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryCreated})
return meta, nil
}
func (c Collection) CreateEdges(ctx context.Context, edges []*driver.EdgeDocument) (driver.DocumentMetaSlice, error) {
func (c *Collection[T]) CreateEdges(ctx context.Context, edges []*driver.EdgeDocument) (meta driver.DocumentMetaSlice, err error) {
defer func() { err = toHTTPErr(err) }()
metas, errs, err := c.internal.CreateDocuments(ctx, edges)
if err != nil {
return nil, err
@@ -143,40 +124,72 @@ func (c Collection) CreateEdges(ctx context.Context, edges []*driver.EdgeDocumen
ids = append(ids, meta.ID)
}
err = c.db.LogAndNotify(ctx, ids, "Document created")
if err != nil {
return metas, err
}
c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: ids, Type: bus.DatabaseEntryCreated})
return metas, nil
}
func (c Collection) DocumentExists(ctx context.Context, id string) (bool, error) {
func (c *Collection[T]) DocumentExists(ctx context.Context, id string) (exists bool, err error) {
defer func() { err = toHTTPErr(err) }()
return c.internal.DocumentExists(ctx, id)
}
func (c Collection) ReadDocument(ctx context.Context, key string, result interface{}) (driver.DocumentMeta, error) {
return c.internal.ReadDocument(ctx, key, result)
func (c *Collection[T]) ReadDocument(ctx context.Context, key string, result *T) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
meta, err = c.internal.ReadDocument(ctx, key, result)
return
}
func (c Collection) UpdateDocument(ctx context.Context, key string, update interface{}) (driver.DocumentMeta, error) {
meta, err := c.internal.UpdateDocument(ctx, key, update)
func (c *Collection[T]) UpdateDocument(ctx context.Context, key string, update any) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
meta, err = c.internal.UpdateDocument(ctx, key, update)
if err != nil {
return meta, err
}
return meta, c.db.bus.PublishUpdate([]driver.DocumentID{meta.ID})
c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryUpdated})
return meta, nil
}
func (c Collection) ReplaceDocument(ctx context.Context, key string, document interface{}) (driver.DocumentMeta, error) {
meta, err := c.internal.ReplaceDocument(ctx, key, document)
func (c *Collection[T]) ReplaceDocument(ctx context.Context, key string, document *T) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
meta, err = c.internal.ReplaceDocument(ctx, key, document)
if err != nil {
return meta, err
}
return meta, c.db.bus.PublishUpdate([]driver.DocumentID{meta.ID})
c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryUpdated})
return meta, nil
}
func (c Collection) RemoveDocument(ctx context.Context, formatInt string) (driver.DocumentMeta, error) {
func (c *Collection[T]) RemoveDocument(ctx context.Context, formatInt string) (meta driver.DocumentMeta, err error) {
defer func() { err = toHTTPErr(err) }()
return c.internal.RemoveDocument(ctx, formatInt)
}
func (c *Collection[T]) Truncate(ctx context.Context) (err error) {
defer func() { err = toHTTPErr(err) }()
return c.internal.Truncate(ctx)
}
// toHTTPErr converts ArangoDB driver errors into API HTTP errors
// carrying the driver's status code; every other error — including a
// nil error — is passed through unchanged.
func toHTTPErr(err error) error {
	if err == nil {
		return nil
	}
	ae := driver.ArangoError{}
	if !errors.As(err, &ae) {
		return err
	}
	return &api.HTTPError{Status: ae.Code, Internal: err}
}

View File

@@ -1,34 +0,0 @@
package busdb
import (
"context"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/role"
)
const (
userContextKey = "user"
groupContextKey = "groups"
)
func SetContext(ctx *gin.Context, user *models.UserResponse) {
user.Roles = role.Strings(role.Explodes(user.Roles))
ctx.Set(userContextKey, user)
}
func SetGroupContext(ctx *gin.Context, groups []string) {
ctx.Set(groupContextKey, groups)
}
func UserContext(ctx context.Context, user *models.UserResponse) context.Context {
user.Roles = role.Strings(role.Explodes(user.Roles))
return context.WithValue(ctx, userContextKey, user)
}
func UserFromContext(ctx context.Context) (*models.UserResponse, bool) {
u, ok := ctx.Value(userContextKey).(*models.UserResponse)
return u, ok
}

View File

@@ -2,18 +2,18 @@ package busdb
import "encoding/json"
type Keyed struct {
type Keyed[T any] struct {
Key string
Doc interface{}
Doc *T
}
func (p Keyed) MarshalJSON() ([]byte, error) {
func (p *Keyed[T]) MarshalJSON() ([]byte, error) {
b, err := json.Marshal(p.Doc)
if err != nil {
panic(err)
}
var m map[string]interface{}
var m map[string]any
err = json.Unmarshal(b, &m)
if err != nil {
panic(err)

View File

@@ -3,55 +3,53 @@ package busdb
import (
"context"
"errors"
"time"
"strings"
"github.com/arangodb/go-driver"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
const LogCollectionName = "logs"
func (db *BusDatabase) LogCreate(ctx context.Context, id, message string) (*models.LogEntry, error) {
user, ok := UserFromContext(ctx)
func (db *BusDatabase) LogCreate(ctx context.Context, logType, reference, message string) (*model.LogEntry, error) {
user, _, ok := maut.UserFromContext(ctx)
if !ok {
return nil, errors.New("no user in context")
}
logentry := &models.LogEntry{
Reference: id,
Created: time.Now(),
logentry := &model.LogEntry{
Type: logType,
Reference: reference,
Created: time.Now().UTC(),
Creator: user.ID,
Message: message,
}
doc := models.LogEntry{}
doc := model.LogEntry{}
_, err := db.logCollection.CreateDocument(driver.WithReturnNew(ctx, &doc), logentry)
if err != nil {
return nil, err
}
return &doc, db.bus.PublishUpdate([]driver.DocumentID{driver.DocumentID(logentry.Reference)})
return &doc, nil
}
func (db *BusDatabase) LogBatchCreate(ctx context.Context, logEntryForms []*models.LogEntry) error {
user, ok := UserFromContext(ctx)
if !ok {
return errors.New("no user in context")
}
func (db *BusDatabase) LogBatchCreate(ctx context.Context, logentries []*model.LogEntry) error {
var ids []driver.DocumentID
var logentries []*models.LogEntry
for _, logEntryForm := range logEntryForms {
logentry := &models.LogEntry{
Reference: logEntryForm.Reference,
Created: time.Now(),
Creator: user.ID,
Message: logEntryForm.Message,
for _, entry := range logentries {
if strings.HasPrefix(entry.Reference, "tickets/") {
ids = append(ids, driver.DocumentID(entry.Reference))
}
logentries = append(logentries, logentry)
ids = append(ids, driver.DocumentID(logentry.Reference))
}
if ids != nil {
go db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{
IDs: ids,
Type: bus.DatabaseEntryCreated,
})
}
_, errs, err := db.logCollection.CreateDocuments(ctx, logentries)
@@ -63,12 +61,12 @@ func (db *BusDatabase) LogBatchCreate(ctx context.Context, logEntryForms []*mode
return err
}
return db.bus.PublishUpdate(ids)
return nil
}
func (db *BusDatabase) LogList(ctx context.Context, reference string) ([]*models.LogEntry, error) {
func (db *BusDatabase) LogList(ctx context.Context, reference string) ([]*model.LogEntry, error) {
query := "FOR d IN @@collection FILTER d.reference == @reference SORT d.created DESC RETURN d"
cursor, err := db.internal.Query(ctx, query, map[string]interface{}{
cursor, err := db.internal.Query(ctx, query, map[string]any{
"@collection": LogCollectionName,
"reference": reference,
})
@@ -76,9 +74,9 @@ func (db *BusDatabase) LogList(ctx context.Context, reference string) ([]*models
return nil, err
}
defer cursor.Close()
var docs []*models.LogEntry
var docs []*model.LogEntry
for {
var doc models.LogEntry
var doc model.LogEntry
_, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break

119
database/dashboard.go Normal file
View File

@@ -0,0 +1,119 @@
package database
import (
"context"
"errors"
"fmt"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toDashboardResponse builds the API representation of a stored
// dashboard, attaching its document key as the ID.
func toDashboardResponse(key string, doc *model.Dashboard) *model.DashboardResponse {
	resp := model.DashboardResponse{
		ID:      key,
		Name:    doc.Name,
		Widgets: doc.Widgets,
	}
	return &resp
}
// DashboardCreate validates and stores a new dashboard. The document
// key is derived from the kebab-cased dashboard name, and the stored
// document is returned as an API response.
func (db *Database) DashboardCreate(ctx context.Context, dashboard *model.Dashboard) (*model.DashboardResponse, error) {
	switch {
	case dashboard == nil:
		return nil, errors.New("requires dashboard")
	case dashboard.Name == "":
		return nil, errors.New("requires dashboard name")
	}
	// Reject dashboards whose widget queries do not parse.
	if err := db.parseWidgets(dashboard); err != nil {
		return nil, err
	}
	var created model.Dashboard
	newctx := driver.WithReturnNew(ctx, &created)
	meta, err := db.dashboardCollection.CreateDocument(ctx, newctx, strcase.ToKebab(dashboard.Name), dashboard)
	if err != nil {
		return nil, err
	}
	return toDashboardResponse(meta.Key, &created), nil
}
// DashboardGet loads the dashboard stored under the given document key.
func (db *Database) DashboardGet(ctx context.Context, id string) (*model.DashboardResponse, error) {
	var dashboard model.Dashboard
	meta, err := db.dashboardCollection.ReadDocument(ctx, id, &dashboard)
	if err != nil {
		return nil, err
	}
	return toDashboardResponse(meta.Key, &dashboard), nil
}
// DashboardUpdate validates the dashboard's widget queries and replaces
// the stored document identified by id, returning the new state.
func (db *Database) DashboardUpdate(ctx context.Context, id string, dashboard *model.Dashboard) (*model.DashboardResponse, error) {
	if err := db.parseWidgets(dashboard); err != nil {
		return nil, err
	}
	var replaced model.Dashboard
	meta, err := db.dashboardCollection.ReplaceDocument(driver.WithReturnNew(ctx, &replaced), id, dashboard)
	if err != nil {
		return nil, err
	}
	return toDashboardResponse(meta.Key, &replaced), nil
}
// DashboardDelete removes the dashboard with the given document key.
func (db *Database) DashboardDelete(ctx context.Context, id string) error {
	if _, err := db.dashboardCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}
	return nil
}
// DashboardList returns all stored dashboards as API responses.
func (db *Database) DashboardList(ctx context.Context) ([]*model.DashboardResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": DashboardCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.DashboardResponse
	for {
		var doc model.Dashboard
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toDashboardResponse(meta.Key, &doc))
	}
	// The outer err is provably nil here (checked after db.Query); return
	// nil explicitly instead of the stale variable, which previously made
	// the success path depend on loop-shadowing for correctness.
	return docs, nil
}
// parseWidgets validates the CAQL aggregation and optional filter query
// of every widget on the dashboard. It returns an error naming the
// first invalid query.
func (db *Database) parseWidgets(dashboard *model.Dashboard) error {
	// The parser is loop-invariant; construct it once instead of per widget.
	parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
	for _, widget := range dashboard.Widgets {
		if _, err := parser.Parse(widget.Aggregation); err != nil {
			// Wrap the parser error instead of discarding it so the caller
			// sees the root cause, not only a generic "syntax error".
			return fmt.Errorf("invalid aggregation query (%s): %w", widget.Aggregation, err)
		}
		if widget.Filter != nil {
			if _, err := parser.Parse(*widget.Filter); err != nil {
				return fmt.Errorf("invalid filter query (%s): %w", *widget.Filter, err)
			}
		}
	}
	return nil
}

View File

@@ -2,8 +2,10 @@ package database
import (
"context"
"errors"
"fmt"
"log"
"time"
"github.com/arangodb/go-driver"
"github.com/arangodb/go-driver/http"
@@ -11,6 +13,7 @@ import (
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/index"
)
@@ -25,6 +28,8 @@ const (
UserCollectionName = "users"
TicketTypeCollectionName = "tickettypes"
JobCollectionName = "jobs"
SettingsCollectionName = "settings"
DashboardCollectionName = "dashboards"
TicketArtifactsGraphName = "Graph"
RelatedTicketsCollectionName = "related"
@@ -36,17 +41,19 @@ type Database struct {
bus *bus.Bus
Hooks *hooks.Hooks
templateCollection *busdb.Collection
ticketCollection *busdb.Collection
playbookCollection *busdb.Collection
automationCollection *busdb.Collection
userdataCollection *busdb.Collection
userCollection *busdb.Collection
tickettypeCollection *busdb.Collection
jobCollection *busdb.Collection
templateCollection *busdb.Collection[model.TicketTemplate]
ticketCollection *busdb.Collection[model.Ticket]
playbookCollection *busdb.Collection[model.PlaybookTemplate]
automationCollection *busdb.Collection[model.Automation]
userdataCollection *busdb.Collection[model.UserData]
userCollection *busdb.Collection[model.User]
tickettypeCollection *busdb.Collection[model.TicketType]
jobCollection *busdb.Collection[model.Job]
settingsCollection *busdb.Collection[model.Settings]
dashboardCollection *busdb.Collection[model.Dashboard]
relatedCollection *busdb.Collection
containsCollection *busdb.Collection
relatedCollection *busdb.Collection[driver.EdgeDocument]
// containsCollection *busdb.Collection
}
type Config struct {
@@ -62,10 +69,115 @@ func New(ctx context.Context, index *index.Index, bus *bus.Bus, hooks *hooks.Hoo
name = Name
}
var err error
var client driver.Client
for {
deadline, ok := ctx.Deadline()
if ok && time.Until(deadline) < 0 {
return nil, context.DeadlineExceeded
}
client, err = getClient(ctx, config)
if err == nil {
break
}
if errors.Is(err, context.DeadlineExceeded) {
return nil, errors.New("could not load database, connection timed out")
}
log.Printf("could not connect to database: %s, retrying in 10 seconds\n", err)
time.Sleep(time.Second * 10)
}
hooks.DatabaseAfterConnect(ctx, client, name)
arangoDB, err := SetupDB(ctx, client, name)
if err != nil {
return nil, fmt.Errorf("DB setup failed: %w", err)
}
if err = migrations.PerformMigrations(ctx, arangoDB); err != nil {
return nil, fmt.Errorf("migrations failed: %w", err)
}
ticketCollection, err := arangoDB.Collection(ctx, TicketCollectionName)
if err != nil {
return nil, err
}
templateCollection, err := arangoDB.Collection(ctx, TemplateCollectionName)
if err != nil {
return nil, err
}
playbookCollection, err := arangoDB.Collection(ctx, PlaybookCollectionName)
if err != nil {
return nil, err
}
relatedCollection, err := arangoDB.Collection(ctx, RelatedTicketsCollectionName)
if err != nil {
return nil, err
}
automationCollection, err := arangoDB.Collection(ctx, AutomationCollectionName)
if err != nil {
return nil, err
}
userdataCollection, err := arangoDB.Collection(ctx, UserDataCollectionName)
if err != nil {
return nil, err
}
userCollection, err := arangoDB.Collection(ctx, UserCollectionName)
if err != nil {
return nil, err
}
tickettypeCollection, err := arangoDB.Collection(ctx, TicketTypeCollectionName)
if err != nil {
return nil, err
}
jobCollection, err := arangoDB.Collection(ctx, JobCollectionName)
if err != nil {
return nil, err
}
settingsCollection, err := arangoDB.Collection(ctx, SettingsCollectionName)
if err != nil {
return nil, err
}
dashboardCollection, err := arangoDB.Collection(ctx, DashboardCollectionName)
if err != nil {
return nil, err
}
hookedDB, err := busdb.NewDatabase(ctx, arangoDB, bus)
if err != nil {
return nil, err
}
db := &Database{
BusDatabase: hookedDB,
bus: bus,
Index: index,
Hooks: hooks,
templateCollection: busdb.NewCollection[model.TicketTemplate](templateCollection, hookedDB),
ticketCollection: busdb.NewCollection[model.Ticket](ticketCollection, hookedDB),
playbookCollection: busdb.NewCollection[model.PlaybookTemplate](playbookCollection, hookedDB),
automationCollection: busdb.NewCollection[model.Automation](automationCollection, hookedDB),
userdataCollection: busdb.NewCollection[model.UserData](userdataCollection, hookedDB),
userCollection: busdb.NewCollection[model.User](userCollection, hookedDB),
tickettypeCollection: busdb.NewCollection[model.TicketType](tickettypeCollection, hookedDB),
jobCollection: busdb.NewCollection[model.Job](jobCollection, hookedDB),
settingsCollection: busdb.NewCollection[model.Settings](settingsCollection, hookedDB),
dashboardCollection: busdb.NewCollection[model.Dashboard](dashboardCollection, hookedDB),
relatedCollection: busdb.NewCollection[driver.EdgeDocument](relatedCollection, hookedDB),
}
return db, nil
}
func getClient(ctx context.Context, config *Config) (driver.Client, error) {
conn, err := http.NewConnection(http.ConnectionConfig{Endpoints: []string{config.Host}})
if err != nil {
return nil, err
}
client, err := driver.NewClient(driver.ClientConfig{
Connection: conn,
Authentication: driver.BasicAuthentication(config.User, config.Password),
@@ -74,80 +186,17 @@ func New(ctx context.Context, index *index.Index, bus *bus.Bus, hooks *hooks.Hoo
return nil, err
}
hooks.DatabaseAfterConnect(ctx, client, name)
db, err := setupDB(ctx, client, name)
if err != nil {
return nil, fmt.Errorf("DB setup failed: %w", err)
}
if err = migrations.PerformMigrations(ctx, db); err != nil {
return nil, fmt.Errorf("migrations failed: %w", err)
}
ticketCollection, err := db.Collection(ctx, TicketCollectionName)
if err != nil {
return nil, err
}
templateCollection, err := db.Collection(ctx, TemplateCollectionName)
if err != nil {
return nil, err
}
playbookCollection, err := db.Collection(ctx, PlaybookCollectionName)
if err != nil {
return nil, err
}
relatedCollection, err := db.Collection(ctx, RelatedTicketsCollectionName)
if err != nil {
return nil, err
}
automationCollection, err := db.Collection(ctx, AutomationCollectionName)
if err != nil {
return nil, err
}
userdataCollection, err := db.Collection(ctx, UserDataCollectionName)
if err != nil {
return nil, err
}
userCollection, err := db.Collection(ctx, UserCollectionName)
if err != nil {
return nil, err
}
tickettypeCollection, err := db.Collection(ctx, TicketTypeCollectionName)
if err != nil {
return nil, err
}
jobCollection, err := db.Collection(ctx, JobCollectionName)
if err != nil {
if _, err := client.Version(ctx); err != nil {
return nil, err
}
hookedDB, err := busdb.NewDatabase(ctx, db, bus)
if err != nil {
return nil, err
}
return &Database{
BusDatabase: hookedDB,
bus: bus,
Index: index,
Hooks: hooks,
templateCollection: busdb.NewCollection(templateCollection, hookedDB),
ticketCollection: busdb.NewCollection(ticketCollection, hookedDB),
playbookCollection: busdb.NewCollection(playbookCollection, hookedDB),
automationCollection: busdb.NewCollection(automationCollection, hookedDB),
relatedCollection: busdb.NewCollection(relatedCollection, hookedDB),
userdataCollection: busdb.NewCollection(userdataCollection, hookedDB),
userCollection: busdb.NewCollection(userCollection, hookedDB),
tickettypeCollection: busdb.NewCollection(tickettypeCollection, hookedDB),
jobCollection: busdb.NewCollection(jobCollection, hookedDB),
}, nil
return client, nil
}
func setupDB(ctx context.Context, client driver.Client, dbName string) (driver.Database, error) {
func SetupDB(ctx context.Context, client driver.Client, dbName string) (driver.Database, error) {
databaseExists, err := client.DatabaseExists(ctx, dbName)
if err != nil {
return nil, err
return nil, fmt.Errorf("could not check if database exists: %w", err)
}
var db driver.Database
@@ -157,12 +206,12 @@ func setupDB(ctx context.Context, client driver.Client, dbName string) (driver.D
db, err = client.Database(ctx, dbName)
}
if err != nil {
return nil, err
return nil, fmt.Errorf("could not create database: %w", err)
}
collectionExists, err := db.CollectionExists(ctx, migrations.MigrationCollection)
if err != nil {
return nil, err
return nil, fmt.Errorf("could not check if collection exists: %w", err)
}
if !collectionExists {
@@ -175,3 +224,18 @@ func setupDB(ctx context.Context, client driver.Client, dbName string) (driver.D
return db, nil
}
// Truncate removes all documents from every collection managed by this
// Database. Errors from the individual truncations are deliberately
// discarded so that one failing collection does not prevent the
// remaining collections from being emptied.
func (db *Database) Truncate(ctx context.Context) {
	_ = db.templateCollection.Truncate(ctx)
	_ = db.ticketCollection.Truncate(ctx)
	_ = db.playbookCollection.Truncate(ctx)
	_ = db.automationCollection.Truncate(ctx)
	_ = db.userdataCollection.Truncate(ctx)
	_ = db.userCollection.Truncate(ctx)
	_ = db.tickettypeCollection.Truncate(ctx)
	_ = db.jobCollection.Truncate(ctx)
	_ = db.relatedCollection.Truncate(ctx)
	_ = db.settingsCollection.Truncate(ctx)
	_ = db.dashboardCollection.Truncate(ctx)
	// db.containsCollection.Truncate(ctx)
}

View File

@@ -5,19 +5,18 @@ import (
"encoding/json"
"errors"
"fmt"
"strings"
"github.com/arangodb/go-driver"
"github.com/docker/docker/client"
"github.com/xeipuuv/gojsonschema"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toJob(doc *models.JobForm) *models.Job {
return &models.Job{
func toJob(doc *model.JobForm) *model.Job {
return &model.Job{
Automation: doc.Automation,
Payload: doc.Payload,
Origin: doc.Origin,
@@ -26,7 +25,7 @@ func toJob(doc *models.JobForm) *models.Job {
}
}
func (db *Database) toJobResponse(ctx context.Context, key string, doc *models.Job, update bool) (*models.JobResponse, error) {
func (db *Database) toJobResponse(ctx context.Context, key string, doc *model.Job, update bool) (*model.JobResponse, error) {
cli, err := client.NewClientWithOpts(client.FromEnv)
if err != nil {
return nil, err
@@ -38,20 +37,24 @@ func (db *Database) toJobResponse(ctx context.Context, key string, doc *models.J
if doc.Running {
inspect, err := cli.ContainerInspect(ctx, key)
if err != nil || inspect.State == nil {
doc.Running = false
if update {
db.JobUpdate(ctx, key, doc)
_, _ = db.JobUpdate(ctx, key, &model.JobUpdate{
Status: doc.Status,
Running: false,
})
}
} else if doc.Status != inspect.State.Status {
status = inspect.State.Status
doc.Status = inspect.State.Status
if update {
db.JobUpdate(ctx, key, doc)
_, _ = db.JobUpdate(ctx, key, &model.JobUpdate{
Status: status,
Running: doc.Running,
})
}
}
}
return &models.JobResponse{
return &model.JobResponse{
Automation: doc.Automation,
ID: key,
Log: doc.Log,
@@ -63,33 +66,15 @@ func (db *Database) toJobResponse(ctx context.Context, key string, doc *models.J
}, nil
}
func (db *Database) JobCreate(ctx context.Context, id string, job *models.JobForm) (*models.JobResponse, error) {
func (db *Database) JobCreate(ctx context.Context, id string, job *model.JobForm) (*model.JobResponse, error) {
if job == nil {
return nil, errors.New("requires job")
}
var doc models.Job
var doc model.Job
newctx := driver.WithReturnNew(ctx, &doc)
/* Start validation */
j := toJob(job)
b, _ := json.Marshal(j)
r, err := models.JobSchema.Validate(gojsonschema.NewBytesLoader(b))
if err != nil {
return nil, err
}
if !r.Valid() {
var errs []string
for _, e := range r.Errors() {
errs = append(errs, e.String())
}
return nil, errors.New(strings.Join(errs, ", "))
}
/* End validation */
meta, err := db.jobCollection.CreateDocument(ctx, newctx, id, j)
meta, err := db.jobCollection.CreateDocument(ctx, newctx, id, toJob(job))
if err != nil {
return nil, err
}
@@ -97,8 +82,8 @@ func (db *Database) JobCreate(ctx context.Context, id string, job *models.JobFor
return db.toJobResponse(ctx, meta.Key, &doc, true)
}
func (db *Database) JobGet(ctx context.Context, id string) (*models.JobResponse, error) {
var doc models.Job
func (db *Database) JobGet(ctx context.Context, id string) (*model.JobResponse, error) {
var doc model.Job
meta, err := db.jobCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -107,28 +92,11 @@ func (db *Database) JobGet(ctx context.Context, id string) (*models.JobResponse,
return db.toJobResponse(ctx, meta.Key, &doc, true)
}
func (db *Database) JobUpdate(ctx context.Context, id string, job *models.Job) (*models.JobResponse, error) {
var doc models.Job
func (db *Database) JobUpdate(ctx context.Context, id string, job *model.JobUpdate) (*model.JobResponse, error) {
var doc model.Job
ctx = driver.WithReturnNew(ctx, &doc)
/* Start validation */
b, _ := json.Marshal(job)
r, err := models.JobSchema.Validate(gojsonschema.NewBytesLoader(b))
if err != nil {
return nil, err
}
if !r.Valid() {
var errs []string
for _, e := range r.Errors() {
errs = append(errs, e.String())
}
return nil, errors.New(strings.Join(errs, ", "))
}
/* End validation */
meta, err := db.jobCollection.ReplaceDocument(ctx, id, job)
meta, err := db.jobCollection.UpdateDocument(ctx, id, job)
if err != nil {
return nil, err
}
@@ -139,16 +107,15 @@ func (db *Database) JobUpdate(ctx context.Context, id string, job *models.Job) (
func (db *Database) JobLogAppend(ctx context.Context, id string, logLine string) error {
query := `LET d = DOCUMENT(@@collection, @ID)
UPDATE d WITH { "log": CONCAT(NOT_NULL(d.log, ""), @logline) } IN @@collection`
cur, _, err := db.Query(ctx, query, map[string]interface{}{
cur, _, err := db.Query(ctx, query, map[string]any{
"@collection": JobCollectionName,
"ID": id,
"logline": logLine,
}, &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
},
Msg: fmt.Sprintf("Append logline"),
})
if err != nil {
return err
@@ -158,19 +125,18 @@ func (db *Database) JobLogAppend(ctx context.Context, id string, logLine string)
return nil
}
func (db *Database) JobComplete(ctx context.Context, id string, out interface{}) error {
func (db *Database) JobComplete(ctx context.Context, id string, out any) error {
query := `LET d = DOCUMENT(@@collection, @ID)
UPDATE d WITH { "output": @out, "status": "completed", "running": false } IN @@collection`
cur, _, err := db.Query(ctx, query, map[string]interface{}{
cur, _, err := db.Query(ctx, query, map[string]any{
"@collection": JobCollectionName,
"ID": id,
"out": out,
}, &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
},
Msg: fmt.Sprintf("Set output"),
})
if err != nil {
return err
@@ -182,19 +148,20 @@ func (db *Database) JobComplete(ctx context.Context, id string, out interface{})
func (db *Database) JobDelete(ctx context.Context, id string) error {
_, err := db.jobCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) JobList(ctx context.Context) ([]*models.JobResponse, error) {
func (db *Database) JobList(ctx context.Context) ([]*model.JobResponse, error) {
query := "FOR d IN @@collection RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": JobCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": JobCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.JobResponse
var docs []*model.JobResponse
for {
var doc models.Job
var doc model.Job
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
@@ -213,33 +180,39 @@ func (db *Database) JobList(ctx context.Context) ([]*models.JobResponse, error)
return docs, err
}
func publishJobMapping(id, automation string, contextStructs *models.Context, origin *models.Origin, payloadMapping map[string]string, db *Database) error {
func publishJobMapping(id, automation string, contextStructs *model.Context, origin *model.Origin, payloadMapping map[string]string, db *Database) error {
msg, err := generatePayload(payloadMapping, contextStructs)
if err != nil {
return fmt.Errorf("message generation failed: %w", err)
}
return publishJob(id, automation, contextStructs, origin, msg, db)
db.bus.JobChannel.Publish(&bus.JobMsg{
ID: id,
Automation: automation,
Origin: origin,
Message: &model.Message{
Context: contextStructs,
Payload: msg,
},
})
return nil
}
func publishJob(id, automation string, contextStructs *models.Context, origin *models.Origin, payload map[string]interface{}, db *Database) error {
return db.bus.PublishJob(id, automation, payload, contextStructs, origin)
}
func generatePayload(msgMapping map[string]string, contextStructs *models.Context) (map[string]interface{}, error) {
contextJson, err := json.Marshal(contextStructs)
func generatePayload(msgMapping map[string]string, contextStructs *model.Context) (map[string]any, error) {
contextJSON, err := json.Marshal(contextStructs)
if err != nil {
return nil, err
}
automationContext := map[string]interface{}{}
err = json.Unmarshal(contextJson, &automationContext)
automationContext := map[string]any{}
err = json.Unmarshal(contextJSON, &automationContext)
if err != nil {
return nil, err
}
parser := caql.Parser{}
msg := map[string]interface{}{}
msg := map[string]any{}
for arg, expr := range msgMapping {
tree, err := parser.Parse(expr)
if err != nil {
@@ -252,5 +225,6 @@ func generatePayload(msgMapping map[string]string, contextStructs *models.Contex
}
msg[arg] = v
}
return msg, nil
}

View File

@@ -1,630 +0,0 @@
#!/usr/bin/env python
import copy
import io
import json
import subprocess
import sys
from datetime import datetime, timezone
# Install the third-party dependencies at runtime inside the automation
# container; stdout/stderr are suppressed so pip's output does not pollute
# the job log (the script's stdout is the job result).
subprocess.check_call(
    [sys.executable, "-m", "pip", "install", "thehive4py", "requests", "minio"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)
defaultschema = {
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Default",
"type": "object",
"required": [
"severity",
"description",
"summary",
"tlp",
"pap"
],
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Unknown",
"title": "Unknown",
"icon": "mdi-help"
},
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
},
{
"const": "Very High",
"title": "Very High",
"icon": "mdi-exclamation"
}
]
},
"flag": {
"title": "Flag",
"type": "boolean",
"x-cols": 6,
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"pap": {
"$id": "#root/pap",
"title": "PAP",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"tags": {
"$id": "#root/tags",
"title": "Tags",
"type": "array",
"items": {
"type": "string"
}
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
},
"resolutionStatus": {
"$id": "#root/resolutionStatus",
"title": "Resolution Status",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
},
"endDate": {
"$id": "#root/endDate",
"title": "End Data",
"type": "string",
"format": "date-time",
"x-cols": 6,
"x-class": "pr-2",
},
"summary": {
"$id": "#root/summary",
"title": "Summary",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
}
}
}
defaultalertschema = {
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Default",
"type": "object",
"required": [
"severity",
"description",
"summary",
"tlp",
"pap"
],
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Unknown",
"title": "Unknown",
"icon": "mdi-help"
},
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
},
{
"const": "Very High",
"title": "Very High",
"icon": "mdi-exclamation"
}
]
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"source": {
"$id": "#root/source",
"title": "Source",
"type": "string",
"x-cols": 4,
"x-class": "pr-2",
},
"sourceRef": {
"$id": "#root/sourceRef",
"title": "Source Ref",
"type": "string",
"x-cols": 4,
"x-class": "pr-2",
},
"type": {
"$id": "#root/type",
"title": "Type",
"type": "string",
"x-cols": 4,
"x-class": "pr-2",
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
}
}
}
class schema:
    """Builder for a per-case Catalyst ticket JSON schema.

    Starts from the module-level ``defaultschema`` template and lets the
    caller register one property per TheHive custom field. A deep copy of
    the template is taken in ``__init__``: the ``add_*`` methods mutate the
    nested ``"properties"`` dict, and the original code assigned the shared
    module-level dict directly, so custom fields registered for one case
    leaked into the schema of every later case (and alerts).
    """

    def __init__(self):
        # Deep copy so mutations stay local to this instance.
        self.schema = copy.deepcopy(defaultschema)

    def add_string(self, title):
        # Free-text custom field, rendered half-width in the form.
        self.schema["properties"][title] = { "type": "string", "x-cols": 6, "x-class": "pr-2" }

    def add_boolean(self, title):
        self.schema["properties"][title] = { "type": "boolean", "x-cols": 6, "x-class": "pr-2" }

    def add_date(self, title):
        # Dates are ISO strings with JSON-schema "date-time" format.
        self.schema["properties"][title] = { "type": "string", "format": "date-time", "x-cols": 6, "x-class": "pr-2" }

    def add_integer(self, title):
        self.schema["properties"][title] = { "type": "integer", "x-cols": 6, "x-class": "pr-2" }

    def add_float(self, title):
        # JSON schema calls floats "number".
        self.schema["properties"][title] = { "type": "number", "x-cols": 6, "x-class": "pr-2" }
class alertschema:
    """Builder for the Catalyst alert ticket JSON schema.

    Deep-copies the module-level ``defaultalertschema`` template so that an
    instance's schema dict is independent of the shared template; the
    original assigned the shared dict directly, so any later mutation of
    ``self.schema`` would have altered the template for all alerts.
    """

    def __init__(self):
        # Deep copy keeps the module-level template pristine.
        self.schema = copy.deepcopy(defaultalertschema)
def maptime(hivetime):
    """Convert a TheHive epoch-milliseconds timestamp to an ISO-8601 UTC string.

    Returns None when the input is None. The original implementation used
    datetime.fromtimestamp(), which interprets the epoch in the *local*
    timezone, while still appending "Z" — mislabeling local wall-clock times
    as UTC. The conversion is now done explicitly in UTC so the "Z" suffix
    is accurate regardless of the host timezone.
    """
    if hivetime is None:
        return None
    # Convert in UTC, then drop the tzinfo so isoformat() does not append
    # "+00:00" — the literal "Z" suffix is kept for backward compatibility.
    dt = datetime.fromtimestamp(hivetime / 1000, tz=timezone.utc)
    return dt.replace(tzinfo=None).isoformat() + "Z"
def mapstatus(hivestatus):
    """Map a TheHive case/alert status to a Catalyst ticket status.

    "Open" and "New" are the only states considered open; everything else
    (Resolved, Imported, Ignored, ...) maps to "closed".
    """
    return "open" if hivestatus in ("Open", "New") else "closed"
# Numeric TheHive TLP level -> Catalyst color name.
_TLP_NAMES = {0: "White", 1: "Green", 2: "Amber", 3: "Red"}


def maptlp(hivetlp):
    """Map a numeric TheHive TLP level (0-3) to its color name.

    Any unrecognized value falls back to "White", matching the original
    if-chain's default.
    """
    return _TLP_NAMES.get(hivetlp, "White")
# Numeric TheHive severity -> Catalyst severity label.
_SEVERITY_NAMES = {1: "Low", 2: "Medium", 3: "High", 4: "Very High"}


def mapseverity(hiveseverity):
    """Map a numeric TheHive severity (1-4) to its Catalyst label.

    Any other value maps to "Unknown", matching the original if-chain's
    fall-through.
    """
    return _SEVERITY_NAMES.get(hiveseverity, "Unknown")
# {
# "_id": "~16416",
# "id": "~16416",
# "createdBy": "jonas@thehive.local",
# "updatedBy": "jonas@thehive.local",
# "createdAt": 1638704013583,
# "updatedAt": 1638704061151,
# "_type": "case",
# "caseId": 1,
# "title": "My Test 1",
# "description": "My Testcase",
# "severity": 2,
# "startDate": 1638703980000,
# "endDate": null,
# "impactStatus": null,
# "resolutionStatus": null,
# "tags": [],
# "flag": false,
# "tlp": 2,
# "pap": 2,
# "status": "Open",
# "summary": null,
# "owner": "jonas@thehive.local",
# "customFields": {},
# "stats": {},
# "permissions": [ "manageShare", "manageAnalyse", "manageTask", "manageCaseTemplate", "manageCase", "manageUser", "manageProcedure", "managePage", "manageObservable", "manageTag", "manageConfig", "manageAlert", "accessTheHiveFS", "manageAction" ]
# }
def mapcase(hivecase, url, keep_ids):
    """Convert a TheHive case dict into a Catalyst "incident" ticket dict.

    Builds a per-case JSON schema from the default template plus one
    property per populated custom field, merges the custom-field values
    into the ticket details, and links back to the originating TheHive
    case. When keep_ids is true the TheHive numeric caseId is reused as
    the Catalyst ticket id (merged in via the trailing `| case`).
    """
    s = schema()
    details = {}
    # TheHive custom fields carry their value under a type-named key
    # ("string", "boolean", "date", "integer", "float"); register each
    # non-null value in the schema and copy it into the details dict.
    for name, data in hivecase["customFields"].items():
        if "string" in data and data["string"] is not None:
            s.add_string(name)
            details[name] = data["string"]
        if "boolean" in data and data["boolean"] is not None:
            s.add_boolean(name)
            details[name] = data["boolean"]
        if "date" in data and data["date"] is not None:
            s.add_date(name)
            # Dates arrive as epoch milliseconds; convert to ISO strings.
            details[name] = maptime(data["date"])
        if "integer" in data and data["integer"] is not None:
            s.add_integer(name)
            details[name] = data["integer"]
        if "float" in data and data["float"] is not None:
            s.add_float(name)
            details[name] = data["float"]
    case = {}
    if keep_ids:
        case["id"] = hivecase["caseId"]
    return {
        "name": hivecase["title"],
        "type": "incident",
        "status": mapstatus(hivecase["status"]),
        "owner": hivecase["owner"],
        # "write": hivecase["write"],
        # "read": hivecase["read"],
        # The ticket schema is stored as a JSON string, not a nested object.
        "schema": json.dumps(s.schema),
        # Custom-field values take precedence over the standard fields on
        # key collisions (dict-union keeps the right-hand side).
        "details": {
            "tlp": maptlp(hivecase["tlp"]),
            "pap": maptlp(hivecase["pap"]),
            "severity": mapseverity(hivecase["severity"]),
            "description": hivecase["description"],
            "summary": hivecase["summary"],
            "tags": hivecase["tags"],
            "endDate": maptime(hivecase["endDate"]),
            "resolutionStatus": hivecase["resolutionStatus"],
            "flag": hivecase["flag"],
        } | details,
        # Deep link back to the source case in TheHive's UI.
        "references": [
            { "name": "TheHive #%d" % hivecase["caseId"], "href": "%s/index.html#!/case/~%s/details" % (url, hivecase["id"]) }
        ],
        #
        # "playbooks": hivecase["playbooks"],
        #
        # files/comments/artifacts are populated later by run() from the
        # case's task logs and observables.
        "files": [],
        "comments": [],
        # creator, created, message
        #
        "artifacts": [],
        # name, type, status, enrichment
        # name, data
        "created": maptime(hivecase["createdAt"]),
        "modified": maptime(hivecase["updatedAt"]),
    } | case
# {
# "_id": "ce2c00f17132359cb3c50dfbb1901810",
# "_type": "alert",
# "artifacts": [],
# "createdAt": 1495012062014,
# "createdBy": "myuser",
# "date": 1495012062016,
# "description": "N/A",
# "follow": true,
# "id": "ce2c00f17132359cb3c50dfbb1901810",
# "lastSyncDate": 1495012062016,
# "severity": 2,
# "source": "instance1",
# "sourceRef": "alert-ref",
# "status": "New",
# "title": "New Alert",
# "tlp": 2,
# "type": "external",
# "user": "myuser"
# }
def mapalert(hivealert, url):
    """Convert a TheHive alert dict into a Catalyst "alert" ticket dict.

    Alerts use the fixed alert schema (no custom fields are mapped here —
    `details` stays empty and only standard fields are filled in).
    """
    s = alertschema()
    details = {}
    return {
        "name": hivealert["title"],
        "type": "alert",
        "status": mapstatus(hivealert["status"]),
        "owner": hivealert["user"],
        # The ticket schema is stored as a JSON string, not a nested object.
        "schema": json.dumps(s.schema),
        "details": {
            "tlp": maptlp(hivealert["tlp"]),
            "severity": mapseverity(hivealert["severity"]),
            "description": hivealert["description"],
            "source": hivealert["source"],
            "sourceRef": hivealert["sourceRef"],
            "type": hivealert["type"],
        } | details,
        # TheHive has no per-alert detail page, so link to the alert list.
        "references": [
            { "name": "TheHive Alerts", "href": "%s/index.html#!/alert/list" % url }
        ],
        "files": [],
        "comments": [],
        "artifacts": [],
        "created": maptime(hivealert["createdAt"]),
        "modified": maptime(hivealert["lastSyncDate"]),
    }
# {
# "_id": "~41152",
# "id": "~41152",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638723814523,
# "_type": "case_artifact",
# "dataType": "ip",
# "data": "2.2.2.2",
# "startDate": 1638723814523,
# "tlp": 2,
# "tags": [],
# "ioc": false,
# "sighted": false,
# "message": ".",
# "reports": {},
# "stats": {},
# "ignoreSimilarity": false
# }
def mapobservable(hiveobservable):
    """Convert a TheHive observable into a Catalyst artifact dict.

    Observables flagged as IOCs are marked "malicious"; all others get the
    status "unknown".
    """
    return {
        "name": hiveobservable["data"],
        "type": hiveobservable["dataType"],
        "status": "malicious" if hiveobservable["ioc"] else "unknown",
    }
# {
# "id": "~12296",
# "_id": "~12296",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638704029800,
# "_type": "case_task",
# "title": "Start",
# "group": "MyTaskGroup1",
# "owner": "jonas@thehive.local",
# "status": "InProgress",
# "flag": false,
# "startDate": 1638704115667,
# "order": 0
# }
# {
# "_id": "~24656",
# "id": "~24656",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638729992590,
# "_type": "case_task_log",
# "message": "asd",
# "startDate": 1638729992590,
# "attachment": {
# "name": "Chemistry Vector.eps",
# "hashes": [
# "adf2d4cd72f4141fe7f8eb4af035596415a29c048d3039be6449008f291258e9",
# "180f66a6d22b1f09ed198afd814f701e42440e7c",
# "b28ae347371df003b76cbb8c6199c97e"
# ],
# "size": 3421842,
# "contentType": "application/postscript",
# "id": "adf2d4cd72f4141fe7f8eb4af035596415a29c048d3039be6449008f291258e9"
# },
# "status": "Ok",
# "owner": "jonas@thehive.local"
# }
def maptasklog(hivetask, hivetasklog):
    """Render a TheHive task log entry as a Catalyst comment dict.

    The comment text starts with a bold "group: title (status)" header for
    the owning task, followed by the log message, and names any attachment
    at the end.
    """
    text = "**{}: {}** ({})\n\n".format(
        hivetask["group"], hivetask["title"], hivetask["status"]
    )
    text += hivetasklog["message"]
    if "attachment" in hivetasklog:
        # Attachments are only referenced by name here; run() downloads the
        # actual file separately.
        text += "\n\n*Attachment*: " + hivetasklog["attachment"]["name"]
    return {
        "creator": hivetasklog["createdBy"],
        "created": maptime(hivetasklog["createdAt"]),
        "message": text,
    }
def run(msg):
    """Migrate all alerts and cases from a TheHive instance into Catalyst.

    msg carries the automation message: msg["payload"] holds thehiveurl,
    thehivekey, skip_files and keep_ids; msg["secrets"] holds the Catalyst
    API URL/key and the MinIO credentials. Tickets are pushed in bulk via
    the Catalyst /tickets/batch endpoint; attachments are copied into
    per-ticket MinIO buckets unless skip_files is set.
    """
    skip_files = msg["payload"]["skip_files"]
    keep_ids = msg["payload"]["keep_ids"]
    # Imported lazily: these packages are pip-installed at script start.
    from thehive4py.api import TheHiveApi
    import requests
    from minio import Minio
    headers = {"PRIVATE-TOKEN": msg["secrets"]["catalyst_apikey"]}
    # minioclient = Minio("try.catalyst-soar.com:9000", access_key="minio", secret_key="password")
    # The MinIO client is only needed (and only constructed) when files
    # are to be transferred.
    if not skip_files:
        minioclient = Minio(
            msg["secrets"]["minio_host"],
            access_key=msg["secrets"]["minio_access_key"],
            secret_key=msg["secrets"]["minio_secret_key"])
    # url = "http://localhost:9000"
    url = msg["payload"]["thehiveurl"]
    # api = TheHiveApi(url, "dtUCnzY4h291GIFHJKW/Z2I2SgjTRQqo")
    api = TheHiveApi(url, msg["payload"]["thehivekey"])
    # Progress goes to stderr; stdout is reserved for the JSON result.
    print("find alerts", file=sys.stderr)
    alerts = []
    resp = api.find_alerts(query={}, sort=['-createdAt'], range='all')
    resp.raise_for_status()
    for alert in resp.json():
        alerts.append(mapalert(alert, url))
    if alerts:
        print("create %s alerts" % len(alerts), file=sys.stderr)
        response = requests.post(msg["secrets"]["catalyst_apiurl"] + "/tickets/batch", json=alerts, headers=headers)
        response.raise_for_status()
    print("find incidents", file=sys.stderr)
    incidents = []
    resp = api.find_cases(query={}, sort=['-createdAt'], range='all')
    resp.raise_for_status()
    for case in resp.json():
        incident = mapcase(case, url, keep_ids)
        # Observables become artifacts, task logs become comments.
        for observable in api.get_case_observables(case["id"]).json():
            incident["artifacts"].append(mapobservable(observable))
        for task in api.get_case_tasks(case["id"]).json():
            for log in api.get_task_logs(task["id"]).json():
                incident["comments"].append(maptasklog(task, log))
                if 'attachment' in log and not skip_files:
                    incident["files"].append({ "key": log['attachment']["id"], "name": log['attachment']["name"] })
                    # NOTE(review): incident["id"] is only present when
                    # keep_ids is true (see mapcase); with skip_files false
                    # and keep_ids false this raises KeyError — confirm the
                    # intended flag combinations.
                    bucket_name = "catalyst-%d" % incident["id"]
                    if not minioclient.bucket_exists(bucket_name):
                        minioclient.make_bucket(bucket_name)
                    response = api.download_attachment(log["attachment"]["id"])
                    data = io.BytesIO(response.content)
                    # length=-1 streams with unknown size in 10 MiB parts.
                    minioclient.put_object(bucket_name, log["attachment"]["id"], data, length=-1, part_size=10*1024*1024)
        incidents.append(incident)
    if incidents:
        if keep_ids:
            # Remove any pre-existing tickets with the same ids so the
            # batch create below does not collide with earlier imports.
            print("delete incidents", file=sys.stderr)
            for incident in incidents:
                requests.delete(msg["secrets"]["catalyst_apiurl"] + "/tickets/%d" % incident["id"], headers=headers)
        print("create %d incidents" % len(incidents), file=sys.stderr)
        response = requests.post(msg["secrets"]["catalyst_apiurl"] + "/tickets/batch", json=incidents, headers=headers)
        response.raise_for_status()
    return {"done": True}
# Entry point: the automation message (payload + secrets) is passed as a
# JSON string in argv[1]; the result dict is printed as JSON on stdout for
# the job runner to collect.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -11,9 +11,6 @@ var SHA1HashAutomation string
//go:embed automations/vt.hash.py
var VTHashAutomation string
//go:embed automations/thehive.py
var TheHiveAutomation string
//go:embed automations/comment.py
var CommentAutomation string

View File

@@ -7,8 +7,8 @@ import (
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
)
const MigrationCollection string = "migrations"
@@ -32,25 +32,38 @@ func generateMigrations() ([]Migration, error) {
&createGraph{ID: "create-ticket-graph", Name: "Graph", EdgeDefinitions: []driver.EdgeDefinition{{Collection: "related", From: []string{"tickets"}, To: []string{"tickets"}}}},
&createDocument{ID: "create-template-default", Collection: "templates", Document: &busdb.Keyed{Key: "default", Doc: models.TicketTemplate{Schema: DefaultTemplateSchema, Name: "Default"}}},
&createDocument{ID: "create-automation-vt.hash", Collection: "automations", Document: &busdb.Keyed{Key: "vt.hash", Doc: models.Automation{Image: "docker.io/python:3", Script: VTHashAutomation}}},
&createDocument{ID: "create-automation-comment", Collection: "automations", Document: &busdb.Keyed{Key: "comment", Doc: models.Automation{Image: "docker.io/python:3", Script: CommentAutomation}}},
&createDocument{ID: "create-automation-thehive", Collection: "automations", Document: &busdb.Keyed{Key: "thehive", Doc: models.Automation{Image: "docker.io/python:3", Script: TheHiveAutomation}}},
&createDocument{ID: "create-automation-hash.sha1", Collection: "automations", Document: &busdb.Keyed{Key: "hash.sha1", Doc: models.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation}}},
&createDocument{ID: "create-playbook-malware", Collection: "playbooks", Document: &busdb.Keyed{Key: "malware", Doc: models.PlaybookTemplate{Name: "Malware", Yaml: MalwarePlaybook}}},
&createDocument{ID: "create-playbook-phishing", Collection: "playbooks", Document: &busdb.Keyed{Key: "phishing", Doc: models.PlaybookTemplate{Name: "Phishing", Yaml: PhishingPlaybook}}},
&createDocument{ID: "create-tickettype-alert", Collection: "tickettypes", Document: &busdb.Keyed{Key: "alert", Doc: models.TicketType{Name: "Alerts", Icon: "mdi-alert", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument{ID: "create-tickettype-incident", Collection: "tickettypes", Document: &busdb.Keyed{Key: "incident", Doc: models.TicketType{Name: "Incidents", Icon: "mdi-radioactive", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument{ID: "create-tickettype-investigation", Collection: "tickettypes", Document: &busdb.Keyed{Key: "investigation", Doc: models.TicketType{Name: "Forensic Investigations", Icon: "mdi-fingerprint", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument{ID: "create-tickettype-hunt", Collection: "tickettypes", Document: &busdb.Keyed{Key: "hunt", Doc: models.TicketType{Name: "Threat Hunting", Icon: "mdi-target", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketTemplate]]{ID: "create-template-default", Collection: "templates", Document: &busdb.Keyed[model.TicketTemplate]{Key: "default", Doc: &model.TicketTemplate{Schema: DefaultTemplateSchema, Name: "Default"}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-vt.hash", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "vt.hash", Doc: &model.Automation{Image: "docker.io/python:3", Script: VTHashAutomation}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-comment", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "comment", Doc: &model.Automation{Image: "docker.io/python:3", Script: CommentAutomation}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-hash.sha1", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "hash.sha1", Doc: &model.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation}}},
&createDocument[busdb.Keyed[model.PlaybookTemplate]]{ID: "create-playbook-malware", Collection: "playbooks", Document: &busdb.Keyed[model.PlaybookTemplate]{Key: "malware", Doc: &model.PlaybookTemplate{Name: "Malware", Yaml: MalwarePlaybook}}},
&createDocument[busdb.Keyed[model.PlaybookTemplate]]{ID: "create-playbook-phishing", Collection: "playbooks", Document: &busdb.Keyed[model.PlaybookTemplate]{Key: "phishing", Doc: &model.PlaybookTemplate{Name: "Phishing", Yaml: PhishingPlaybook}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-alert", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "alert", Doc: &model.TicketType{Name: "Alerts", Icon: "mdi-alert", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-incident", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "incident", Doc: &model.TicketType{Name: "Incidents", Icon: "mdi-radioactive", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-investigation", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "investigation", Doc: &model.TicketType{Name: "Forensic Investigations", Icon: "mdi-fingerprint", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-hunt", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "hunt", Doc: &model.TicketType{Name: "Threat Hunting", Icon: "mdi-target", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&updateSchema{ID: "update-automation-collection-1", Name: "automations", DataType: "automation", Schema: `{"properties":{"image":{"type":"string"},"script":{"type":"string"}},"required":["image","script"],"type":"object"}`},
&updateDocument{ID: "update-automation-vt.hash-1", Collection: "automations", Key: "vt.hash", Document: models.Automation{Image: "docker.io/python:3", Script: VTHashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&updateDocument{ID: "update-automation-comment-1", Collection: "automations", Key: "comment", Document: models.Automation{Image: "docker.io/python:3", Script: CommentAutomation, Type: []string{"playbook"}}},
&updateDocument{ID: "update-automation-thehive-1", Collection: "automations", Key: "thehive", Document: models.Automation{Image: "docker.io/python:3", Script: TheHiveAutomation, Schema: pointer.String(`{"title":"TheHive credentials","type":"object","properties":{"thehiveurl":{"type":"string","title":"TheHive URL (e.g. 'https://thehive.example.org')"},"thehivekey":{"type":"string","title":"TheHive API Key"},"skip_files":{"type":"boolean", "default": true, "title":"Skip Files (much faster)"},"keep_ids":{"type":"boolean", "default": true, "title":"Keep IDs and overwrite existing IDs"}},"required":["thehiveurl", "thehivekey", "skip_files", "keep_ids"]}`), Type: []string{"global"}}},
&updateDocument{ID: "update-automation-hash.sha1-1", Collection: "automations", Key: "hash.sha1", Document: models.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&updateDocument[model.Automation]{ID: "update-automation-vt.hash-1", Collection: "automations", Key: "vt.hash", Document: &model.Automation{Image: "docker.io/python:3", Script: VTHashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&updateDocument[model.Automation]{ID: "update-automation-comment-1", Collection: "automations", Key: "comment", Document: &model.Automation{Image: "docker.io/python:3", Script: CommentAutomation, Type: []string{"playbook"}}},
&updateDocument[model.Automation]{ID: "update-automation-hash.sha1-1", Collection: "automations", Key: "hash.sha1", Document: &model.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&createCollection{ID: "create-job-collection", Name: "jobs", DataType: "job", Schema: `{"properties":{"automation":{"type":"string"},"log":{"type":"string"},"payload":{},"origin":{"properties":{"artifact_origin":{"properties":{"artifact":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["artifact","ticket_id"],"type":"object"},"task_origin":{"properties":{"playbook_id":{"type":"string"},"task_id":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["playbook_id","task_id","ticket_id"],"type":"object"}},"type":"object"},"output":{"properties":{},"type":"object"},"running":{"type":"boolean"},"status":{"type":"string"}},"required":["automation","running","status"],"type":"object"}`},
&createDocument[busdb.Keyed[model.PlaybookTemplate]]{ID: "create-playbook-simple", Collection: "playbooks", Document: &busdb.Keyed[model.PlaybookTemplate]{Key: "simple", Doc: &model.PlaybookTemplate{Name: "Simple", Yaml: SimplePlaybook}}},
&createCollection{ID: "create-settings-collection", Name: "settings", DataType: "settings", Schema: `{"type":"object","properties":{"artifactStates":{"title":"Artifact States","items":{"type":"object","properties":{"color":{"title":"Color","type":"string","enum":["error","info","success","warning"]},"icon":{"title":"Icon (https://materialdesignicons.com)","type":"string"},"id":{"title":"ID","type":"string"},"name":{"title":"Name","type":"string"}},"required":["id","name","icon"]},"type":"array"},"artifactKinds":{"title":"Artifact Kinds","items":{"type":"object","properties":{"color":{"title":"Color","type":"string","enum":["error","info","success","warning"]},"icon":{"title":"Icon (https://materialdesignicons.com)","type":"string"},"id":{"title":"ID","type":"string"},"name":{"title":"Name","type":"string"}},"required":["id","name","icon"]},"type":"array"},"timeformat":{"title":"Time Format","type":"string"}},"required":["timeformat","artifactKinds","artifactStates"]}`},
&createDocument[busdb.Keyed[model.Settings]]{ID: "create-settings-global", Collection: "settings", Document: &busdb.Keyed[model.Settings]{Key: "global", Doc: &model.Settings{ArtifactStates: []*model.Type{{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(model.TypeColorInfo)}, {Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(model.TypeColorError)}, {Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(model.TypeColorSuccess)}}, ArtifactKinds: []*model.Type{{Icon: "mdi-server", ID: "asset", Name: "Asset"}, {Icon: "mdi-bullseye", ID: "ioc", Name: "IOC"}}, Timeformat: "YYYY-MM-DDThh:mm:ss"}}},
&updateSchema{ID: "update-ticket-collection", Name: "tickets", DataType: "ticket", Schema: `{"properties":{"artifacts":{"items":{"properties":{"enrichments":{"additionalProperties":{"properties":{"created":{"format":"date-time","type":"string"},"data":{"example":{"hash":"b7a067a742c20d07a7456646de89bc2d408a1153"},"properties":{},"type":"object"},"name":{"example":"hash.sha1","type":"string"}},"required":["created","data","name"],"type":"object"},"type":"object"},"name":{"example":"2.2.2.2","type":"string"},"status":{"example":"Unknown","type":"string"},"type":{"type":"string"},"kind":{"type":"string"}},"required":["name"],"type":"object"},"type":"array"},"comments":{"items":{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"}},"required":["created","creator","message"],"type":"object"},"type":"array"},"created":{"format":"date-time","type":"string"},"details":{"example":{"description":"my little incident"},"properties":{},"type":"object"},"files":{"items":{"properties":{"key":{"example":"myfile","type":"string"},"name":{"example":"notes.docx","type":"string"}},"required":["key","name"],"type":"object"},"type":"array"},"modified":{"format":"date-time","type":"string"},"name":{"example":"WannyCry","type":"string"},"owner":{"example":"bob","type":"string"},"playbooks":{"additionalProperties":{"properties":{"name":{"example":"Phishing","type":"string"},"tasks":{"additionalProperties":{"properties":{"automation":{"type":"string"},"closed":{"format":"date-time","type":"string"},"created":{"format":"date-time","type":"string"},"data":{"properties":{},"type":"object"},"done":{"type":"boolean"},"join":{"example":false,"type":"boolean"},"payload":{"additionalProperties":{"type":"string"},"type":"object"},"name":{"example":"Inform 
user","type":"string"},"next":{"additionalProperties":{"type":"string"},"type":"object"},"owner":{"type":"string"},"schema":{"properties":{},"type":"object"},"type":{"enum":["task","input","automation"],"example":"task","type":"string"}},"required":["created","done","name","type"],"type":"object"},"type":"object"}},"required":["name","tasks"],"type":"object"},"type":"object"},"read":{"example":["bob"],"items":{"type":"string"},"type":"array"},"references":{"items":{"properties":{"href":{"example":"https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2017-0144","type":"string"},"name":{"example":"CVE-2017-0144","type":"string"}},"required":["href","name"],"type":"object"},"type":"array"},"schema":{"example":"{}","type":"string"},"status":{"example":"open","type":"string"},"type":{"example":"incident","type":"string"},"write":{"example":["alice"],"items":{"type":"string"},"type":"array"}},"required":["created","modified","name","schema","status","type"],"type":"object"}`},
&createCollection{ID: "create-dashboard-collection", Name: "dashboards", DataType: "dashboards", Schema: `{"type":"object","properties":{"name":{"type":"string"},"widgets":{"items":{"type":"object","properties":{"aggregation":{"type":"string"},"filter":{"type":"string"},"name":{"type":"string"},"type":{"enum":[ "bar", "line", "pie" ]},"width": { "type": "integer", "minimum": 1, "maximum": 12 }},"required":["name","aggregation", "type", "width"]},"type":"array"}},"required":["name","widgets"]}`},
&updateDocument[model.Settings]{ID: "update-settings-global-1", Collection: "settings", Key: "global", Document: &model.Settings{ArtifactStates: []*model.Type{{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(model.TypeColorInfo)}, {Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(model.TypeColorError)}, {Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(model.TypeColorSuccess)}}, ArtifactKinds: []*model.Type{{Icon: "mdi-server", ID: "asset", Name: "Asset"}, {Icon: "mdi-bullseye", ID: "ioc", Name: "IOC"}}, Timeformat: "yyyy-MM-dd hh:mm:ss"}},
&updateSchema{ID: "update-user-simple-login", Name: "users", DataType: "user", Schema: `{"type":"object","properties":{"apikey":{"type":"boolean"},"blocked":{"type":"boolean"},"roles":{"items":{"type":"string"},"type":"array"},"salt":{"type":"string"},"sha256":{"type":"string"},"sha512":{"type":"string"}},"required":["blocked","apikey","roles"],"$id":"#/definitions/User"}`},
&mapRoles{ID: "simplify-roles"},
}, nil
}
@@ -58,6 +71,7 @@ func loadSchema(dataType, jsonschema string) (*driver.CollectionSchemaOptions, e
ticketCollectionSchema := &driver.CollectionSchemaOptions{Level: driver.CollectionSchemaLevelStrict, Message: fmt.Sprintf("Validation of %s failed", dataType)}
err := ticketCollectionSchema.LoadRule([]byte(jsonschema))
return ticketCollectionSchema, err
}
@@ -92,6 +106,7 @@ func PerformMigrations(ctx context.Context, db driver.Database) error {
}
}
}
return nil
}
@@ -162,41 +177,43 @@ func (m *createGraph) Migrate(ctx context.Context, db driver.Database) error {
_, err := db.CreateGraph(ctx, m.Name, &driver.CreateGraphOptions{
EdgeDefinitions: m.EdgeDefinitions,
})
return err
}
type createDocument struct {
type createDocument[T any] struct {
ID string
Collection string
Document interface{}
Document *T
}
func (m *createDocument) MID() string {
func (m *createDocument[T]) MID() string {
return m.ID
}
func (m *createDocument) Migrate(ctx context.Context, driver driver.Database) error {
func (m *createDocument[T]) Migrate(ctx context.Context, driver driver.Database) error {
collection, err := driver.Collection(ctx, m.Collection)
if err != nil {
return err
}
_, err = collection.CreateDocument(ctx, m.Document)
return err
}
type updateDocument struct {
type updateDocument[T any] struct {
ID string
Collection string
Key string
Document interface{}
Document *T
}
func (m *updateDocument) MID() string {
func (m *updateDocument[T]) MID() string {
return m.ID
}
func (m *updateDocument) Migrate(ctx context.Context, driver driver.Database) error {
func (m *updateDocument[T]) Migrate(ctx context.Context, driver driver.Database) error {
collection, err := driver.Collection(ctx, m.Collection)
if err != nil {
return err
@@ -209,9 +226,25 @@ func (m *updateDocument) Migrate(ctx context.Context, driver driver.Database) er
if !exists {
_, err = collection.CreateDocument(ctx, m.Document)
return err
}
_, err = collection.ReplaceDocument(ctx, m.Key, m.Document)
return err
}
type mapRoles struct {
ID string
}
func (m mapRoles) MID() string {
return m.ID
}
func (m mapRoles) Migrate(ctx context.Context, driver driver.Database) error {
_, err := driver.Query(ctx, "FOR u IN users UPDATE u WITH {roles: u.roles[*].name} IN users", nil)
return err
}

View File

@@ -1,208 +0,0 @@
{
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Advanced",
"type": "object",
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
}
]
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"nx-enum": [
"White",
"Green",
"Amber",
"Red"
],
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
},
"type": {
"type": "object",
"title": "Select an incident type",
"oneOf": [
{
"title": "Malware",
"properties": {
"schemaKey": {
"type": "string",
"const": "malware"
},
"malware_type": {
"type": "string",
"title": "Malware Type",
"enum": ["Ransomware", "Worm", "Virus"]
}
}
},
{
"title": "Phishing",
"properties": {
"schemaKey": {
"type": "string",
"const": "phishing"
},
"phishing_type": {
"type": "string",
"title": "Phishing Type",
"enum": ["Normal", "Spear", "Whale"]
}
}
}
],
"x-cols": 12
},
"apt": {
"type": "boolean",
"x-display": "switch",
"title": "APT involved?",
"x-cols": 6
},
"apt-group": {
"type": "string",
"title": "Select APT",
"enum": ["Lazarus Group", "Equation Group", "Fancy Bear (APT 28)", "OceanLotus (APT 32)", "Other"],
"x-if": "apt",
"x-cols": 6
},
"tactics": {
"type": "array",
"title": "MITRE Att&ck",
"description": "This description is used as a help message.",
"items": {
"type": "object",
"oneOf": [
{
"title": "Reconnaissance",
"properties": {
"tactic": {
"type": "string",
"const": "reconnaissance",
"title": "Tactic",
"description": "The adversary is trying to gather information they can use to plan future operations."
},
"techniques": {
"type": "array",
"title": "Techniques",
"items": {
"type": "string",
"oneOf": [
{
"const": "T1595",
"title": "Active Scanning",
"description": "Adversaries may execute active reconnaissance scans to gather information that can be used during targeting. Active scans are those where the adversary probes victim infrastructure via network traffic, as opposed to other forms of reconnaissance that do not involve direct interaction."
},
{
"const": "T1592",
"title": "Gather Victim Host Information"
}
]
},
"minItems": 1,
"uniqueItems": true
}
}
},
{
"title": "Persistence",
"properties": {
"tactic": {
"type": "string",
"const": "persistence"
},
"techniques": {
"type": "string",
"title": "Techniques",
"oneOf": [
{
"const": "T1098",
"title": "Account Manipulation"
},
{
"const": "T1197",
"title": "BITS Jobs"
}
]
}
}
}
]
},
"uniqueItems": true
},
"tags": {
"type": "array",
"title": "Tags",
"items": {
"type": "string",
"examples": [
"misp",
"external report",
"internal report"
]
}
}
},
"required": ["severity", "description", "tactics", "type"]
}

File diff suppressed because it is too large Load Diff

View File

@@ -3,7 +3,6 @@ package database
import (
"context"
"errors"
"time"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
@@ -11,7 +10,8 @@ import (
"gopkg.in/yaml.v3"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
type PlaybookYAML struct {
@@ -22,15 +22,15 @@ type PlaybookYAML struct {
type TaskYAML struct {
Name string `yaml:"name"`
Type string `yaml:"type"`
Schema interface{} `yaml:"schema"`
Schema any `yaml:"schema"`
Automation string `yaml:"automation"`
Payload map[string]string `yaml:"payload"`
Next map[string]string `yaml:"next"`
Join bool `yaml:"join"`
}
func toPlaybooks(docs []*models.PlaybookTemplateForm) (map[string]*models.Playbook, error) {
playbooks := map[string]*models.Playbook{}
func toPlaybooks(docs []*model.PlaybookTemplateForm) (map[string]*model.Playbook, error) {
playbooks := map[string]*model.Playbook{}
for _, doc := range docs {
playbook, err := toPlaybook(doc)
if err != nil {
@@ -42,30 +42,37 @@ func toPlaybooks(docs []*models.PlaybookTemplateForm) (map[string]*models.Playbo
playbooks[strcase.ToKebab(playbook.Name)] = playbook
}
}
return playbooks, nil
}
func toPlaybook(doc *models.PlaybookTemplateForm) (*models.Playbook, error) {
ticketPlaybook := &models.Playbook{}
func toPlaybook(doc *model.PlaybookTemplateForm) (*model.Playbook, error) {
ticketPlaybook := &model.Playbook{}
err := yaml.Unmarshal([]byte(doc.Yaml), ticketPlaybook)
if err != nil {
return nil, err
}
for idx, task := range ticketPlaybook.Tasks {
if task.Schema != nil {
task.Schema = dyno.ConvertMapI2MapS(task.Schema.(map[string]interface{}))
schema, ok := dyno.ConvertMapI2MapS(task.Schema).(map[string]any)
if ok {
task.Schema = schema
} else {
return nil, errors.New("could not convert schema")
}
}
task.Created = time.Now().UTC()
ticketPlaybook.Tasks[idx] = task
}
return ticketPlaybook, nil
}
func toPlaybookTemplateResponse(key string, doc *models.PlaybookTemplate) *models.PlaybookTemplateResponse {
return &models.PlaybookTemplateResponse{ID: key, Name: doc.Name, Yaml: doc.Yaml}
func toPlaybookTemplateResponse(key string, doc *model.PlaybookTemplate) *model.PlaybookTemplateResponse {
return &model.PlaybookTemplateResponse{ID: key, Name: doc.Name, Yaml: doc.Yaml}
}
func (db *Database) PlaybookCreate(ctx context.Context, playbook *models.PlaybookTemplateForm) (*models.PlaybookTemplateResponse, error) {
func (db *Database) PlaybookCreate(ctx context.Context, playbook *model.PlaybookTemplateForm) (*model.PlaybookTemplateResponse, error) {
if playbook == nil {
return nil, errors.New("requires playbook")
}
@@ -79,12 +86,12 @@ func (db *Database) PlaybookCreate(ctx context.Context, playbook *models.Playboo
if playbookYAML.Name == "" {
return nil, errors.New("requires template name")
}
p := models.PlaybookTemplate{Name: playbookYAML.Name, Yaml: playbook.Yaml}
p := model.PlaybookTemplate{Name: playbookYAML.Name, Yaml: playbook.Yaml}
var doc models.PlaybookTemplate
var doc model.PlaybookTemplate
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.playbookCollection.CreateDocument(ctx, newctx, strcase.ToKebab(playbookYAML.Name), p)
meta, err := db.playbookCollection.CreateDocument(ctx, newctx, strcase.ToKebab(playbookYAML.Name), &p)
if err != nil {
return nil, err
}
@@ -92,8 +99,8 @@ func (db *Database) PlaybookCreate(ctx context.Context, playbook *models.Playboo
return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
func (db *Database) PlaybookGet(ctx context.Context, id string) (*models.PlaybookTemplateResponse, error) {
doc := models.PlaybookTemplate{}
func (db *Database) PlaybookGet(ctx context.Context, id string) (*model.PlaybookTemplateResponse, error) {
doc := model.PlaybookTemplate{}
meta, err := db.playbookCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -104,10 +111,11 @@ func (db *Database) PlaybookGet(ctx context.Context, id string) (*models.Playboo
func (db *Database) PlaybookDelete(ctx context.Context, id string) error {
_, err := db.playbookCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *models.PlaybookTemplateForm) (*models.PlaybookTemplateResponse, error) {
func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *model.PlaybookTemplateForm) (*model.PlaybookTemplateResponse, error) {
var pb PlaybookYAML
err := yaml.Unmarshal([]byte(playbook.Yaml), &pb)
if err != nil {
@@ -118,10 +126,10 @@ func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *mod
return nil, errors.New("requires template name")
}
var doc models.PlaybookTemplate
var doc model.PlaybookTemplate
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.playbookCollection.ReplaceDocument(ctx, id, models.PlaybookTemplate{Name: pb.Name, Yaml: playbook.Yaml})
meta, err := db.playbookCollection.ReplaceDocument(ctx, id, &model.PlaybookTemplate{Name: pb.Name, Yaml: playbook.Yaml})
if err != nil {
return nil, err
}
@@ -129,16 +137,16 @@ func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *mod
return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
func (db *Database) PlaybookList(ctx context.Context) ([]*models.PlaybookTemplateResponse, error) {
func (db *Database) PlaybookList(ctx context.Context) ([]*model.PlaybookTemplateResponse, error) {
query := "FOR d IN @@collection RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": PlaybookCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": PlaybookCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.PlaybookTemplateResponse
var docs []*model.PlaybookTemplateResponse
for {
var doc models.PlaybookTemplate
var doc model.PlaybookTemplate
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break

View File

@@ -8,10 +8,10 @@ import (
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/dag"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func playbookGraph(playbook *models.Playbook) (*dag.Graph, error) {
func playbookGraph(playbook *model.Playbook) (*dag.Graph, error) {
d := dag.NewGraph()
var taskIDs []string
@@ -33,16 +33,17 @@ func playbookGraph(playbook *models.Playbook) (*dag.Graph, error) {
}
}
}
return d, nil
}
func toTaskResponse(playbook *models.Playbook, taskID string, order int, graph *dag.Graph) (*models.TaskResponse, error) {
func toTaskResponse(playbook *model.Playbook, taskID string, order int, graph *dag.Graph) (*model.TaskResponse, error) {
task, ok := playbook.Tasks[taskID]
if !ok {
return nil, fmt.Errorf("task %s not found", taskID)
}
tr := &models.TaskResponse{
tr := &model.TaskResponse{
Automation: task.Automation,
Closed: task.Closed,
Created: task.Created,
@@ -67,7 +68,7 @@ func toTaskResponse(playbook *models.Playbook, taskID string, order int, graph *
return tr, nil
}
func activePlaybook(playbook *models.Playbook, taskID string) (bool, error) {
func activePlaybook(playbook *model.Playbook, taskID string) (bool, error) {
task, ok := playbook.Tasks[taskID]
if !ok {
return false, fmt.Errorf("playbook does not contain tasks %s", taskID)
@@ -81,7 +82,7 @@ func activePlaybook(playbook *models.Playbook, taskID string) (bool, error) {
return active(playbook, taskID, d, task)
}
func active(playbook *models.Playbook, taskID string, d *dag.Graph, task *models.Task) (bool, error) {
func active(playbook *model.Playbook, taskID string, d *dag.Graph, task *model.Task) (bool, error) {
if task.Done {
return false, nil
}
@@ -109,6 +110,7 @@ func active(playbook *models.Playbook, taskID string, d *dag.Graph, task *models
return false, nil
}
}
return true, nil
}
@@ -129,10 +131,11 @@ func active(playbook *models.Playbook, taskID string, d *dag.Graph, task *models
return true, nil
}
}
return false, nil
}
func evalRequirement(aql string, data interface{}) (bool, error) {
func evalRequirement(aql string, data any) (bool, error) {
if aql == "" {
return true, nil
}
@@ -143,9 +146,9 @@ func evalRequirement(aql string, data interface{}) (bool, error) {
return false, err
}
var dataMap map[string]interface{}
var dataMap map[string]any
if data != nil {
if dataMapX, ok := data.(map[string]interface{}); ok {
if dataMapX, ok := data.(map[string]any); ok {
dataMap = dataMapX
} else {
log.Println("wrong data type for task data")
@@ -160,12 +163,13 @@ func evalRequirement(aql string, data interface{}) (bool, error) {
if b, ok := v.(bool); ok {
return b, nil
}
return false, err
}
/*
// "github.com/qri-io/jsonschema"
func valid(task *models.Task) (bool, error) {
func valid(task *model.Task) (bool, error) {
schema, err := json.Marshal(task.Schema)
if err != nil {
return false, err

View File

@@ -5,18 +5,18 @@ import (
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
var playbook2 = &models.Playbook{
var playbook2 = &model.Playbook{
Name: "Phishing",
Tasks: map[string]*models.Task{
Tasks: map[string]*model.Task{
"board": {Next: map[string]string{
"escalate": "boardInvolved == true",
"aquire-mail": "boardInvolved == false",
"escalate": "boardInvolved == true",
"acquire-mail": "boardInvolved == false",
}},
"escalate": {},
"aquire-mail": {Next: map[string]string{
"acquire-mail": {Next: map[string]string{
"extract-iocs": "schemaKey == 'yes'",
"block-sender": "schemaKey == 'yes'",
"search-email-gateway": "schemaKey == 'no'",
@@ -30,15 +30,15 @@ var playbook2 = &models.Playbook{
},
}
var playbook3 = &models.Playbook{
var playbook3 = &model.Playbook{
Name: "Phishing",
Tasks: map[string]*models.Task{
Tasks: map[string]*model.Task{
"board": {Next: map[string]string{
"escalate": "boardInvolved == true",
"aquire-mail": "boardInvolved == false",
}, Data: map[string]interface{}{"boardInvolved": true}, Done: true},
"escalate": "boardInvolved == true",
"acquire-mail": "boardInvolved == false",
}, Data: map[string]any{"boardInvolved": true}, Done: true},
"escalate": {},
"aquire-mail": {Next: map[string]string{
"acquire-mail": {Next: map[string]string{
"extract-iocs": "schemaKey == 'yes'",
"block-sender": "schemaKey == 'yes'",
"search-email-gateway": "schemaKey == 'no'",
@@ -52,9 +52,9 @@ var playbook3 = &models.Playbook{
},
}
var playbook4 = &models.Playbook{
var playbook4 = &model.Playbook{
Name: "Malware",
Tasks: map[string]*models.Task{
Tasks: map[string]*model.Task{
"file-or-hash": {Next: map[string]string{
"enter-hash": "file == 'Hash'",
"upload": "file == 'File'",
@@ -71,8 +71,10 @@ var playbook4 = &models.Playbook{
}
func Test_canBeCompleted(t *testing.T) {
t.Parallel()
type args struct {
playbook *models.Playbook
playbook *model.Playbook
taskID string
}
tests := []struct {
@@ -83,18 +85,22 @@ func Test_canBeCompleted(t *testing.T) {
}{
{"playbook2 board", args{playbook: playbook2, taskID: "board"}, true, false},
{"playbook2 escalate", args{playbook: playbook2, taskID: "escalate"}, false, false},
{"playbook2 aquire-mail", args{playbook: playbook2, taskID: "aquire-mail"}, false, false},
{"playbook2 acquire-mail", args{playbook: playbook2, taskID: "acquire-mail"}, false, false},
{"playbook2 block-ioc", args{playbook: playbook2, taskID: "block-ioc"}, false, false},
{"playbook3 board", args{playbook: playbook3, taskID: "board"}, false, false},
{"playbook3 escalate", args{playbook: playbook3, taskID: "escalate"}, true, false},
{"playbook3 aquire-mail", args{playbook: playbook3, taskID: "aquire-mail"}, false, false},
{"playbook3 acquire-mail", args{playbook: playbook3, taskID: "acquire-mail"}, false, false},
{"playbook3 block-ioc", args{playbook: playbook3, taskID: "block-ioc"}, false, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := activePlaybook(tt.args.playbook, tt.args.taskID)
if (err != nil) != tt.wantErr {
t.Errorf("activePlaybook() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
@@ -105,8 +111,10 @@ func Test_canBeCompleted(t *testing.T) {
}
func Test_playbookOrder(t *testing.T) {
t.Parallel()
type args struct {
playbook *models.Playbook
playbook *model.Playbook
}
tests := []struct {
name string
@@ -117,10 +125,14 @@ func Test_playbookOrder(t *testing.T) {
{"playbook4", args{playbook: playbook4}, []string{"file-or-hash", "enter-hash", "upload", "hash", "virustotal"}, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := toPlaybookResponse(tt.args.playbook)
if (err != nil) != tt.wantErr {
t.Errorf("activePlaybook() error = %v, wantErr %v", err, tt.wantErr)
return
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
)
@@ -19,11 +20,13 @@ func (db *Database) RelatedCreate(ctx context.Context, id, id2 int64) error {
From: driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
To: driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
})
return err
}
func (db *Database) RelatedBatchCreate(ctx context.Context, edges []*driver.EdgeDocument) error {
_, err := db.relatedCollection.CreateEdges(ctx, edges)
return err
}
@@ -32,17 +35,17 @@ func (db *Database) RelatedRemove(ctx context.Context, id, id2 int64) error {
FOR d in @@collection
FILTER (d._from == @id && d._to == @id2) || (d._to == @id && d._from == @id2)
REMOVE d in @@collection`
_, _, err := db.Query(ctx, q, map[string]interface{}{
_, _, err := db.Query(ctx, q, map[string]any{
"@collection": RelatedTicketsCollectionName,
"id": driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
"id2": driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
}, &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
},
Msg: "Removed ticket/artifact relation",
})
return err
}

View File

@@ -2,85 +2,34 @@ package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toUserDataResponse(key string, doc *models.UserData) *models.UserDataResponse {
return &models.UserDataResponse{
Email: doc.Email,
ID: key,
Image: doc.Image,
Name: doc.Name,
Timeformat: doc.Timeformat,
func (db *Database) Settings(ctx context.Context) (*model.Settings, error) {
settings := &model.Settings{}
if _, err := db.settingsCollection.ReadDocument(ctx, "global", settings); err != nil {
return nil, err
}
return settings, nil
}
func (db *Database) UserDataCreate(ctx context.Context, id string, userdata *models.UserData) error {
if userdata == nil {
return errors.New("requires setting")
}
if id == "" {
return errors.New("requires username")
}
_, err := db.userdataCollection.CreateDocument(ctx, ctx, id, userdata)
return err
}
func (db *Database) UserDataGetOrCreate(ctx *gin.Context, id string, newUserData *models.UserData) (*models.UserDataResponse, error) {
setting, err := db.UserDataGet(ctx, id)
if err != nil {
return toUserDataResponse(id, newUserData), db.UserDataCreate(ctx, id, newUserData)
}
return setting, nil
}
func (db *Database) UserDataGet(ctx context.Context, id string) (*models.UserDataResponse, error) {
var doc models.UserData
meta, err := db.userdataCollection.ReadDocument(ctx, id, &doc)
func (db *Database) SaveSettings(ctx context.Context, settings *model.Settings) (*model.Settings, error) {
exists, err := db.settingsCollection.DocumentExists(ctx, "global")
if err != nil {
return nil, err
}
return toUserDataResponse(meta.Key, &doc), err
}
func (db *Database) UserDataList(ctx context.Context) ([]*models.UserDataResponse, error) {
query := "FOR d IN @@collection SORT d.username ASC RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserDataCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.UserDataResponse
for {
var doc models.UserData
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
} else if err != nil {
if exists {
if _, err := db.settingsCollection.ReplaceDocument(ctx, "global", settings); err != nil {
return nil, err
}
} else {
if _, err := db.settingsCollection.CreateDocument(ctx, ctx, "global", settings); err != nil {
return nil, err
}
docs = append(docs, toUserDataResponse(meta.Key, &doc))
}
return docs, err
}
func (db *Database) UserDataUpdate(ctx context.Context, id string, userdata *models.UserData) (*models.UserDataResponse, error) {
var doc models.UserData
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.userdataCollection.ReplaceDocument(ctx, id, userdata)
if err != nil {
return nil, err
}
return toUserDataResponse(meta.Key, &doc), nil
return settings, nil
}

View File

@@ -2,12 +2,14 @@ package database
import (
"context"
"fmt"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func (db *Database) Statistics(ctx context.Context) (*models.Statistics, error) {
func (db *Database) Statistics(ctx context.Context) (*model.Statistics, error) {
query := `RETURN {
tickets_per_type: MERGE(FOR d in tickets
COLLECT type = d.type WITH COUNT INTO typecount
@@ -34,10 +36,56 @@ func (db *Database) Statistics(ctx context.Context) (*models.Statistics, error)
}
defer cur.Close()
statistics := models.Statistics{}
statistics := model.Statistics{}
if _, err := cur.ReadDocument(ctx, &statistics); err != nil {
return nil, err
}
return &statistics, nil
}
func (db *Database) WidgetData(ctx context.Context, aggregation string, filter *string) (map[string]any, error) {
parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
queryTree, err := parser.Parse(aggregation)
if err != nil {
return nil, fmt.Errorf("invalid aggregation query (%s): syntax error", aggregation)
}
aggregationString, err := queryTree.String()
if err != nil {
return nil, fmt.Errorf("invalid widget aggregation query (%s): %w", aggregation, err)
}
aggregation = aggregationString
filterQ := ""
if filter != nil && *filter != "" {
queryTree, err := parser.Parse(*filter)
if err != nil {
return nil, fmt.Errorf("invalid filter query (%s): syntax error", *filter)
}
filterString, err := queryTree.String()
if err != nil {
return nil, fmt.Errorf("invalid widget filter query (%s): %w", *filter, err)
}
filterQ = "FILTER " + filterString
}
query := `RETURN MERGE(FOR d in tickets
` + filterQ + `
COLLECT field = ` + aggregation + ` WITH COUNT INTO count
RETURN ZIP([field], [count]))`
cur, _, err := db.Query(ctx, query, nil, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cur.Close()
statistics := map[string]any{}
if _, err := cur.ReadDocument(ctx, &statistics); err != nil {
return nil, err
}
return statistics, nil
}

View File

@@ -6,18 +6,18 @@ import (
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
type playbookResponse struct {
PlaybookId string `json:"playbook_id"`
PlaybookName string `json:"playbook_name"`
Playbook models.Playbook `json:"playbook"`
TicketId int64 `json:"ticket_id"`
TicketName string `json:"ticket_name"`
PlaybookID string `json:"playbook_id"`
PlaybookName string `json:"playbook_name"`
Playbook model.Playbook `json:"playbook"`
TicketID int64 `json:"ticket_id"`
TicketName string `json:"ticket_name"`
}
func (db *Database) TaskList(ctx context.Context) ([]*models.TaskWithContext, error) {
func (db *Database) TaskList(ctx context.Context) ([]*model.TaskWithContext, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -28,14 +28,14 @@ func (db *Database) TaskList(ctx context.Context) ([]*models.TaskWithContext, er
FILTER d.status == 'open'
FOR playbook IN NOT_NULL(VALUES(d.playbooks), [])
RETURN { ticket_id: TO_NUMBER(d._key), ticket_name: d.name, playbook_id: POSITION(d.playbooks, playbook, true), playbook_name: playbook.name, playbook: playbook }`
cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]interface{}{
cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]any{
"@collection": TicketCollectionName,
}), busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
docs := []*models.TaskWithContext{}
var docs []*model.TaskWithContext
for {
var doc playbookResponse
_, err := cursor.ReadDocument(ctx, &doc)
@@ -45,7 +45,6 @@ func (db *Database) TaskList(ctx context.Context) ([]*models.TaskWithContext, er
return nil, err
}
playbook, err := toPlaybookResponse(&doc.Playbook)
if err != nil {
return nil, err
@@ -53,11 +52,11 @@ func (db *Database) TaskList(ctx context.Context) ([]*models.TaskWithContext, er
for _, task := range playbook.Tasks {
if task.Active {
docs = append(docs, &models.TaskWithContext{
PlaybookId: doc.PlaybookId,
docs = append(docs, &model.TaskWithContext{
PlaybookId: doc.PlaybookID,
PlaybookName: doc.PlaybookName,
Task: *task,
TicketId: doc.TicketId,
Task: task,
TicketId: doc.TicketID,
TicketName: doc.TicketName,
})
}

View File

@@ -8,18 +8,18 @@ import (
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toTicketTemplate(doc *models.TicketTemplateForm) *models.TicketTemplate {
return &models.TicketTemplate{Name: doc.Name, Schema: doc.Schema}
func toTicketTemplate(doc *model.TicketTemplateForm) *model.TicketTemplate {
return &model.TicketTemplate{Name: doc.Name, Schema: doc.Schema}
}
func toTicketTemplateResponse(key string, doc *models.TicketTemplate) *models.TicketTemplateResponse {
return &models.TicketTemplateResponse{ID: key, Name: doc.Name, Schema: doc.Schema}
func toTicketTemplateResponse(key string, doc *model.TicketTemplate) *model.TicketTemplateResponse {
return &model.TicketTemplateResponse{ID: key, Name: doc.Name, Schema: doc.Schema}
}
func (db *Database) TemplateCreate(ctx context.Context, template *models.TicketTemplateForm) (*models.TicketTemplateResponse, error) {
func (db *Database) TemplateCreate(ctx context.Context, template *model.TicketTemplateForm) (*model.TicketTemplateResponse, error) {
if template == nil {
return nil, errors.New("requires template")
}
@@ -27,7 +27,7 @@ func (db *Database) TemplateCreate(ctx context.Context, template *models.TicketT
return nil, errors.New("requires template name")
}
var doc models.TicketTemplate
var doc model.TicketTemplate
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.templateCollection.CreateDocument(ctx, newctx, strcase.ToKebab(template.Name), toTicketTemplate(template))
@@ -38,8 +38,8 @@ func (db *Database) TemplateCreate(ctx context.Context, template *models.TicketT
return toTicketTemplateResponse(meta.Key, &doc), nil
}
func (db *Database) TemplateGet(ctx context.Context, id string) (*models.TicketTemplateResponse, error) {
var doc models.TicketTemplate
func (db *Database) TemplateGet(ctx context.Context, id string) (*model.TicketTemplateResponse, error) {
var doc model.TicketTemplate
meta, err := db.templateCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -48,8 +48,8 @@ func (db *Database) TemplateGet(ctx context.Context, id string) (*models.TicketT
return toTicketTemplateResponse(meta.Key, &doc), nil
}
func (db *Database) TemplateUpdate(ctx context.Context, id string, template *models.TicketTemplateForm) (*models.TicketTemplateResponse, error) {
var doc models.TicketTemplate
func (db *Database) TemplateUpdate(ctx context.Context, id string, template *model.TicketTemplateForm) (*model.TicketTemplateResponse, error) {
var doc model.TicketTemplate
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.templateCollection.ReplaceDocument(ctx, id, toTicketTemplate(template))
@@ -62,19 +62,20 @@ func (db *Database) TemplateUpdate(ctx context.Context, id string, template *mod
func (db *Database) TemplateDelete(ctx context.Context, id string) error {
_, err := db.templateCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) TemplateList(ctx context.Context) ([]*models.TicketTemplateResponse, error) {
func (db *Database) TemplateList(ctx context.Context) ([]*model.TicketTemplateResponse, error) {
query := "FOR d IN @@collection RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": TemplateCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": TemplateCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.TicketTemplateResponse
var docs []*model.TicketTemplateResponse
for {
var doc models.TicketTemplate
var doc model.TicketTemplate
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break

View File

@@ -6,22 +6,26 @@ import (
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/test"
)
var template1 = &models.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Template 1",
}
var default1 = &models.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Default",
}
var (
template1 = &model.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Template 1",
}
default1 = &model.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Default",
}
)
func TestDatabase_TemplateCreate(t *testing.T) {
t.Parallel()
type args struct {
template *models.TicketTemplateForm
template *model.TicketTemplateForm
}
tests := []struct {
name string
@@ -31,11 +35,14 @@ func TestDatabase_TemplateCreate(t *testing.T) {
{name: "Normal", args: args{template: template1}},
{name: "Duplicate", args: args{template: default1}, wantErr: true},
{name: "Nil template", args: args{}, wantErr: true},
{name: "Template without fields", args: args{template: &models.TicketTemplateForm{}}, wantErr: true},
{name: "Only name", args: args{template: &models.TicketTemplateForm{Name: "name"}}, wantErr: false},
{name: "Template without fields", args: args{template: &model.TicketTemplateForm{}}, wantErr: true},
{name: "Only name", args: args{template: &model.TicketTemplateForm{Name: "name"}}, wantErr: false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -50,6 +57,8 @@ func TestDatabase_TemplateCreate(t *testing.T) {
}
func TestDatabase_TemplateDelete(t *testing.T) {
t.Parallel()
type args struct {
id string
}
@@ -62,7 +71,10 @@ func TestDatabase_TemplateDelete(t *testing.T) {
{name: "Not existing", args: args{"foobar"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -81,20 +93,25 @@ func TestDatabase_TemplateDelete(t *testing.T) {
}
func TestDatabase_TemplateGet(t *testing.T) {
t.Parallel()
type args struct {
id string
}
tests := []struct {
name string
args args
want *models.TicketTemplateResponse
want *model.TicketTemplateResponse
wantErr bool
}{
{name: "Normal", args: args{id: "default"}, want: &models.TicketTemplateResponse{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}},
{name: "Normal", args: args{id: "default"}, want: &model.TicketTemplateResponse{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}},
{name: "Not existing", args: args{id: "foobar"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -108,6 +125,7 @@ func TestDatabase_TemplateGet(t *testing.T) {
got, err := db.TemplateGet(test.Context(), tt.args.id)
if (err != nil) != tt.wantErr {
t.Errorf("TemplateGet() error = %v, wantErr %v", err, tt.wantErr)
return
}
if err != nil {
@@ -120,15 +138,20 @@ func TestDatabase_TemplateGet(t *testing.T) {
}
func TestDatabase_TemplateList(t *testing.T) {
t.Parallel()
tests := []struct {
name string
want []*models.TicketTemplateResponse
want []*model.TicketTemplateResponse
wantErr bool
}{
{name: "Normal", want: []*models.TicketTemplateResponse{{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}, {ID: "template-1", Name: template1.Name, Schema: template1.Schema}}},
{name: "Normal", want: []*model.TicketTemplateResponse{{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}, {ID: "template-1", Name: template1.Name, Schema: template1.Schema}}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -142,6 +165,7 @@ func TestDatabase_TemplateList(t *testing.T) {
got, err := db.TemplateList(test.Context())
if (err != nil) != tt.wantErr {
t.Errorf("TemplateList() error = %v, wantErr %v", err, tt.wantErr)
return
}
assert.Equal(t, got, tt.want)
@@ -150,9 +174,11 @@ func TestDatabase_TemplateList(t *testing.T) {
}
func TestDatabase_TemplateUpdate(t *testing.T) {
t.Parallel()
type args struct {
id string
template *models.TicketTemplateForm
template *model.TicketTemplateForm
}
tests := []struct {
name string
@@ -163,7 +189,10 @@ func TestDatabase_TemplateUpdate(t *testing.T) {
{name: "Not existing", args: args{"foobar", template1}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)

View File

@@ -9,24 +9,25 @@ import (
"strconv"
"strings"
"sync"
"time"
"github.com/arangodb/go-driver"
"github.com/xeipuuv/gojsonschema"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
"github.com/SecurityBrewery/catalyst/index"
)
func toTicket(ticketForm *models.TicketForm) (interface{}, error) {
func toTicket(ticketForm *model.TicketForm) (any, error) {
playbooks, err := toPlaybooks(ticketForm.Playbooks)
if err != nil {
return nil, err
}
ticket := &models.Ticket{
ticket := &model.Ticket{
Artifacts: ticketForm.Artifacts,
Comments: ticketForm.Comments,
Details: ticketForm.Details,
@@ -64,13 +65,14 @@ func toTicket(ticketForm *models.TicketForm) (interface{}, error) {
ticket.Status = "open"
}
if ticketForm.ID != nil {
return &busdb.Keyed{Key: strconv.FormatInt(*ticketForm.ID, 10), Doc: ticket}, nil
return &busdb.Keyed[model.Ticket]{Key: strconv.FormatInt(*ticketForm.ID, 10), Doc: ticket}, nil
}
return ticket, nil
}
func toTicketResponses(tickets []*models.TicketSimpleResponse) ([]*models.TicketResponse, error) {
var extendedTickets []*models.TicketResponse
func toTicketResponses(tickets []*model.TicketSimpleResponse) ([]*model.TicketResponse, error) {
var extendedTickets []*model.TicketResponse
for _, simple := range tickets {
tr, err := toTicketResponse(simple)
if err != nil {
@@ -78,16 +80,17 @@ func toTicketResponses(tickets []*models.TicketSimpleResponse) ([]*models.Ticket
}
extendedTickets = append(extendedTickets, tr)
}
return extendedTickets, nil
}
func toTicketResponse(ticket *models.TicketSimpleResponse) (*models.TicketResponse, error) {
func toTicketResponse(ticket *model.TicketSimpleResponse) (*model.TicketResponse, error) {
playbooks, err := toPlaybookResponses(ticket.Playbooks)
if err != nil {
return nil, err
}
return &models.TicketResponse{
return &model.TicketResponse{
ID: ticket.ID,
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
@@ -107,13 +110,13 @@ func toTicketResponse(ticket *models.TicketSimpleResponse) (*models.TicketRespon
}, nil
}
func toTicketSimpleResponse(key string, ticket *models.Ticket) (*models.TicketSimpleResponse, error) {
func toTicketSimpleResponse(key string, ticket *model.Ticket) (*model.TicketSimpleResponse, error) {
id, err := strconv.ParseInt(key, 10, 64)
if err != nil {
return nil, err
}
return &models.TicketSimpleResponse{
return &model.TicketSimpleResponse{
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
Created: ticket.Created,
@@ -133,8 +136,8 @@ func toTicketSimpleResponse(key string, ticket *models.Ticket) (*models.TicketSi
}, nil
}
func toTicketWithTickets(ticketResponse *models.TicketResponse, tickets []*models.TicketSimpleResponse) *models.TicketWithTickets {
return &models.TicketWithTickets{
func toTicketWithTickets(ticketResponse *model.TicketResponse, tickets []*model.TicketSimpleResponse, logs []*model.LogEntry) *model.TicketWithTickets {
return &model.TicketWithTickets{
Artifacts: ticketResponse.Artifacts,
Comments: ticketResponse.Comments,
Created: ticketResponse.Created,
@@ -152,12 +155,13 @@ func toTicketWithTickets(ticketResponse *models.TicketResponse, tickets []*model
Type: ticketResponse.Type,
Write: ticketResponse.Write,
Logs: logs,
Tickets: tickets,
}
}
func toPlaybookResponses(playbooks map[string]*models.Playbook) (map[string]*models.PlaybookResponse, error) {
pr := map[string]*models.PlaybookResponse{}
func toPlaybookResponses(playbooks map[string]*model.Playbook) (map[string]*model.PlaybookResponse, error) {
pr := map[string]*model.PlaybookResponse{}
var err error
for k, v := range playbooks {
pr[k], err = toPlaybookResponse(v)
@@ -165,18 +169,19 @@ func toPlaybookResponses(playbooks map[string]*models.Playbook) (map[string]*mod
return nil, err
}
}
return pr, nil
}
func toPlaybookResponse(playbook *models.Playbook) (*models.PlaybookResponse, error) {
func toPlaybookResponse(playbook *model.Playbook) (*model.PlaybookResponse, error) {
graph, err := playbookGraph(playbook)
if err != nil {
return nil, err
}
re := &models.PlaybookResponse{
re := &model.PlaybookResponse{
Name: playbook.Name,
Tasks: map[string]*models.TaskResponse{},
Tasks: map[string]*model.TaskResponse{},
}
results, err := graph.Toposort()
@@ -193,23 +198,24 @@ func toPlaybookResponse(playbook *models.Playbook) (*models.PlaybookResponse, er
re.Tasks[taskID] = rootTask
i++
}
return re, nil
}
func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*models.TicketForm) ([]*models.TicketResponse, error) {
func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*model.TicketForm) ([]*model.TicketResponse, error) {
update, err := db.Hooks.IngestionFilter(ctx, db.Index)
if err != nil {
return nil, err
}
var dbTickets []interface{}
var dbTickets []any
for _, ticketForm := range ticketForms {
ticket, err := toTicket(ticketForm)
if err != nil {
return nil, err
}
if err := validate(ticket, models.TicketSchema); err != nil {
if err := validate(ticket, model.TicketSchema); err != nil {
return nil, err
}
@@ -229,7 +235,7 @@ func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*models
LET noiddoc = UNSET(keyeddoc, "id")
INSERT noiddoc INTO @@collection
RETURN NEW`
apiTickets, _, err := db.ticketListQuery(ctx, query, mergeMaps(map[string]interface{}{
apiTickets, _, err := db.ticketListQuery(ctx, query, mergeMaps(map[string]any{
"tickets": dbTickets,
}, ticketFilterVars), busdb.CreateOperation)
if err != nil {
@@ -244,9 +250,11 @@ func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*models
for _, apiTicket := range apiTickets {
ids = append(ids, driver.NewDocumentID(TicketCollectionName, fmt.Sprint(apiTicket.ID)))
}
if err := db.BusDatabase.LogAndNotify(ctx, ids, "Ticket created"); err != nil {
return nil, err
}
db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{
IDs: ids,
Type: bus.DatabaseEntryCreated,
})
ticketResponses, err := toTicketResponses(apiTickets)
if err != nil {
@@ -277,26 +285,27 @@ func (db *Database) IndexRebuild(ctx context.Context) error {
return batchIndex(db.Index, tickets)
}
func batchIndex(index *index.Index, tickets []*models.TicketSimpleResponse) error {
func batchIndex(index *index.Index, tickets []*model.TicketSimpleResponse) error {
var wg sync.WaitGroup
var batch []*models.TicketSimpleResponse
var batch []*model.TicketSimpleResponse
for _, ticket := range tickets {
batch = append(batch, ticket)
if len(batch) > 100 {
wg.Add(1)
go func(docs []*models.TicketSimpleResponse) {
go func(docs []*model.TicketSimpleResponse) {
index.Index(docs)
wg.Done()
}(batch)
batch = []*models.TicketSimpleResponse{}
batch = []*model.TicketSimpleResponse{}
}
}
wg.Wait()
return nil
}
func (db *Database) TicketGet(ctx context.Context, ticketID int64) (*models.TicketWithTickets, error) {
func (db *Database) TicketGet(ctx context.Context, ticketID int64) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
if err != nil {
return nil, err
@@ -305,9 +314,9 @@ func (db *Database) TicketGet(ctx context.Context, ticketID int64) (*models.Tick
return db.ticketGetQuery(ctx, ticketID, `LET d = DOCUMENT(@@collection, @ID) `+ticketFilterQuery+` RETURN d`, ticketFilterVars, busdb.ReadOperation)
}
func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query string, bindVars map[string]interface{}, operation *busdb.Operation) (*models.TicketWithTickets, error) {
func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query string, bindVars map[string]any, operation *busdb.Operation) (*model.TicketWithTickets, error) {
if bindVars == nil {
bindVars = map[string]interface{}{}
bindVars = map[string]any{}
}
bindVars["@collection"] = TicketCollectionName
if ticketID != 0 {
@@ -320,7 +329,7 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
}
defer cur.Close()
ticket := models.Ticket{}
ticket := model.Ticket{}
meta, err := cur.ReadDocument(ctx, &ticket)
if err != nil {
return nil, err
@@ -332,7 +341,7 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
}
// index
go db.Index.Index([]*models.TicketSimpleResponse{ticketSimpleResponse})
go db.Index.Index([]*model.TicketSimpleResponse{ticketSimpleResponse})
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
if err != nil {
@@ -349,7 +358,7 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
` + ticketFilterQuery + `
RETURN d`
outTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
outTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"graph": TicketArtifactsGraphName,
"@tickets": TicketCollectionName,
@@ -367,7 +376,7 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
` + ticketFilterQuery + `
RETURN d`
inTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
inTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"graph": TicketArtifactsGraphName,
"@tickets": TicketCollectionName,
@@ -386,7 +395,7 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
FOR a IN NOT_NULL(d.artifacts, [])
FILTER POSITION(@artifacts, a.name)
RETURN d`
sameArtifactTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
sameArtifactTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"artifacts": artifactNames,
}, ticketFilterVars), busdb.ReadOperation)
@@ -394,7 +403,8 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
return nil, err
}
tickets := append(outTickets, inTickets...)
tickets := outTickets
tickets = append(tickets, inTickets...)
tickets = append(tickets, sameArtifactTickets...)
sort.Slice(tickets, func(i, j int) bool {
return tickets[i].ID < tickets[j].ID
@@ -405,10 +415,15 @@ func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query st
return nil, err
}
return toTicketWithTickets(ticketResponse, tickets), nil
logs, err := db.LogList(ctx, fmt.Sprintf("%s/%d", TicketCollectionName, ticketID))
if err != nil {
return nil, err
}
return toTicketWithTickets(ticketResponse, tickets, logs), nil
}
func (db *Database) TicketUpdate(ctx context.Context, ticketID int64, ticket *models.Ticket) (*models.TicketWithTickets, error) {
func (db *Database) TicketUpdate(ctx context.Context, ticketID int64, ticket *model.Ticket) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -419,11 +434,11 @@ func (db *Database) TicketUpdate(ctx context.Context, ticketID int64, ticket *mo
REPLACE d WITH @ticket IN @@collection
RETURN NEW`
ticket.Modified = time.Now().UTC() // TODO make setable?
return db.ticketGetQuery(ctx, ticketID, query, mergeMaps(map[string]interface{}{"ticket": ticket}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update, Ids: []driver.DocumentID{
return db.ticketGetQuery(ctx, ticketID, query, mergeMaps(map[string]any{"ticket": ticket}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated, Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, strconv.FormatInt(ticketID, 10)),
},
Msg: "Ticket updated",
})
}
@@ -441,19 +456,18 @@ func (db *Database) TicketDelete(ctx context.Context, ticketID int64) error {
return nil
}
func (db *Database) TicketList(ctx context.Context, ticketType string, query string, sorts []string, desc []bool, offset, count int64) (*models.TicketList, error) {
binVars := map[string]interface{}{}
func (db *Database) TicketList(ctx context.Context, ticketType string, query string, sorts []string, desc []bool, offset, count int64) (*model.TicketList, error) {
binVars := map[string]any{}
parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
var typeString = ""
typeString := ""
if ticketType != "" {
typeString = "FILTER d.type == @type "
binVars["type"] = ticketType
}
var filterString = ""
filterString := ""
if query != "" {
parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
queryTree, err := parser.Parse(query)
if err != nil {
return nil, errors.New("invalid filter query: syntax error")
@@ -489,16 +503,17 @@ func (db *Database) TicketList(ctx context.Context, ticketType string, query str
RETURN d`
// RETURN KEEP(d, "_key", "id", "name", "type", "created")`
ticketList, _, err := db.ticketListQuery(ctx, q, mergeMaps(binVars, ticketFilterVars), busdb.ReadOperation)
return &models.TicketList{
return &model.TicketList{
Count: documentCount,
Tickets: ticketList,
}, err
// return map[string]interface{}{"tickets": ticketList, "count": documentCount}, err
}
func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars map[string]interface{}, operation *busdb.Operation) ([]*models.TicketSimpleResponse, *models.LogEntry, error) {
func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars map[string]any, operation *busdb.Operation) ([]*model.TicketSimpleResponse, *model.LogEntry, error) {
if bindVars == nil {
bindVars = map[string]interface{}{}
bindVars = map[string]any{}
}
bindVars["@collection"] = TicketCollectionName
@@ -508,9 +523,9 @@ func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars
}
defer cursor.Close()
var docs []*models.TicketSimpleResponse
var docs []*model.TicketSimpleResponse
for {
doc := models.Ticket{}
doc := model.Ticket{}
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
@@ -529,9 +544,9 @@ func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars
return docs, logEntry, nil
}
func (db *Database) TicketCount(ctx context.Context, typequery, filterquery string, bindVars map[string]interface{}) (int, error) {
func (db *Database) TicketCount(ctx context.Context, typequery, filterquery string, bindVars map[string]any) (int, error) {
if bindVars == nil {
bindVars = map[string]interface{}{}
bindVars = map[string]any{}
}
bindVars["@collection"] = TicketCollectionName
@@ -551,11 +566,12 @@ func (db *Database) TicketCount(ctx context.Context, typequery, filterquery stri
return 0, err
}
cursor.Close()
return documentCount, nil
}
func sortQuery(paramsSort []string, paramsDesc []bool, bindVars map[string]interface{}) string {
sort := ""
func sortQuery(paramsSort []string, paramsDesc []bool, bindVars map[string]any) string {
sortQuery := ""
if len(paramsSort) > 0 {
var sorts []string
for i, column := range paramsSort {
@@ -566,23 +582,25 @@ func sortQuery(paramsSort []string, paramsDesc []bool, bindVars map[string]inter
sorts = append(sorts, colsort)
bindVars[fmt.Sprintf("column%d", i)] = column
}
sort = "SORT " + strings.Join(sorts, ", ")
sortQuery = "SORT " + strings.Join(sorts, ", ")
}
return sort
return sortQuery
}
func mergeMaps(a map[string]interface{}, b map[string]interface{}) map[string]interface{} {
merged := map[string]interface{}{}
func mergeMaps(a map[string]any, b map[string]any) map[string]any {
merged := map[string]any{}
for k, v := range a {
merged[k] = v
}
for k, v := range b {
merged[k] = v
}
return merged
}
func validate(e interface{}, schema *gojsonschema.Schema) error {
func validate(e any, schema *gojsonschema.Schema) error {
b, err := json.Marshal(e)
if err != nil {
return err
@@ -598,7 +616,9 @@ func validate(e interface{}, schema *gojsonschema.Schema) error {
for _, e := range res.Errors() {
l = append(l, e.String())
}
return fmt.Errorf("validation failed: %v", strings.Join(l, ", "))
}
return nil
}

View File

@@ -4,18 +4,20 @@ import (
"context"
"errors"
"fmt"
"time"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
maut "github.com/jonas-plum/maut/auth"
"github.com/mingrammer/commonregex"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/generated/time"
)
func (db *Database) AddArtifact(ctx context.Context, id int64, artifact *models.Artifact) (*models.TicketWithTickets, error) {
func (db *Database) AddArtifact(ctx context.Context, id int64, artifact *model.Artifact) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -31,14 +33,14 @@ func (db *Database) AddArtifact(ctx context.Context, id int64, artifact *models.
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "artifacts": PUSH(NOT_NULL(d.artifacts, []), @artifact) } IN @@collection
UPDATE d WITH { "modified": @now, "artifacts": PUSH(NOT_NULL(d.artifacts, []), @artifact) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"artifact": artifact}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"artifact": artifact, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Add artifact",
})
}
@@ -57,10 +59,11 @@ func inferType(name string) string {
case commonregex.SHA256HexRegex.MatchString(name):
return "sha256"
}
return "unknown"
}
func (db *Database) RemoveArtifact(ctx context.Context, id int64, name string) (*models.TicketWithTickets, error) {
func (db *Database) RemoveArtifact(ctx context.Context, id int64, name string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -71,18 +74,18 @@ func (db *Database) RemoveArtifact(ctx context.Context, id int64, name string) (
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET newartifacts = REMOVE_VALUE(d.artifacts, a)
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "artifacts": newartifacts } IN @@collection
UPDATE d WITH { "modified": @now, "artifacts": newartifacts } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"name": name}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"name": name, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Remove artifact",
})
}
func (db *Database) SetTemplate(ctx context.Context, id int64, schema string) (*models.TicketWithTickets, error) {
func (db *Database) SetTemplate(ctx context.Context, id int64, schema string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -92,23 +95,23 @@ func (db *Database) SetTemplate(ctx context.Context, id int64, schema string) (*
` + ticketFilterQuery + `
UPDATE d WITH { "schema": @schema } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"schema": schema}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"schema": schema}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Set Template",
})
}
func (db *Database) AddComment(ctx context.Context, id int64, comment *models.CommentForm) (*models.TicketWithTickets, error) {
func (db *Database) AddComment(ctx context.Context, id int64, comment *model.CommentForm) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
if comment.Creator == nil || *comment.Creator == "" {
user, exists := busdb.UserFromContext(ctx)
user, _, exists := maut.UserFromContext(ctx)
if !exists {
return nil, errors.New("no user in context")
}
@@ -122,18 +125,18 @@ func (db *Database) AddComment(ctx context.Context, id int64, comment *models.Co
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "comments": PUSH(NOT_NULL(d.comments, []), @comment) } IN @@collection
UPDATE d WITH { "modified": @now, "comments": PUSH(NOT_NULL(d.comments, []), @comment) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"comment": comment}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"comment": comment, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Add comment",
})
}
func (db *Database) RemoveComment(ctx context.Context, id int64, commentID int64) (*models.TicketWithTickets, error) {
func (db *Database) RemoveComment(ctx context.Context, id int64, commentID int64) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -141,18 +144,18 @@ func (db *Database) RemoveComment(ctx context.Context, id int64, commentID int64
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "comments": REMOVE_NTH(d.comments, @commentID) } IN @@collection
UPDATE d WITH { "modified": @now, "comments": REMOVE_NTH(d.comments, @commentID) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"commentID": commentID}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"commentID": commentID, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Remove comment",
})
}
func (db *Database) SetReferences(ctx context.Context, id int64, references []*models.Reference) (*models.TicketWithTickets, error) {
func (db *Database) SetReferences(ctx context.Context, id int64, references []*model.Reference) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -160,18 +163,18 @@ func (db *Database) SetReferences(ctx context.Context, id int64, references []*m
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "references": @references } IN @@collection
UPDATE d WITH { "modified": @now, "references": @references } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"references": references}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"references": references, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Changed references",
})
}
func (db *Database) LinkFiles(ctx context.Context, id int64, files []*models.File) (*models.TicketWithTickets, error) {
func (db *Database) AddFile(ctx context.Context, id int64, file *model.File) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -179,18 +182,18 @@ func (db *Database) LinkFiles(ctx context.Context, id int64, files []*models.Fil
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "files": @files } IN @@collection
UPDATE d WITH { "modified": @now, "files": APPEND(NOT_NULL(d.files, []), [@file]) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"files": files}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"file": file, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
Msg: "Linked files",
})
}
func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTemplate *models.PlaybookTemplateForm) (*models.TicketWithTickets, error) {
func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTemplate *model.PlaybookTemplateForm) (*model.TicketWithTickets, error) {
pb, err := toPlaybook(playbookTemplate)
if err != nil {
return nil, err
@@ -216,18 +219,18 @@ func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTem
FILTER d._key == @ID
LET newplaybook = ZIP( [@playbookID], [@playbook] )
LET newplaybooks = MERGE(NOT_NULL(d.playbooks, {}), newplaybook)
LET newticket = MERGE(d, { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks })
LET newticket = MERGE(d, { "modified": @now, "playbooks": newplaybooks })
REPLACE d WITH newticket IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbook": pb,
"playbookID": findName(parentTicket.Playbooks, playbookID),
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
Msg: "Added playbook",
})
if err != nil {
return nil, err
@@ -240,7 +243,7 @@ func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTem
return ticket, nil
}
func findName(playbooks map[string]*models.PlaybookResponse, name string) string {
func findName(playbooks map[string]*model.PlaybookResponse, name string) string {
if _, ok := playbooks[name]; !ok {
return name
}
@@ -253,10 +256,10 @@ func findName(playbooks map[string]*models.PlaybookResponse, name string) string
}
}
func runRootTask(ticket *models.TicketResponse, playbookID string, db *Database) error {
func runRootTask(ticket *model.TicketResponse, playbookID string, db *Database) error {
playbook := ticket.Playbooks[playbookID]
var root *models.TaskResponse
var root *model.TaskResponse
for _, task := range playbook.Tasks {
if task.Order == 0 {
root = task
@@ -264,10 +267,11 @@ func runRootTask(ticket *models.TicketResponse, playbookID string, db *Database)
}
runNextTasks(ticket.ID, playbookID, root.Next, root.Data, ticket, db)
return nil
}
func (db *Database) RemoveTicketPlaybook(ctx context.Context, id int64, playbookID string) (*models.TicketWithTickets, error) {
func (db *Database) RemoveTicketPlaybook(ctx context.Context, id int64, playbookID string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -277,15 +281,16 @@ func (db *Database) RemoveTicketPlaybook(ctx context.Context, id int64, playbook
` + ticketFilterQuery + `
FILTER d._key == @ID
LET newplaybooks = UNSET(d.playbooks, @playbookID)
REPLACE d WITH MERGE(d, { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks }) IN @@collection
REPLACE d WITH MERGE(d, { "modified": @now, "playbooks": newplaybooks }) IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
Msg: fmt.Sprintf("Removed playbook %s", playbookID),
})
}

View File

@@ -5,16 +5,17 @@ import (
"errors"
"fmt"
"log"
"time"
"github.com/arangodb/go-driver"
"github.com/google/uuid"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
func (db *Database) TaskGet(ctx context.Context, id int64, playbookID string, taskID string) (*models.TicketWithTickets, *models.PlaybookResponse, *models.TaskWithContext, error) {
func (db *Database) TaskGet(ctx context.Context, id int64, playbookID string, taskID string) (*model.TicketWithTickets, *model.PlaybookResponse, *model.TaskWithContext, error) {
inc, err := db.TicketGet(ctx, id)
if err != nil {
return nil, nil, nil, err
@@ -30,17 +31,17 @@ func (db *Database) TaskGet(ctx context.Context, id int64, playbookID string, ta
return nil, nil, nil, errors.New("task does not exist")
}
return inc, playbook, &models.TaskWithContext{
return inc, playbook, &model.TaskWithContext{
PlaybookId: playbookID,
PlaybookName: playbook.Name,
TaskId: taskID,
Task: *task,
Task: task,
TicketId: id,
TicketName: inc.Name,
}, nil
}
func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID string, taskID string, data interface{}) (*models.TicketWithTickets, error) {
func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID string, taskID string, data any) (*model.TicketWithTickets, error) {
inc, err := db.TicketGet(ctx, id)
if err != nil {
return nil, err
@@ -65,19 +66,19 @@ func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID strin
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks } IN @@collection
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"taskID": taskID,
"data": data,
"closed": time.Now().UTC(),
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
Msg: fmt.Sprintf("Completed task %s in playbook %s", taskID, playbookID),
})
if err != nil {
return nil, err
@@ -91,8 +92,8 @@ func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID strin
return ticket, nil
}
func extractTicketResponse(ticket *models.TicketWithTickets) *models.TicketResponse {
return &models.TicketResponse{
func extractTicketResponse(ticket *model.TicketWithTickets) *model.TicketResponse {
return &model.TicketResponse{
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
Created: ticket.Created,
@@ -112,7 +113,7 @@ func extractTicketResponse(ticket *models.TicketWithTickets) *models.TicketRespo
}
}
func (db *Database) TaskUpdate(ctx context.Context, id int64, playbookID string, taskID string, task *models.Task) (*models.TicketWithTickets, error) {
func (db *Database) TaskUpdateOwner(ctx context.Context, id int64, playbookID string, taskID string, owner string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
@@ -121,22 +122,59 @@ func (db *Database) TaskUpdate(ctx context.Context, id int64, playbookID string,
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
LET playbook = d.playbooks[@playbookID]
LET newtasks = MERGE(playbook.tasks, { @taskID: @task } )
LET task = playbook.tasks[@taskID]
LET newtask = MERGE(task, {"owner": @owner })
LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks } IN @@collection
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"taskID": taskID,
"task": task,
"owner": owner,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
OperationType: busdb.Update,
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
})
if err != nil {
return nil, err
}
return ticket, nil
}
func (db *Database) TaskUpdateData(ctx context.Context, id int64, playbookID string, taskID string, data map[string]any) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
LET playbook = d.playbooks[@playbookID]
LET task = playbook.tasks[@taskID]
LET newtask = MERGE(task, {"data": @data })
LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"taskID": taskID,
"data": data,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
Msg: fmt.Sprintf("Saved task %s in playbook %s", taskID, playbookID),
})
if err != nil {
return nil, err
@@ -151,8 +189,8 @@ func (db *Database) TaskRun(ctx context.Context, id int64, playbookID string, ta
return err
}
if task.Task.Type == models.TaskTypeAutomation {
if err := runTask(id, playbookID, taskID, &task.Task, extractTicketResponse(ticket), db); err != nil {
if task.Task.Type == model.TaskTypeAutomation {
if err := runTask(id, playbookID, taskID, task.Task, extractTicketResponse(ticket), db); err != nil {
return err
}
}
@@ -160,10 +198,10 @@ func (db *Database) TaskRun(ctx context.Context, id int64, playbookID string, ta
return nil
}
func runNextTasks(id int64, playbookID string, next map[string]string, data interface{}, ticket *models.TicketResponse, db *Database) {
func runNextTasks(id int64, playbookID string, next map[string]string, data any, ticket *model.TicketResponse, db *Database) {
for nextTaskID, requirement := range next {
nextTask := ticket.Playbooks[playbookID].Tasks[nextTaskID]
if nextTask.Type == models.TaskTypeAutomation {
if nextTask.Type == model.TaskTypeAutomation {
b, err := evalRequirement(requirement, data)
if err != nil {
continue
@@ -177,10 +215,11 @@ func runNextTasks(id int64, playbookID string, next map[string]string, data inte
}
}
func runTask(ticketID int64, playbookID string, taskID string, task *models.TaskResponse, ticket *models.TicketResponse, db *Database) error {
func runTask(ticketID int64, playbookID string, taskID string, task *model.TaskResponse, ticket *model.TicketResponse, db *Database) error {
playbook := ticket.Playbooks[playbookID]
msgContext := &models.Context{Playbook: playbook, Task: task, Ticket: ticket}
origin := &models.Origin{TaskOrigin: &models.TaskOrigin{TaskId: taskID, PlaybookId: playbookID, TicketId: ticketID}}
msgContext := &model.Context{Playbook: playbook, Task: task, Ticket: ticket}
origin := &model.Origin{TaskOrigin: &model.TaskOrigin{TaskId: taskID, PlaybookId: playbookID, TicketId: ticketID}}
jobID := uuid.NewString()
return publishJobMapping(jobID, *task.Automation, msgContext, origin, task.Payload, db)
}

View File

@@ -8,11 +8,11 @@ import (
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toTicketType(doc *models.TicketTypeForm) *models.TicketType {
return &models.TicketType{
func toTicketType(doc *model.TicketTypeForm) *model.TicketType {
return &model.TicketType{
Name: doc.Name,
Icon: doc.Icon,
DefaultPlaybooks: doc.DefaultPlaybooks,
@@ -21,8 +21,8 @@ func toTicketType(doc *models.TicketTypeForm) *models.TicketType {
}
}
func toTicketTypeResponse(key string, doc *models.TicketType) *models.TicketTypeResponse {
return &models.TicketTypeResponse{
func toTicketTypeResponse(key string, doc *model.TicketType) *model.TicketTypeResponse {
return &model.TicketTypeResponse{
ID: key,
Name: doc.Name,
Icon: doc.Icon,
@@ -32,7 +32,7 @@ func toTicketTypeResponse(key string, doc *models.TicketType) *models.TicketType
}
}
func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *models.TicketTypeForm) (*models.TicketTypeResponse, error) {
func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *model.TicketTypeForm) (*model.TicketTypeResponse, error) {
if tickettype == nil {
return nil, errors.New("requires ticket type")
}
@@ -40,7 +40,7 @@ func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *models.Tic
return nil, errors.New("requires ticket type name")
}
var doc models.TicketType
var doc model.TicketType
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.tickettypeCollection.CreateDocument(ctx, newctx, strcase.ToKebab(tickettype.Name), toTicketType(tickettype))
@@ -51,8 +51,8 @@ func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *models.Tic
return toTicketTypeResponse(meta.Key, &doc), nil
}
func (db *Database) TicketTypeGet(ctx context.Context, id string) (*models.TicketTypeResponse, error) {
var doc models.TicketType
func (db *Database) TicketTypeGet(ctx context.Context, id string) (*model.TicketTypeResponse, error) {
var doc model.TicketType
meta, err := db.tickettypeCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -61,8 +61,8 @@ func (db *Database) TicketTypeGet(ctx context.Context, id string) (*models.Ticke
return toTicketTypeResponse(meta.Key, &doc), nil
}
func (db *Database) TicketTypeUpdate(ctx context.Context, id string, tickettype *models.TicketTypeForm) (*models.TicketTypeResponse, error) {
var doc models.TicketType
func (db *Database) TicketTypeUpdate(ctx context.Context, id string, tickettype *model.TicketTypeForm) (*model.TicketTypeResponse, error) {
var doc model.TicketType
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.tickettypeCollection.ReplaceDocument(ctx, id, toTicketType(tickettype))
@@ -75,19 +75,20 @@ func (db *Database) TicketTypeUpdate(ctx context.Context, id string, tickettype
func (db *Database) TicketTypeDelete(ctx context.Context, id string) error {
_, err := db.tickettypeCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) TicketTypeList(ctx context.Context) ([]*models.TicketTypeResponse, error) {
func (db *Database) TicketTypeList(ctx context.Context) ([]*model.TicketTypeResponse, error) {
query := "FOR d IN @@collection RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": TicketTypeCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": TicketTypeCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.TicketTypeResponse
var docs []*model.TicketTypeResponse
for {
var doc models.TicketType
var doc model.TicketType
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break

View File

@@ -3,19 +3,20 @@ package database
import (
"context"
"crypto/sha256"
"crypto/sha512"
"errors"
"fmt"
"log"
"math/rand"
"time"
"github.com/arangodb/go-driver"
"github.com/gin-gonic/gin"
"github.com/iancoleman/strcase"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/generated/time"
)
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_")
@@ -29,23 +30,24 @@ func generateKey() string {
for i := range b {
b[i] = letters[rand.Intn(len(letters))]
}
return string(b)
}
func toUser(user *models.UserForm, sha256 *string) *models.User {
roles := []string{}
roles = append(roles, role.Strings(role.Explodes(user.Roles))...)
u := &models.User{
func toUser(user *model.UserForm, salt, sha256, sha512 *string) *model.User {
u := &model.User{
Blocked: user.Blocked,
Roles: roles,
Roles: user.Roles,
Salt: salt,
Sha256: sha256,
Sha512: sha512,
Apikey: user.Apikey,
}
// log.Println(u)
// b, _ := json.Marshal(u)
// loader := gojsonschema.NewBytesLoader(b)
// res, err := models.UserSchema.Validate(loader)
// res, err := model.UserSchema.Validate(loader)
// if err != nil {
// log.Println(err)
// }
@@ -54,8 +56,8 @@ func toUser(user *models.UserForm, sha256 *string) *models.User {
return u
}
func toUserResponse(key string, user *models.User) *models.UserResponse {
return &models.UserResponse{
func toUserResponse(key string, user *model.User) *model.UserResponse {
return &model.UserResponse{
ID: key,
Roles: user.Roles,
Blocked: user.Blocked,
@@ -63,8 +65,8 @@ func toUserResponse(key string, user *models.User) *models.UserResponse {
}
}
func toNewUserResponse(key string, user *models.User, secret *string) *models.NewUserResponse {
return &models.NewUserResponse{
func toNewUserResponse(key string, user *model.User, secret *string) *model.NewUserResponse {
return &model.NewUserResponse{
ID: key,
Roles: user.Roles,
Secret: secret,
@@ -72,48 +74,50 @@ func toNewUserResponse(key string, user *models.User, secret *string) *models.Ne
}
}
func (db *Database) UserGetOrCreate(ctx *gin.Context, newUser *models.UserForm) (*models.UserResponse, error) {
func (db *Database) UserGetOrCreate(ctx context.Context, newUser *model.UserForm) (*model.UserResponse, error) {
user, err := db.UserGet(ctx, newUser.ID)
if err != nil {
newUser, err := db.UserCreate(ctx, newUser)
if err != nil {
return nil, err
}
return &models.UserResponse{ID: newUser.ID, Roles: newUser.Roles, Blocked: newUser.Blocked}, nil
return &model.UserResponse{ID: newUser.ID, Roles: newUser.Roles, Blocked: newUser.Blocked}, nil
}
return user, nil
}
func (db *Database) UserCreate(ctx context.Context, newUser *models.UserForm) (*models.NewUserResponse, error) {
var key string
var hash *string
func (db *Database) UserCreate(ctx context.Context, newUser *model.UserForm) (*model.NewUserResponse, error) {
var key, salt, sha256Hash, sha512Hash *string
if newUser.Apikey {
key = generateKey()
hash = pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
key, sha256Hash = generateAPIKey()
} else if newUser.Password != nil {
salt, sha512Hash = hashUserPassword(newUser)
}
var doc models.User
var doc model.User
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, hash))
meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, salt, sha256Hash, sha512Hash))
if err != nil {
return nil, err
}
return toNewUserResponse(meta.Key, &doc, pointer.String(key)), nil
return toNewUserResponse(meta.Key, &doc, key), nil
}
func (db *Database) UserCreateSetupAPIKey(ctx context.Context, key string) (*models.UserResponse, error) {
newUser := &models.UserForm{
func (db *Database) UserCreateSetupAPIKey(ctx context.Context, key string) (*model.UserResponse, error) {
newUser := &model.UserForm{
ID: "setup",
Roles: []string{role.Admin},
Roles: []string{maut.AdminRole},
Apikey: true,
Blocked: false,
}
hash := pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
sha256Hash := pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
var doc models.User
var doc model.User
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, hash))
meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, nil, sha256Hash, nil))
if err != nil {
return nil, err
}
@@ -121,8 +125,39 @@ func (db *Database) UserCreateSetupAPIKey(ctx context.Context, key string) (*mod
return toUserResponse(meta.Key, &doc), nil
}
func (db *Database) UserGet(ctx context.Context, id string) (*models.UserResponse, error) {
var doc models.User
func (db *Database) UserUpdate(ctx context.Context, id string, user *model.UserForm) (*model.UserResponse, error) {
var doc model.User
_, err := db.userCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
}
if doc.Apikey {
return nil, errors.New("cannot update an API key")
}
var salt, sha512Hash *string
if user.Password != nil {
salt, sha512Hash = hashUserPassword(user)
} else {
salt = doc.Salt
sha512Hash = doc.Sha512
}
ctx = driver.WithReturnNew(ctx, &doc)
user.ID = id
meta, err := db.userCollection.ReplaceDocument(ctx, id, toUser(user, salt, nil, sha512Hash))
if err != nil {
return nil, err
}
return toUserResponse(meta.Key, &doc), nil
}
func (db *Database) UserGet(ctx context.Context, id string) (*model.UserResponse, error) {
var doc model.User
meta, err := db.userCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
@@ -133,19 +168,20 @@ func (db *Database) UserGet(ctx context.Context, id string) (*models.UserRespons
func (db *Database) UserDelete(ctx context.Context, id string) error {
_, err := db.userCollection.RemoveDocument(ctx, id)
return err
}
func (db *Database) UserList(ctx context.Context) ([]*models.UserResponse, error) {
func (db *Database) UserList(ctx context.Context) ([]*model.UserResponse, error) {
query := "FOR d IN @@collection RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserCollectionName}, busdb.ReadOperation)
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": UserCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*models.UserResponse
var docs []*model.UserResponse
for {
var doc models.User
var doc model.User
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
@@ -159,18 +195,19 @@ func (db *Database) UserList(ctx context.Context) ([]*models.UserResponse, error
return docs, err
}
func (db *Database) UserByHash(ctx context.Context, sha256 string) (*models.UserResponse, error) {
func (db *Database) UserAPIKeyByHash(ctx context.Context, sha256 string) (*model.UserResponse, error) {
query := `FOR d in @@collection
FILTER d.sha256 == @sha256
FILTER d.apikey && d.sha256 == @sha256
RETURN d`
cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserCollectionName, "sha256": sha256}, busdb.ReadOperation)
vars := map[string]any{"@collection": UserCollectionName, "sha256": sha256}
cursor, _, err := db.Query(ctx, query, vars, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var doc models.User
var doc model.User
meta, err := cursor.ReadDocument(ctx, &doc)
if err != nil {
return nil, err
@@ -179,23 +216,41 @@ func (db *Database) UserByHash(ctx context.Context, sha256 string) (*models.User
return toUserResponse(meta.Key, &doc), err
}
func (db *Database) UserUpdate(ctx context.Context, id string, user *models.UserForm) (*models.UserResponse, error) {
var doc models.User
_, err := db.userCollection.ReadDocument(ctx, id, &doc)
func (db *Database) UserByIDAndPassword(ctx context.Context, id, password string) (*model.UserResponse, error) {
log.Println("UserByIDAndPassword", id, password)
query := `FOR d in @@collection
FILTER d._key == @id && !d.apikey && d.sha512 == SHA512(CONCAT(d.salt, @password))
RETURN d`
vars := map[string]any{"@collection": UserCollectionName, "id": id, "password": password}
cursor, _, err := db.Query(ctx, query, vars, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var doc model.User
meta, err := cursor.ReadDocument(ctx, &doc)
if err != nil {
return nil, err
}
if doc.Sha256 != nil {
return nil, errors.New("cannot update an API key")
}
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.userCollection.ReplaceDocument(ctx, id, toUser(user, nil))
if err != nil {
return nil, err
}
return toUserResponse(meta.Key, &doc), nil
return toUserResponse(meta.Key, &doc), err
}
func generateAPIKey() (key, sha256Hash *string) {
newKey := generateKey()
sha256Hash = pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(newKey))))
return &newKey, sha256Hash
}
func hashUserPassword(newUser *model.UserForm) (salt, sha512Hash *string) {
if newUser.Password != nil {
saltKey := generateKey()
salt = &saltKey
sha512Hash = pointer.String(fmt.Sprintf("%x", sha512.Sum512([]byte(saltKey+*newUser.Password))))
}
return salt, sha512Hash
}

87
database/userdata.go Normal file
View File

@@ -0,0 +1,87 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toUserDataResponse(key string, doc *model.UserData) *model.UserDataResponse {
return &model.UserDataResponse{
Email: doc.Email,
ID: key,
Image: doc.Image,
Name: doc.Name,
Timeformat: doc.Timeformat,
}
}
func (db *Database) UserDataCreate(ctx context.Context, id string, userdata *model.UserData) error {
if userdata == nil {
return errors.New("requires setting")
}
if id == "" {
return errors.New("requires username")
}
_, err := db.userdataCollection.CreateDocument(ctx, ctx, id, userdata)
return err
}
func (db *Database) UserDataGetOrCreate(ctx context.Context, id string, newUserData *model.UserData) (*model.UserDataResponse, error) {
setting, err := db.UserDataGet(ctx, id)
if err != nil {
return toUserDataResponse(id, newUserData), db.UserDataCreate(ctx, id, newUserData)
}
return setting, nil
}
func (db *Database) UserDataGet(ctx context.Context, id string) (*model.UserDataResponse, error) {
var doc model.UserData
meta, err := db.userdataCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
}
return toUserDataResponse(meta.Key, &doc), err
}
func (db *Database) UserDataList(ctx context.Context) ([]*model.UserDataResponse, error) {
query := "FOR d IN @@collection SORT d.username ASC RETURN d"
cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": UserDataCollectionName}, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cursor.Close()
var docs []*model.UserDataResponse
for {
var doc model.UserData
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
} else if err != nil {
return nil, err
}
docs = append(docs, toUserDataResponse(meta.Key, &doc))
}
return docs, err
}
func (db *Database) UserDataUpdate(ctx context.Context, id string, userdata *model.UserData) (*model.UserDataResponse, error) {
var doc model.UserData
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.userdataCollection.ReplaceDocument(ctx, id, userdata)
if err != nil {
return nil, err
}
return toUserDataResponse(meta.Key, &doc), nil
}

View File

@@ -3,33 +3,30 @@ package database_test
import (
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/test"
)
func init() {
gin.SetMode(gin.TestMode)
}
var bob = &models.UserData{
var bob = &model.UserData{
Email: pointer.String("bob@example.org"),
Name: pointer.String("Bob"),
}
var bobResponse = &models.UserDataResponse{
var bobResponse = &model.UserDataResponse{
ID: "bob",
Email: pointer.String("bob@example.org"),
Name: pointer.String("Bob"),
}
func TestDatabase_UserDataCreate(t *testing.T) {
t.Parallel()
type args struct {
id string
setting *models.UserData
setting *model.UserData
}
tests := []struct {
name string
@@ -42,7 +39,10 @@ func TestDatabase_UserDataCreate(t *testing.T) {
{name: "Only settingname", args: args{id: "bob"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -57,20 +57,25 @@ func TestDatabase_UserDataCreate(t *testing.T) {
}
func TestDatabase_UserDataGet(t *testing.T) {
t.Parallel()
type args struct {
id string
}
tests := []struct {
name string
args args
want *models.UserDataResponse
want *model.UserDataResponse
wantErr bool
}{
{name: "Normal get", args: args{id: "bob"}, want: bobResponse},
{name: "Not existing", args: args{id: "foo"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -84,6 +89,7 @@ func TestDatabase_UserDataGet(t *testing.T) {
got, err := db.UserDataGet(test.Context(), tt.args.id)
if (err != nil) != tt.wantErr {
t.Errorf("UserDataGet() error = %v, wantErr %v", err, tt.wantErr)
return
}
if err != nil {
@@ -96,15 +102,20 @@ func TestDatabase_UserDataGet(t *testing.T) {
}
func TestDatabase_UserDataList(t *testing.T) {
t.Parallel()
tests := []struct {
name string
want []*models.UserDataResponse
want []*model.UserDataResponse
wantErr bool
}{
{name: "Normal list", want: []*models.UserDataResponse{bobResponse}},
{name: "Normal list", want: []*model.UserDataResponse{bobResponse}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
@@ -118,6 +129,7 @@ func TestDatabase_UserDataList(t *testing.T) {
got, err := db.UserDataList(test.Context())
if (err != nil) != tt.wantErr {
t.Errorf("UserDataList() error = %v, wantErr %v", err, tt.wantErr)
return
}
@@ -127,9 +139,11 @@ func TestDatabase_UserDataList(t *testing.T) {
}
func TestDatabase_UserDataUpdate(t *testing.T) {
t.Parallel()
type args struct {
id string
setting *models.UserData
setting *model.UserData
}
tests := []struct {
name string
@@ -140,7 +154,10 @@ func TestDatabase_UserDataUpdate(t *testing.T) {
{name: "Not existing", args: args{id: "foo"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)

View File

@@ -10,6 +10,7 @@ definitions:
properties:
name: { type: string, example: "2.2.2.2" }
type: { type: string }
kind: { type: string }
status: { type: string, example: "Unknown" }
enrichments: { type: object, additionalProperties: { $ref: "#/definitions/Enrichment" } }

View File

@@ -22,11 +22,6 @@ paths:
script: ""
type: [ global, artifact, playbook ]
schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
- id: thehive
image: "docker.io/python:3"
script: ""
type: [ global ]
schema: "{\"title\":\"TheHive credentials\",\"type\":\"object\",\"properties\":{\"thehiveurl\":{\"type\":\"string\",\"title\":\"TheHive URL (e.g. 'https://thehive.example.org')\"},\"thehivekey\":{\"type\":\"string\",\"title\":\"TheHive API Key\"},\"skip_files\":{\"type\":\"boolean\", \"default\": true, \"title\":\"Skip Files (much faster)\"},\"keep_ids\":{\"type\":\"boolean\", \"default\": true, \"title\":\"Keep IDs and overwrite existing IDs\"}},\"required\":[\"thehiveurl\", \"thehivekey\", \"skip_files\", \"keep_ids\"]}"
- id: vt.hash
image: "docker.io/python:3"
script: ""

167
definition/dashboards.yaml Normal file
View File

@@ -0,0 +1,167 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/statistics:
get:
tags: [ "statistics" ]
summary: "Get statistics"
operationId: "getStatistics"
responses:
"200":
description: "successful operation"
schema: { $ref: '#/definitions/Statistics' }
examples:
test:
unassigned: 0
open_tickets_per_user: { }
tickets_per_week: { "2021-39": 3 }
tickets_per_type: { "alert": 2, "incident": 1 }
security: [ { roles: [ "ticket:read" ] } ]
/dashboards:
get:
tags: [ "dashboards" ]
summary: "List dashboards"
operationId: "listDashboards"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/DashboardResponse" } }
examples:
test:
- id: simple
name: Simple
widgets:
- name: "open_tickets_per_user"
aggregation: "owner"
filter: 'status == "open"'
type: "bar"
width: 4
- name: "tickets_per_week"
aggregation: 'CONCAT(DATE_YEAR(created), "-", DATE_ISOWEEK(created) < 10 ? "0" : "", DATE_ISOWEEK(created))'
type: "line"
width: 8
security: [ { roles: [ "dashboard:read" ] } ]
post:
tags: [ "dashboards" ]
summary: "Create a new dashboard"
operationId: "createDashboard"
parameters:
- { name: "template", in: "body", description: "New template", required: true, schema: { $ref: "#/definitions/Dashboard" }, x-example: { name: "My Dashboard", widgets: [ ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/DashboardResponse" }
examples:
test:
id: "my-dashboard"
name: "My Dashboard"
widgets: []
security: [ { roles: [ "dashboard:write" ] } ]
/dashboards/{id}:
get:
tags: [ "dashboards" ]
summary: "Get a single dashboard"
operationId: "getDashboard"
parameters:
- { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/DashboardResponse" }
examples:
test:
id: simple
name: Simple
widgets:
- name: "open_tickets_per_user"
aggregation: "owner"
filter: 'status == "open"'
type: "bar"
width: 4
- name: "tickets_per_week"
aggregation: 'CONCAT(DATE_YEAR(created), "-", DATE_ISOWEEK(created) < 10 ? "0" : "", DATE_ISOWEEK(created))'
type: "line"
width: 8
security: [ { roles: [ "dashboard:read" ] } ]
put:
tags: [ "dashboards" ]
summary: "Update an existing dashboard"
operationId: "updateDashboard"
parameters:
- { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
- { name: "dashboard", in: "body", description: "Dashboard object that needs to be added", required: true, schema: { $ref: "#/definitions/Dashboard" }, x-example: { name: "Simple", widgets: [] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/DashboardResponse" }
examples:
test:
id: simple
name: Simple
widgets: []
security: [ { roles: [ "dashboard:write" ] } ]
delete:
tags: [ "dashboards" ]
summary: "Delete a dashboard"
operationId: "deleteDashboard"
parameters:
- { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "dashboard:write" ] } ]
/dashboard/data:
get:
tags: [ "dashboards" ]
summary: "Get widget data"
operationId: "dashboardData"
parameters:
- { name: "aggregation", in: "query", description: "Aggregation", required: true, type: string, x-example: "type" }
- { name: "filter", in: "query", description: "Filter", type: string, x-example: 'status == "closed"' }
responses:
"200":
description: "successful operation"
schema: { type: object }
examples:
test:
alert: 2
incident: 1
security: [ { roles: [ "dashboard:read" ] } ]
definitions:
Statistics:
type: object
required: [ unassigned, open_tickets_per_user, tickets_per_week, tickets_per_type ]
properties:
unassigned: { type: integer }
open_tickets_per_user: { type: object, additionalProperties: { type: integer } }
tickets_per_week: { type: object, additionalProperties: { type: integer } }
tickets_per_type: { type: object, additionalProperties: { type: integer } }
Dashboard:
type: object
required: [ name, widgets ]
properties:
name: { type: string }
widgets: { type: array, items: { $ref: "#/definitions/Widget" } }
DashboardResponse:
type: object
required: [ id, name, widgets ]
properties:
id: { type: string }
name: { type: string }
widgets: { type: array, items: { $ref: "#/definitions/Widget" } }
Widget:
type: object
required: [ name, type, aggregation, width ]
properties:
name: { type: string }
type: { type: string, enum: [ "bar", "line", "pie" ] }
filter: { type: string }
aggregation: { type: string }
width: { type: integer, minimum: 1, maximum: 12 }

View File

@@ -13,7 +13,7 @@ paths:
schema: { type: array, items: { $ref: "#/definitions/JobResponse" } }
examples:
test:
- id: "99cd67131b48"
- id: "b81c2366-ea37-43d2-b61b-03afdc21d985"
automation: "hash.sha1"
payload: "test"
status: "created"
@@ -23,9 +23,13 @@ paths:
summary: "Start a new job"
operationId: "runJob"
parameters:
- { name: "job", in: "body", description: "New job", required: true, schema: { $ref: "#/definitions/JobForm" }, x-example: { automation: "hash.sha1", message: { payload: "test" } } }
- { name: "job", in: "body", description: "New job", required: true, schema: { $ref: "#/definitions/JobForm" }, x-example: { automation: "hash.sha1", payload: "test" } }
responses:
"204": { description: "successful operation" }
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "87390749-2125-4a87-91c5-da7e3f9bebf1", automation: "hash.sha1", payload: "test", status: "created" }
security: [ { roles: [ "job:write" ] } ]
/jobs/{id}:
@@ -34,27 +38,27 @@ paths:
summary: "Get a single job"
operationId: "getJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "99cd67131b48" }
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "b81c2366-ea37-43d2-b61b-03afdc21d985" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "created" }
test: { id: "b81c2366-ea37-43d2-b61b-03afdc21d985", automation: "hash.sha1", payload: "test", status: "created" }
security: [ { roles: [ "job:read" ] } ]
put:
tags: [ "jobs" ]
summary: "Update an existing job"
operationId: "updateJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "99cd67131b48" }
- { name: "job", in: "body", description: "Job object that needs to be added", required: true, schema: { $ref: "#/definitions/Job" }, x-example: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "failed" } }
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "b81c2366-ea37-43d2-b61b-03afdc21d985" }
- { name: "job", in: "body", description: "Job object that needs to be added", required: true, schema: { $ref: "#/definitions/JobUpdate" }, x-example: { status: "failed", running: false } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "failed" }
test: { id: "b81c2366-ea37-43d2-b61b-03afdc21d985", automation: "hash.sha1", payload: "test", status: "failed" }
security: [ { roles: [ "job:write" ] } ]
@@ -62,7 +66,7 @@ definitions:
Message:
type: object
properties:
payload: { type: object }
payload: { }
secrets: { type: object, additionalProperties: { type: string } }
context: { $ref: "#/definitions/Context" }
@@ -103,6 +107,16 @@ definitions:
payload: { }
origin: { $ref: "#/definitions/Origin" }
JobUpdate:
type: object
required: [ running, status ]
properties:
container: { type: string }
running: { type: boolean }
status: { type: string }
log: { type: string }
output: { type: object }
Job:
type: object
required: [ automation, running, status ]

View File

@@ -16,14 +16,15 @@ paths:
schema: { type: array, items: { $ref: "#/definitions/LogEntry" } }
examples:
test:
- { "created": "2021-10-02T18:05:00.333535+02:00","creator": "bob","reference": "tickets/294511","message": "Fail run account resist lend solve incident centre priority temperature. Cause change distribution examine location technique shape partner milk customer. Rail tea plate soil report cook railway interpretation breath action. Exercise dream accept park conclusion addition shoot assistance may answer. Gold writer link stop combine hear power name commitment operation. Determine lifespan support grow degree henry exclude detail set religion. Direct library policy convention chain retain discover ride walk student. Gather proposal select march aspect play noise avoid encourage employ. Assessment preserve transport combine wish influence income guess run stand. Charge limit crime ignore statement foundation study issue stop claim." }
- { type: "manual", "created": "2021-12-12T12:12:12.000000012Z","creator": "bob","reference": "tickets/294511","message": "Fail run account resist lend solve incident centre priority temperature. Cause change distribution examine location technique shape partner milk customer. Rail tea plate soil report cook railway interpretation breath action. Exercise dream accept park conclusion addition shoot assistance may answer. Gold writer link stop combine hear power name commitment operation. Determine lifespan support grow degree henry exclude detail set religion. Direct library policy convention chain retain discover ride walk student. Gather proposal select march aspect play noise avoid encourage employ. Assessment preserve transport combine wish influence income guess run stand. Charge limit crime ignore statement foundation study issue stop claim." }
security: [ { roles: [ "log:read" ] } ]
definitions:
LogEntry:
type: object
required: [ reference, creator, created, message ]
required: [ type, reference, creator, created, message ]
properties:
type: { type: string }
reference: { type: string }
creator: { type: string }
created: { type: string, format: "date-time" }

View File

@@ -32,7 +32,7 @@ paths:
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/PlaybookTemplateResponse" } }
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple-2

View File

@@ -10,42 +10,72 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/Settings" }
schema: { $ref: "#/definitions/SettingsResponse" }
examples:
test:
version: "0.0.0-test"
tier: community
timeformat: "YYYY-MM-DDThh:mm:ss"
timeformat: "yyyy-MM-dd hh:mm:ss"
ticketTypes:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
artifactKinds:
- { icon: "mdi-server", id: "asset", name: "Asset" }
- { icon: "mdi-bullseye", id: "ioc", name: "IOC" }
artifactStates:
- { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" }
- { icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" }
- { icon: "mdi-check", id: "clean", name: "Clean", color: "success" }
roles: [
"admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write",
"admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read",
"admin:userdata:write", "analyst:automation:read",
"analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read",
"analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read",
"analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write",
"analyst:tickettype:read", "analyst:user:read", "engineer:automation:write",
"engineer:playbook:write", "engineer:rule:write", "engineer:template:write",
"engineer:tickettype:write" ]
security: [ { roles: [ "settings:read" ] } ]
post:
tags: [ "settings" ]
summary: "Save settings"
operationId: "saveSettings"
parameters:
- { name: "settings", in: "body", description: "Save settings", required: true, schema: { $ref: "#/definitions/Settings" }, x-example: { timeformat: "yyyy-MM-dd hh:mm:ss", artifactKinds: [ { icon: "mdi-server", id: "asset", name: "Asset" }, { icon: "mdi-bullseye", id: "ioc", name: "IOC" } ], artifactStates: [ { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" },{ icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" },{ icon: "mdi-check", id: "clean", name: "Clean", color: "success" } ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/SettingsResponse" }
examples:
test:
version: "0.0.0-test"
tier: community
timeformat: "yyyy-MM-dd hh:mm:ss"
ticketTypes:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
artifactKinds:
- { icon: "mdi-server", id: "asset", name: "Asset" }
- { icon: "mdi-bullseye", id: "ioc", name: "IOC" }
artifactStates:
- { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" }
- { icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" }
- { icon: "mdi-check", id: "clean", name: "Clean", color: "success" }
security: [ { roles: [ "settings:write" ] } ]
definitions:
Settings:
type: object
required: [ version, tier, timeformat, ticketTypes, artifactStates ]
required: [ timeformat, artifactKinds, artifactStates ]
properties:
timeformat: { title: "Time Format", type: string }
artifactKinds: { title: "Artifact Kinds", type: array, items: { $ref: "#/definitions/Type" } }
artifactStates: { title: "Artifact States", type: array, items: { $ref: "#/definitions/Type" } }
SettingsResponse:
type: object
required: [ version, tier, timeformat, ticketTypes, artifactKinds, artifactStates ]
properties:
version: { title: "Version", type: string }
tier: { title: "Tier", type: string, enum: [ "community", "enterprise" ] }
timeformat: { title: "Time Format", type: string }
ticketTypes: { title: "Ticket Types", type: array, items: { $ref: "#/definitions/TicketTypeResponse" } }
artifactKinds: { title: "Artifact Kinds", type: array, items: { $ref: "#/definitions/Type" } }
artifactStates: { title: "Artifact States", type: array, items: { $ref: "#/definitions/Type" } }
roles: { title: "Roles", type: array, items: { type: string } }

View File

@@ -1,31 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/statistics:
get:
tags: [ "statistics" ]
summary: "Get statistics"
operationId: "getStatistics"
responses:
"200":
description: "successful operation"
schema: { $ref: '#/definitions/Statistics' }
examples:
test:
unassigned: 0
open_tickets_per_user: { }
tickets_per_week: { "2021-39": 3 }
tickets_per_type: { "alert": 2, "incident": 1 }
security: [ { roles: [ "ticket:read" ] } ]
definitions:
Statistics:
type: object
required: [ unassigned, open_tickets_per_user, tickets_per_week, tickets_per_type ]
properties:
unassigned: { type: integer }
open_tickets_per_user: { type: object, additionalProperties: { type: integer } }
tickets_per_week: { type: object, additionalProperties: { type: integer } }
tickets_per_type: { type: object, additionalProperties: { type: integer } }

View File

@@ -10,37 +10,12 @@ paths:
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TaskResponse" } }
schema: { type: array, items: { $ref: "#/definitions/TaskWithContext" } }
examples:
test: [ ]
security: [ { roles: [ "ticket:read" ] } ]
definitions:
TaskForm:
type: object
required: [ name, type ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
Task:
type: object
required: [ name, type, done, created ]

View File

@@ -23,21 +23,21 @@ paths:
count: 3
tickets:
- id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-10-02T16:04:59.078206Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -49,8 +49,8 @@ paths:
- { name: "94d5cab6f5fe3422a447ab15436e7a672bc0c09a", status: "unknown" }
- { name: "http://www.customerviral.io/scalable/vertical/killer", status: "clean" }
- { name: "leadreintermediate.io", status: "malicious" }
- { id: 8125, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00",name: "phishing from selenafadel@von.com detected", owner: "demo", references: [ { href: "https://www.seniorleading-edge.name/users/efficient", name: "recovery" },{ href: "http://www.dynamicseamless.com/clicks-and-mortar", name: "force" },{ href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8125, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z",name: "phishing from selenafadel@von.com detected", owner: "demo", references: [ { href: "https://www.seniorleading-edge.name/users/efficient", name: "recovery" },{ href: "http://www.dynamicseamless.com/clicks-and-mortar", name: "force" },{ href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:read" ] } ]
post:
tags: [ "tickets" ]
@@ -68,8 +68,8 @@ paths:
name: "Wannacry infection"
type: "incident"
status: "open"
created: "1985-04-12T23:20:50.52Z"
modified: "1985-04-12T23:20:50.52Z"
created: "2021-12-12T12:12:12.000000012Z"
modified: "2021-12-12T12:12:12.000000012Z"
owner: "bob"
schema: "{}"
security: [ { roles: [ "ticket:write" ] } ]
@@ -80,7 +80,7 @@ paths:
summary: "Create a new tickets in batch"
operationId: "createTicketBatch"
parameters:
- { name: "ticket", in: "body", description: "New ticket", required: true, schema: { type: array, items: { $ref: "#/definitions/TicketForm" } }, x-example: [ { id: 123, owner: bob, name: "Wannacry infection", status: "open", type: "incident" } ] }
- { name: "ticket", in: "body", description: "New ticket", required: true, schema: { $ref: "#/definitions/TicketFormArray" }, x-example: [ { id: 123, owner: bob, name: "Wannacry infection", status: "open", type: "incident" } ] }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "ticket:write" ] } ]
@@ -95,12 +95,12 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-10-02T16:04:59.078186Z"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
references:
@@ -114,7 +114,7 @@ paths:
status: "closed"
type: "alert"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:read" ] } ]
put:
tags: [ "tickets" ]
@@ -122,16 +122,16 @@ paths:
operationId: "updateTicket"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8125 }
- { name: "ticket", in: "body", description: "Updated ticket", required: true, schema: { $ref: "#/definitions/Ticket" }, x-example: { "created": "2021-10-02T18:04:59.078186+02:00",modified: "2021-10-02T18:04:59.078186+02:00", name: "phishing from selenafadel@von.org detected", owner: "demo", references: [ { href: "https://www.seniorleading-edge.name/users/efficient", name: "recovery" },{ href: "http://www.dynamicseamless.com/clicks-and-mortar", name: "force" },{ href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ], schema: "{}", status: "closed", type: "alert" } }
- { name: "ticket", in: "body", description: "Updated ticket", required: true, schema: { $ref: "#/definitions/Ticket" }, x-example: { "created": "2021-12-12T12:12:12.000000012Z",modified: "2021-12-12T12:12:12.000000012Z", name: "phishing from selenafadel@von.org detected", owner: "demo", references: [ { href: "https://www.seniorleading-edge.name/users/efficient", name: "recovery" },{ href: "http://www.dynamicseamless.com/clicks-and-mortar", name: "force" },{ href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ], schema: "{}", status: "closed", type: "alert" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-12-12T12:12:12.000000012Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "phishing from selenafadel@von.org detected"
owner: "demo"
references:
@@ -145,7 +145,7 @@ paths:
status: "closed"
type: "alert"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:write" ] } ]
delete:
tags: [ "tickets" ]
@@ -168,12 +168,12 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8126
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-10-02T16:04:59.078186Z"
name: "Surfaceintroduce virus detected"
owner: "demo"
references:
@@ -185,21 +185,21 @@ paths:
type: "alert"
tickets:
- id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-10-02T16:04:59.078206Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -212,8 +212,8 @@ paths:
- { name: "http://www.customerviral.io/scalable/vertical/killer", status: "clean" }
- { name: "leadreintermediate.io", status: "malicious" }
- id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-10-02T16:04:59.078186Z"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
references:
@@ -234,12 +234,12 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8126
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-10-02T16:04:59.078186Z"
name: "Surfaceintroduce virus detected"
owner: "demo"
references:
@@ -262,16 +262,16 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
comments:
- created: "2021-10-02T18:04:59.078186+02:00"
- created: "2021-12-12T12:12:12.000000012Z"
creator: "bob"
message: "My first comment"
references:
@@ -282,7 +282,7 @@ paths:
status: "closed"
type: "alert"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/comments/{commentID}:
@@ -297,25 +297,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -336,16 +336,16 @@ paths:
operationId: "setReferences"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8125 }
- { name: "references", in: "body", description: "All ticket references", required: true, schema: { type: array, items: { $ref: "#/definitions/Reference" } }, x-example: [ { href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ] }
- { name: "references", in: "body", description: "All ticket references", required: true, schema: { $ref: "#/definitions/ReferenceArray" }, x-example: [ { href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ] }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
references: [ { href: "http://www.leadscalable.biz/envisioneer", name: "fund" } ]
@@ -353,7 +353,7 @@ paths:
status: "closed"
type: "alert"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/schema:
@@ -363,16 +363,16 @@ paths:
operationId: "setSchema"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8125 }
- { name: "schema", in: "body", description: "New ticket schema", schema: { type: string }, x-example: "{}" }
- { name: "schema", in: "body", description: "New ticket schema", required: true, schema: { type: string }, x-example: "{}" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-10-02T16:04:59.078186Z"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
references:
@@ -383,40 +383,7 @@ paths:
status: "closed"
type: "alert"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/files:
put:
tags: [ "tickets" ]
summary: "Link files to an ticket"
description: "Link files to an ticket. The files themself will be stored in object storage."
operationId: "linkFiles"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8125 }
- { name: "files", in: "body", description: "Added files", required: true, schema: { type: array, items: { $ref: "#/definitions/File" } }, x-example: [ { key: myfile, name: "document.doc" } ] }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
examples:
test:
id: 8125
created: "2021-10-02T18:04:59.078186+02:00"
modified: "2021-10-02T18:04:59.078186+02:00"
name: "phishing from selenafadel@von.com detected"
owner: "demo"
references:
- { href: "https://www.seniorleading-edge.name/users/efficient", name: "recovery" }
- { href: "http://www.dynamicseamless.com/clicks-and-mortar", name: "force" }
- { href: "http://www.leadscalable.biz/envisioneer", name: "fund" }
"schema": "{}"
status: "closed"
type: "alert"
files: [ { key: myfile, name: "document.doc" } ]
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/playbooks:
@@ -430,7 +397,7 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8125
@@ -438,11 +405,11 @@ paths:
owner: demo
type: "alert"
status: "closed"
created: "1985-04-12T23:20:50.52Z"
modified: "1985-04-12T23:20:50.52Z"
created: "2021-10-02T16:04:59.078186Z"
modified: "2021-12-12T12:12:12.000000012Z"
schema: "{}"
tickets:
- { id: 8126, created: "2021-10-02T18:04:59.078186+02:00", modified: "2021-10-02T18:04:59.078186+02:00", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
- { id: 8126, created: "2021-10-02T16:04:59.078186Z", modified: "2021-10-02T16:04:59.078186Z", name: "Surfaceintroduce virus detected", owner: "demo", references: [ { href: "http://www.centralworld-class.io/synthesize", name: "university" },{ href: "https://www.futurevirtual.org/supply-chains/markets/sticky/iterate", name: "goal" },{ href: "http://www.chiefsyndicate.io/action-items", name: "unemployment" } ],"schema": "{}", status: "closed", type: "alert" }
references:
- { href: "https://www.seniorleading-edge.name/users/efficient", name: recovery }
- { href: "http://www.dynamicseamless.com/clicks-and-mortar", name: force }
@@ -454,7 +421,7 @@ paths:
input:
active: true
done: false
created: "2021-10-02T18:04:59.078186+02:00"
created: "2021-12-12T12:12:12.000000012Z"
order: 0
name: Upload malware if possible
type: input
@@ -472,7 +439,7 @@ paths:
hash:
active: false
done: false
created: "2021-10-02T18:04:59.078186+02:00"
created: "2021-12-12T12:12:12.000000012Z"
order: 1
name: Hash the malware
type: automation
@@ -485,10 +452,11 @@ paths:
escalate:
active: false
done: false
created: "2021-10-02T18:04:59.078186+02:00"
created: "2021-12-12T12:12:12.000000012Z"
order: 2
name: Escalate to malware team
type: task
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/playbooks/{playbookID}:
delete:
@@ -501,15 +469,15 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
name: "live zebra"
type: "incident"
status: "closed"
created: "1985-04-12T23:20:50.52Z"
modified: "1985-04-12T23:20:50.52Z"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
owner: "demo"
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: performance }
@@ -615,35 +583,80 @@ paths:
/tickets/{id}/playbooks/{playbookID}/task/{taskID}:
put:
tags: [ "tickets" ]
summary: "Set a ticket playbook task"
operationId: "setTask"
summary: "Set a ticket playbook task data"
operationId: "setTaskData"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8123 }
- { name: "playbookID", in: "path", description: "Playbook ID", required: true, type: string, x-example: "phishing" }
- { name: "taskID", in: "path", description: "Task ID", required: true, type: string, x-example: "board" }
- { name: "task", in: "body", description: "Task", required: true, schema: { $ref: "#/definitions/Task" }, x-example: { done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", data: { boardInvolved: true } } }
- { name: "data", in: "body", description: "Task data", required: true, schema: { type: object }, x-example: { boardInvolved: true } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", data: { boardInvolved: true } }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", data: { boardInvolved: true } }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
- { href: "https://www.corporateintuitive.org/intuitive/platforms/integrate", name: "suggest" }
"schema": "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
status: "closed"
type: "incident"
artifacts:
- { name: "94d5cab6f5fe3422a447ab15436e7a672bc0c09a", status: "unknown" }
- { name: "http://www.customerviral.io/scalable/vertical/killer", status: "clean" }
- { name: "leadreintermediate.io", status: "malicious" }
security: [ { roles: [ "ticket:write" ] } ]
/tickets/{id}/playbooks/{playbookID}/task/{taskID}/owner:
put:
tags: [ "tickets" ]
summary: "Set a ticket playbook task owner"
operationId: "setTaskOwner"
parameters:
- { name: "id", in: "path", description: "Ticket ID", required: true, type: integer, format: "int64", x-example: 8123 }
- { name: "playbookID", in: "path", description: "Playbook ID", required: true, type: string, x-example: "phishing" }
- { name: "taskID", in: "path", description: "Task ID", required: true, type: string, x-example: "board" }
- { name: "owner", in: "body", description: "Task owner", required: true, schema: { type: string }, x-example: "eve" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", owner: "eve" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -670,25 +683,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", closed: "2021-10-02T18:04:59.078186+02:00", done: true, "active": false, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", data: { boardInvolved: true } }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", closed: "2021-12-12T12:12:12.000000012Z", done: true, "active": false, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input", data: { boardInvolved: true } }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -726,25 +739,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -785,25 +798,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-10-02T16:04:59.078206Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -826,25 +839,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketResponse" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-12-12T12:12:12.000000012Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -882,25 +895,25 @@ paths:
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/Artifact" }
schema: { $ref: "#/definitions/TicketWithTickets" }
examples:
test:
id: 8123
created: "2021-10-02T18:04:59.078206+02:00"
modified: "2021-10-02T18:04:59.078206+02:00"
created: "2021-10-02T16:04:59.078206Z"
modified: "2021-10-02T16:04:59.078206Z"
name: "live zebra"
owner: "demo"
playbooks:
phishing:
name: "Phishing"
tasks:
"block-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-10-02T18:04:59.078186+02:00", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
"block-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 6, name: "Block IOCs", type: "task" }
"block-sender": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 3, name: "Block sender","next": { "extract-iocs": "" }, type: "task" }
"board": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": true, "order": 0, name: "Board Involvement?","next": { "escalate": "boardInvolved == true","mail-available": "boardInvolved == false" },"schema": { "properties": { "boardInvolved": { "default": false, "title": "A board member is involved.", type: "boolean" } }, "required": [ "boardInvolved" ], "title": "Board Involvement?", type: "object" }, type: "input" }
"escalate": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 1, name: "Escalate to CISO", type: "task" }
"extract-iocs": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 5, name: "Extract IOCs", "next": { "block-iocs": "" },"schema": { "properties": { "iocs": { "items": { type: "string" },"title": "IOCs", type: "array" } }, "title": "Extract IOCs", type: "object" }, type: "input" }
"mail-available": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 2, name: "Mail available","next": { "block-sender": "schemaKey == 'yes'", "extract-iocs": "schemaKey == 'yes'", "search-email-gateway": "schemaKey == 'no'" },"schema": { "oneOf": [ { "properties": { "mail": { "title": "Mail", type: "string", "x-display": "textarea" }, "schemaKey": { "const": "yes", type: "string" } },"required": [ "mail" ], "title": "Yes" },{ "properties": { "schemaKey": { "const": "no", type: "string" } },"title": "No" } ],"title": "Mail available", type: "object" }, type: "input" }
"search-email-gateway": { created: "2021-12-12T12:12:12.000000012Z", done: false, "active": false, "order": 4, name: "Search email gateway","next": { "extract-iocs": "" }, type: "task" }
references:
- { href: "https://www.leadmaximize.net/e-services/back-end", name: "performance" }
- { href: "http://www.corporateinteractive.name/rich", name: "autumn" }
@@ -911,10 +924,13 @@ paths:
artifacts:
- { name: "94d5cab6f5fe3422a447ab15436e7a672bc0c09a", status: "unknown" }
- { name: "http://www.customerviral.io/scalable/vertical/killer", status: "clean" }
- { name: "leadreintermediate.io", status: "malicious", enrichments: { hash.sha1: { name: "hash.sha1", created: "2021-10-03T18:44:06.488923+02:00", data: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } } } }
- { name: "leadreintermediate.io", status: "malicious", enrichments: { hash.sha1: { name: "hash.sha1", created: "2021-12-12T12:12:12.000000012Z", data: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } } } }
security: [ { roles: [ "ticket:write" ] } ]
definitions:
TicketFormArray:
type: array
items: { $ref: "#/definitions/TicketForm" }
TicketForm:
type: object
@@ -1037,6 +1053,8 @@ definitions:
comments: { type: array, items: { $ref: '#/definitions/Comment' } }
artifacts: { type: array, items: { $ref: "#/definitions/Artifact" } }
logs: { type: array, items: { $ref: '#/definitions/LogEntry' } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
modified: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
@@ -1065,6 +1083,10 @@ definitions:
created: { type: string, format: "date-time" }
message: { type: string }
ReferenceArray:
type: array
items: { $ref: '#/definitions/Reference' }
Reference:
type: object
required: [ name, href ]

View File

@@ -12,7 +12,7 @@ paths:
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: bob, roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: false }
test: { id: bob, roles: [ "admin" ], blocked: false, apikey: false }
security: [ { roles: [ "currentuser:read" ] } ]
/users:
@@ -26,21 +26,21 @@ paths:
schema: { type: array, items: { $ref: "#/definitions/UserResponse" } }
examples:
test:
- { id: bob, blocked: false, roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], apikey: false }
- { id: script, roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: true }
- { id: bob, blocked: false, roles: [ "admin" ], apikey: false }
- { id: script, roles: [ "engineer" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
post:
tags: [ "users" ]
summary: "Create user"
operationId: "createUser"
parameters:
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst" ] } }
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst" ], blocked: false, apikey: true } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/NewUserResponse" }
examples:
test: { id: "syncscript", roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read" ], secret: "v39bOuobnlEljfWzjAgoKzhmnh1xSMxH", blocked: false }
test: { id: "syncscript", roles: [ "analyst" ], secret: "v39bOuobnlEljfWzjAgoKzhmnh1xSMxH", blocked: false }
security: [ { roles: [ "user:write" ] } ]
/users/{id}:
get:
@@ -54,7 +54,7 @@ paths:
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: "script", roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: true }
test: { id: "script", roles: [ "engineer" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
put:
tags: [ "users" ]
@@ -62,7 +62,7 @@ paths:
operationId: "updateUser"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "bob" }
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { roles: [ "analyst", "admin" ] } }
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst", "admin" ], blocked: false, apikey: false } }
responses:
"200":
description: "successful operation"
@@ -70,7 +70,7 @@ paths:
examples:
test:
id: bob
roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ]
roles: [ "analyst", "admin" ]
apikey: false
blocked: false
security: [ { roles: [ "user:write" ] } ]
@@ -90,6 +90,7 @@ definitions:
required: [ id, blocked, roles, apikey ]
properties:
id: { type: string }
password: { type: string }
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
@@ -101,7 +102,9 @@ definitions:
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
salt: { type: string }
sha256: { type: string } # for api keys
sha512: { type: string } # for users
UserResponse:
type: object

View File

@@ -0,0 +1,52 @@
version: '2.4'
services:
nginx:
image: nginx:1.23
volumes:
- ./nginx-with-keycloak.conf:/etc/nginx/nginx.conf:ro
ports: [ "80:80", "8529:8529", "9000:9000", "9002:9002", "9003:9003" ]
networks: [ catalyst ]
arangodb:
image: arangodb/arangodb:3.8.1
environment:
ARANGO_ROOT_PASSWORD: foobar
networks: [ catalyst ]
minio:
image: minio/minio:RELEASE.2021-12-10T23-03-39Z
environment:
MINIO_ROOT_USER: minio
MINIO_ROOT_PASSWORD: minio123
command: server /data -console-address ":9003"
networks: [ catalyst ]
postgres:
image: postgres:13
environment:
POSTGRES_DB: keycloak
POSTGRES_USER: keycloak
POSTGRES_PASSWORD: password
networks: [ catalyst ]
keycloak:
image: quay.io/keycloak/keycloak:14.0.0
environment:
DB_VENDOR: POSTGRES
DB_ADDR: postgres
DB_DATABASE: keycloak
DB_USER: keycloak
DB_SCHEMA: public
DB_PASSWORD: password
KEYCLOAK_USER: admin
KEYCLOAK_PASSWORD: admin
KEYCLOAK_IMPORT: /tmp/realm.json
PROXY_ADDRESS_FORWARDING: "true"
volumes:
- ./keycloak/realm.json:/tmp/realm.json
depends_on: [ postgres ]
networks: [ catalyst ]
networks:
catalyst:
name: catalyst

View File

@@ -1,49 +1,26 @@
version: '2.2'
version: '2.4'
services:
nginx:
image: nginx:1.21
image: nginx:1.23
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
ports: [ "80:80", "8529:8529", "9000:9000", "9001:9001", "9002:9002" ]
ports: [ "80:80", "8529:8529", "9000:9000", "9003:9003" ]
networks: [ catalyst ]
arangodb:
image: arangodb/arangodb:3.8.1
environment:
ARANGO_ROOT_PASSWORD: foobar
emitter:
image: emitter/server
environment:
- EMITTER_LICENSE=PfA8ID8izeSlDUlNZgNXo77DQV9QzlNtxTk64WreCXKfDZsREAVXUXwh20UKOZdkALbLTmOytO_iC6mc_twKAQ:3
# A9RysEsPJni8RaHeg_K0FKXQNfBrUyw-
networks: [ catalyst ]
minio:
image: minio/minio
image: minio/minio:RELEASE.2021-12-10T23-03-39Z
environment:
MINIO_ROOT_USER: minio
MINIO_ROOT_PASSWORD: minio123
command: server /data -console-address ":9003"
networks: [ catalyst ]
postgres:
image: postgres
environment:
POSTGRES_DB: keycloak
POSTGRES_USER: keycloak
POSTGRES_PASSWORD: password
keycloak:
image: quay.io/keycloak/keycloak:14.0.0
environment:
DB_VENDOR: POSTGRES
DB_ADDR: postgres
DB_DATABASE: keycloak
DB_USER: keycloak
DB_SCHEMA: public
DB_PASSWORD: password
KEYCLOAK_USER: admin
KEYCLOAK_PASSWORD: admin
KEYCLOAK_IMPORT: /tmp/realm.json
PROXY_ADDRESS_FORWARDING: "true"
volumes:
- ./keycloak/realm.json:/tmp/realm.json
depends_on: [ postgres ]
networks:
catalyst:
name: catalyst

View File

@@ -455,8 +455,8 @@
"secret": "d3ec0d91-b6ea-482d-8a4e-2f5a7ca0b4cb",
"redirectUris": [
"http://catalyst.internal.com/*",
"http://localhost:8000/callback",
"http://localhost/callback"
"http://localhost:8000/auth/callback",
"http://localhost/auth/callback"
],
"webOrigins": [
"http://catalyst.internal.com",

View File

@@ -0,0 +1,112 @@
user www-data;
worker_processes 5;
error_log /var/log/nginx/error.log;
events {
worker_connections 4096;
}
http {
include mime.types;
index index.html index.htm;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
server {
listen 80 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
}
location /wss {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
}
server {
listen 8529 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_arangodb arangodb;
proxy_pass http://$upstream_arangodb:8529;
}
}
server {
listen 9000 default_server;
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9000;
}
}
server {
listen 9002 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_keycloak keycloak;
proxy_pass http://$upstream_keycloak:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Server $host;
}
}
server {
listen 9003 default_server;
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9003;
}
}
}

View File

@@ -53,6 +53,17 @@ http {
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9000;
@@ -60,30 +71,24 @@ http {
}
server {
listen 9002 default_server;
listen 9003 default_server;
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_keycloak keycloak;
proxy_pass http://$upstream_keycloak:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Server $host;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9003;
}
}
}
stream {
server {
listen 9001;
resolver 127.0.0.11 valid=30s;
set $upstream_emitter emitter;
proxy_pass $upstream_emitter:8080;
}
}

View File

@@ -1,5 +1,6 @@
export SECRET=4ef5b29539b70233dd40c02a1799d25079595565e05a193b09da2c3e60ada1cd
# export OIDC_ENABLE=true
export OIDC_ISSUER=http://localhost:9002/auth/realms/catalyst
export OIDC_CLIENT_SECRET=d3ec0d91-b6ea-482d-8a4e-2f5a7ca0b4cb
@@ -7,8 +8,6 @@ export ARANGO_DB_HOST=http://localhost:8529
export ARANGO_DB_PASSWORD=foobar
export S3_HOST=http://localhost:9000
export S3_PASSWORD=minio123
export EMITTER_IO_HOST=tcp://localhost:9001
export EMITTER_IO_KEY=A9RysEsPJni8RaHeg_K0FKXQNfBrUyw-
export AUTH_BLOCK_NEW=false
export AUTH_DEFAULT_ROLES=analyst,admin
@@ -17,4 +16,4 @@ export EXTERNAL_ADDRESS=http://localhost
export CATALYST_ADDRESS=http://host.docker.internal
export INITIAL_API_KEY=d0169af94c40981eb4452a42fae536b6caa9be3a
go run cmd/catalyst-dev/*.go
go run ../cmd/catalyst-dev/*.go

View File

@@ -0,0 +1,20 @@
export SECRET=4ef5b29539b70233dd40c02a1799d25079595565e05a193b09da2c3e60ada1cd
export SIMPLE_AUTH_ENABLE=false
export OIDC_ENABLE=true
export OIDC_ISSUER=http://localhost:9002/auth/realms/catalyst
export OIDC_CLIENT_SECRET=d3ec0d91-b6ea-482d-8a4e-2f5a7ca0b4cb
export ARANGO_DB_HOST=http://localhost:8529
export ARANGO_DB_PASSWORD=foobar
export S3_HOST=http://localhost:9000
export S3_PASSWORD=minio123
export AUTH_BLOCK_NEW=false
export AUTH_DEFAULT_ROLES=analyst,admin
export EXTERNAL_ADDRESS=http://localhost
export CATALYST_ADDRESS=http://host.docker.internal
export INITIAL_API_KEY=d0169af94c40981eb4452a42fae536b6caa9be3a
go run ../cmd/catalyst-dev/*.go

Binary file not shown.

After

Width:  |  Height:  |  Size: 218 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 191 KiB

150
file.go
View File

@@ -1,32 +1,42 @@
package catalyst
import (
"context"
"errors"
"fmt"
"io"
"log"
"net/http"
"strconv"
"github.com/arangodb/go-driver"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/gin-gonic/gin"
"github.com/go-chi/chi/v5"
maut "github.com/jonas-plum/maut/auth"
tusd "github.com/tus/tusd/pkg/handler"
"github.com/tus/tusd/pkg/s3store"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/storage"
)
func upload(client *s3.S3, external string) gin.HandlerFunc {
return func(ctx *gin.Context) {
ticketID, exists := ctx.Params.Get("ticketID")
if !exists {
ctx.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "ticketID not given"})
func tusdUpload(db *database.Database, catalystBus *bus.Bus, client *s3.S3, external string) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ticketID := chi.URLParam(r, "ticketID")
if ticketID == "" {
api.JSONErrorStatus(w, http.StatusBadRequest, errors.New("ticketID not given"))
return
}
if err := storage.CreateBucket(client, ticketID); err != nil {
ctx.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Errorf("could not create bucket: %w", err)})
api.JSONErrorStatus(w, http.StatusBadRequest, fmt.Errorf("could not create bucket: %w", err))
return
}
@@ -36,43 +46,130 @@ func upload(client *s3.S3, external string) gin.HandlerFunc {
store.UseIn(composer)
handler, err := tusd.NewUnroutedHandler(tusd.Config{
BasePath: external + "/api/files/" + ticketID + "/upload/",
StoreComposer: composer,
BasePath: external + "/api/files/" + ticketID + "/tusd/",
StoreComposer: composer,
NotifyCompleteUploads: true,
})
if err != nil {
ctx.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": fmt.Errorf("could not create tusd handler: %w", err)})
api.JSONErrorStatus(w, http.StatusBadRequest, fmt.Errorf("could not create tusd handler: %w", err))
return
}
switch ctx.Request.Method {
userID := "unknown"
user, _, ok := maut.UserFromContext(r.Context())
if ok {
userID = user.ID
}
go func() {
event := <-handler.CompleteUploads
id, err := strconv.ParseInt(ticketID, 10, 64)
if err != nil {
return
}
file := &model.File{Key: event.Upload.Storage["Key"], Name: event.Upload.MetaData["filename"]}
ctx := context.Background()
doc, err := db.AddFile(ctx, id, file)
if err != nil {
log.Println(err)
return
}
catalystBus.RequestChannel.Publish(&bus.RequestMsg{
User: userID,
Function: "LinkFiles",
IDs: []driver.DocumentID{driver.DocumentID(fmt.Sprintf("tickets/%d", doc.ID))},
})
}()
switch r.Method {
case http.MethodHead:
gin.WrapF(handler.HeadFile)(ctx)
handler.HeadFile(w, r)
case http.MethodPost:
gin.WrapF(handler.PostFile)(ctx)
handler.PostFile(w, r)
case http.MethodPatch:
gin.WrapF(handler.PatchFile)(ctx)
handler.PatchFile(w, r)
default:
log.Println(errors.New("unknown method"))
ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "unknown method"})
api.JSONErrorStatus(w, http.StatusInternalServerError, errors.New("unknown method"))
}
}
}
func download(downloader *s3manager.Downloader) gin.HandlerFunc {
return func(ctx *gin.Context) {
ticketID, exists := ctx.Params.Get("ticketID")
if !exists {
ctx.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "ticketID not given"})
func upload(db *database.Database, client *s3.S3, uploader *s3manager.Uploader) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ticketID := chi.URLParam(r, "ticketID")
if ticketID == "" {
api.JSONErrorStatus(w, http.StatusBadRequest, errors.New("ticketID not given"))
return
}
key, exists := ctx.Params.Get("key")
if !exists {
ctx.AbortWithStatusJSON(http.StatusBadRequest, gin.H{"error": "key not given"})
file, header, err := r.FormFile("file")
if err != nil {
api.JSONErrorStatus(w, http.StatusBadRequest, err)
return
}
defer file.Close()
if err := storage.CreateBucket(client, ticketID); err != nil {
api.JSONErrorStatus(w, http.StatusBadRequest, fmt.Errorf("could not create bucket: %w", err))
return
}
buf := sequentialWriter{ctx.Writer}
_, err = uploader.Upload(&s3manager.UploadInput{
Bucket: aws.String("catalyst-" + ticketID),
Key: aws.String(header.Filename),
Body: file,
})
if err != nil {
api.JSONErrorStatus(w, http.StatusBadRequest, err)
return
}
id, err := strconv.ParseInt(ticketID, 10, 64)
if err != nil {
api.JSONErrorStatus(w, http.StatusBadRequest, err)
return
}
_, err = db.AddFile(r.Context(), id, &model.File{
Key: header.Filename,
Name: header.Filename,
})
if err != nil {
api.JSONErrorStatus(w, http.StatusBadRequest, err)
return
}
}
}
func download(downloader *s3manager.Downloader) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
ticketID := chi.URLParam(r, "ticketID")
if ticketID == "" {
api.JSONErrorStatus(w, http.StatusBadRequest, errors.New("ticketID not given"))
return
}
key := chi.URLParam(r, "key")
if key == "" {
api.JSONErrorStatus(w, http.StatusBadRequest, errors.New("key not given"))
return
}
buf := sequentialWriter{w}
downloader.Concurrency = 1
_, err := downloader.Download(buf, &s3.GetObjectInput{
@@ -80,8 +177,7 @@ func download(downloader *s3manager.Downloader) gin.HandlerFunc {
Key: aws.String(key),
})
if err != nil {
ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err})
return
api.JSONErrorStatus(w, http.StatusInternalServerError, err)
}
}
}

View File

@@ -8,21 +8,22 @@ spruce merge definition/*.yaml definition/enterprise/*.yaml >generated/catalyst.
echo generate caql parser and lexer
cd definition || exit
# antlr 4.10.1
antlr -Dlanguage=Go -o ../generated/caql/parser CAQLParser.g4 CAQLLexer.g4
antlr -Dlanguage=JavaScript -o ../ui/src/suggestions/grammar CAQLParser.g4 CAQLLexer.g4
cd ..
echo generate json
# openapi-generator 6.0.0
openapi-generator generate -i generated/community.yml -o generated -g openapi
mv generated/openapi.json generated/community.json
openapi-generator generate -i generated/catalyst.yml -o generated -g openapi
mv generated/openapi.json generated/catalyst.json
# generate python client
# openapi-generator generate -i generated/community.yml -o generated/python -g python --package-name catalystpy --ignore-file-override .openapi-generator-ignore
echo generate server and tests
go run ./generator/. ./generator
swagger-go-chi generated/community.yml generated
rm -rf generated/auth generated/cli
find generated -type f -name "*.go" -print0 | xargs -0 sed -i '' -e 's#"github.com/go-chi/chi"#"github.com/go-chi/chi/v5"#g'
echo generate typescript client
openapi-generator generate -i generated/catalyst.yml -o ui/src/client -g typescript-axios --artifact-version 1.0.0-SNAPSHOT
@@ -30,8 +31,10 @@ openapi-generator generate -i generated/catalyst.yml -o ui/src/client -g typescr
rm -rf gen
rm -rf generated/models/old
rm -rf generated/.openapi-generator generated/.openapi-generator-ignore generated/README.md
# rm -rf generated/python/.openapi-generator generated/python/.gitlab-ci.yml generated/python/git_push.sh generated/python/.travis.yml generated/python/.gitignore generated/python/.openapi-generator-ignore
rm -rf ui/src/client/.openapi-generator ui/src/client/git_push.sh ui/src/client/.gitignore ui/src/client/.openapi-generator-ignore
go mod tidy
gci -w -local "github.com/SecurityBrewery/catalyst" .
gci write --Section Standard --Section Default --Section "Prefix(github.com/SecurityBrewery/catalyst)" .
cd internal/maut
gci write --Section Standard --Section Default --Section "Prefix(github.com/jonas-plum/maut)" .
cd ../..

192
generated/api/api.go Executable file
View File

@@ -0,0 +1,192 @@
package api
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"net/http"
"strconv"
"github.com/go-chi/chi/v5"
"github.com/xeipuuv/gojsonschema"
)
type HTTPError struct {
Status int
Internal error
}
func (e *HTTPError) Error() string {
return fmt.Sprintf("HTTPError(%d): %s", e.Status, e.Internal)
}
func (e *HTTPError) Unwrap() error {
return e.Internal
}
// parseURLInt64 reads the chi URL parameter s and converts it to an
// int64, reporting a 422 HTTPError on malformed input.
func parseURLInt64(r *http.Request, s string) (int64, error) {
	value, err := strconv.ParseInt(chi.URLParam(r, s), 10, 64)
	if err != nil {
		return 0, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return value, nil
}
// parseURLInt reads the chi URL parameter s and converts it to an int,
// reporting a 422 HTTPError on malformed input.
func parseURLInt(r *http.Request, s string) (int, error) {
	value, err := strconv.Atoi(chi.URLParam(r, s))
	if err != nil {
		return 0, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return value, nil
}
// parseQueryInt reads the query parameter s as an int, reporting a 422
// HTTPError on malformed input.
func parseQueryInt(r *http.Request, s string) (int, error) {
	value, err := strconv.Atoi(r.URL.Query().Get(s))
	if err != nil {
		return 0, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return value, nil
}
// parseQueryBool reads the query parameter s as a boolean, reporting a
// 422 HTTPError on malformed input.
func parseQueryBool(r *http.Request, s string) (bool, error) {
	value, err := strconv.ParseBool(r.URL.Query().Get(s))
	if err != nil {
		return false, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return value, nil
}
// parseQueryStringArray returns all non-empty values of the query
// parameter key, or nil when the parameter is absent entirely.
func parseQueryStringArray(r *http.Request, key string) ([]string, error) {
	values, present := r.URL.Query()[key]
	if !present {
		return nil, nil
	}

	return removeEmpty(values), nil
}
// removeEmpty filters blank strings out of l, returning nil when
// nothing remains (or when l itself is nil).
func removeEmpty(l []string) []string {
	var kept []string
	for _, v := range l {
		if v != "" {
			kept = append(kept, v)
		}
	}

	return kept
}
// parseQueryBoolArray reads every non-empty value of the query
// parameter key as a boolean, reporting a 422 HTTPError on the first
// malformed value. An absent parameter yields nil.
func parseQueryBoolArray(r *http.Request, key string) ([]bool, error) {
	values, present := r.URL.Query()[key]
	if !present {
		return nil, nil
	}

	var bools []bool
	for _, v := range values {
		if v == "" {
			continue
		}

		parsed, err := strconv.ParseBool(v)
		if err != nil {
			return nil, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
		}

		bools = append(bools, parsed)
	}

	return bools, nil
}
// parseQueryOptionalInt reads the query parameter key as an int,
// returning nil when the parameter is absent or empty and a 422
// HTTPError when it is present but malformed.
func parseQueryOptionalInt(r *http.Request, key string) (*int, error) {
	raw := r.URL.Query().Get(key)
	if raw == "" {
		return nil, nil
	}

	value, err := strconv.Atoi(raw)
	if err != nil {
		return nil, fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return &value, nil
}
// parseQueryOptionalStringArray is an alias for parseQueryStringArray:
// an absent parameter already yields nil, so "optional" needs no extra
// handling.
func parseQueryOptionalStringArray(r *http.Request, key string) ([]string, error) {
	return parseQueryStringArray(r, key)
}
// parseQueryOptionalBoolArray is an alias for parseQueryBoolArray: an
// absent parameter already yields nil, so "optional" needs no extra
// handling.
func parseQueryOptionalBoolArray(r *http.Request, key string) ([]bool, error) {
	return parseQueryBoolArray(r, key)
}
// parseBody decodes the JSON body b into i, reporting a 422 HTTPError
// on malformed input.
func parseBody(b []byte, i interface{}) error {
	if err := json.NewDecoder(bytes.NewReader(b)).Decode(i); err != nil {
		return fmt.Errorf("%w", &HTTPError{http.StatusUnprocessableEntity, err})
	}

	return nil
}
// JSONError writes err as a JSON error response with status 500
// (Internal Server Error).
func JSONError(w http.ResponseWriter, err error) {
	JSONErrorStatus(w, http.StatusInternalServerError, err)
}
func JSONErrorStatus(w http.ResponseWriter, status int, err error) {
w.WriteHeader(status)
b, _ := json.Marshal(map[string]string{"error": err.Error()})
w.Write(b)
}
// response writes the outcome of a handler: an error (mapped to its
// HTTPError status when available), 204 when there is no payload, or
// the payload marshaled as JSON with status 200.
func response(w http.ResponseWriter, v interface{}, err error) {
	if err != nil {
		var httpError *HTTPError
		if errors.As(err, &httpError) {
			JSONErrorStatus(w, httpError.Status, httpError.Internal)

			return
		}

		JSONError(w, err)

		return
	}

	if v == nil {
		w.WriteHeader(http.StatusNoContent)

		return
	}

	// Marshal BEFORE writing the header: the original wrote 200 first
	// and ignored the marshal error, so an encoding failure produced a
	// 200 with an empty body instead of a 500.
	b, err := json.Marshal(v)
	if err != nil {
		JSONError(w, err)

		return
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusOK)
	w.Write(b) //nolint:errcheck // best-effort; client may be gone
}
// validateSchema validates body against schema and, on failure, writes
// a 422 response listing the individual validation errors. It reports
// whether the request was already handled (i.e. validation failed or
// the validator itself errored).
func validateSchema(body []byte, schema *gojsonschema.Schema, w http.ResponseWriter) bool {
	validationResult, err := schema.Validate(gojsonschema.NewBytesLoader(body))
	if err != nil {
		JSONError(w, err)

		return true
	}

	if validationResult.Valid() {
		return false
	}

	w.Header().Set("Content-Type", "application/json")
	w.WriteHeader(http.StatusUnprocessableEntity)

	// Fix of the original's misspelled loop variable "valdiationError".
	var validationErrors []string
	for _, validationError := range validationResult.Errors() {
		validationErrors = append(validationErrors, validationError.String())
	}

	b, _ := json.Marshal(map[string]interface{}{"error": "wrong input", "errors": validationErrors})
	w.Write(b) //nolint:errcheck // best-effort; client may be gone

	return true
}
func NilMiddleware() func(next http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
next.ServeHTTP(w, r)
})
}
}
// IgnoreRoles returns a middleware that performs no role checking; the
// role list is discarded and requests pass through unchanged.
func IgnoreRoles(_ []string) func(next http.Handler) http.Handler {
	return NilMiddleware()
}

1235
generated/api/server.go Executable file

File diff suppressed because it is too large Load Diff

40
generated/api/static.go Executable file
View File

@@ -0,0 +1,40 @@
package api
import (
"io/fs"
"net/http"
"net/http/httputil"
"net/url"
"strings"
)
func VueStatic(fsys fs.FS) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
handler := http.FileServer(http.FS(fsys))
if strings.HasPrefix(r.URL.Path, "/static/") {
handler = http.StripPrefix("/static/", handler)
} else {
r.URL.Path = "/"
}
handler.ServeHTTP(w, r)
}
}
func Static(fsys fs.FS) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
http.FileServer(http.FS(fsys)).ServeHTTP(w, r)
}
}
func Proxy(dest string) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
u, _ := url.Parse(dest)
proxy := httputil.NewSingleHostReverseProxy(u)
r.Host = r.URL.Host
proxy.ServeHTTP(w, r)
}
}

650
generated/api/test_api.go Executable file

File diff suppressed because one or more lines are too long

View File

@@ -1,9 +1,10 @@
// Code generated from CAQLLexer.g4 by ANTLR 4.9.2. DO NOT EDIT.
// Code generated from CAQLLexer.g4 by ANTLR 4.10.1. DO NOT EDIT.
package parser
import (
"fmt"
"sync"
"unicode"
"github.com/antlr/antlr4/runtime/Go/antlr"
@@ -11,391 +12,9 @@ import (
// Suppress unused import error
var _ = fmt.Printf
var _ = sync.Once{}
var _ = unicode.IsLetter
var serializedLexerAtn = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 80, 739,
8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7,
9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12,
4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4,
18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23,
9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9,
28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33,
4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4,
39, 9, 39, 4, 40, 9, 40, 4, 41, 9, 41, 4, 42, 9, 42, 4, 43, 9, 43, 4, 44,
9, 44, 4, 45, 9, 45, 4, 46, 9, 46, 4, 47, 9, 47, 4, 48, 9, 48, 4, 49, 9,
49, 4, 50, 9, 50, 4, 51, 9, 51, 4, 52, 9, 52, 4, 53, 9, 53, 4, 54, 9, 54,
4, 55, 9, 55, 4, 56, 9, 56, 4, 57, 9, 57, 4, 58, 9, 58, 4, 59, 9, 59, 4,
60, 9, 60, 4, 61, 9, 61, 4, 62, 9, 62, 4, 63, 9, 63, 4, 64, 9, 64, 4, 65,
9, 65, 4, 66, 9, 66, 4, 67, 9, 67, 4, 68, 9, 68, 4, 69, 9, 69, 4, 70, 9,
70, 4, 71, 9, 71, 4, 72, 9, 72, 4, 73, 9, 73, 4, 74, 9, 74, 4, 75, 9, 75,
4, 76, 9, 76, 4, 77, 9, 77, 4, 78, 9, 78, 4, 79, 9, 79, 4, 80, 9, 80, 4,
81, 9, 81, 4, 82, 9, 82, 4, 83, 9, 83, 4, 84, 9, 84, 4, 85, 9, 85, 4, 86,
9, 86, 4, 87, 9, 87, 4, 88, 9, 88, 4, 89, 9, 89, 4, 90, 9, 90, 4, 91, 9,
91, 4, 92, 9, 92, 4, 93, 9, 93, 4, 94, 9, 94, 4, 95, 9, 95, 4, 96, 9, 96,
4, 97, 9, 97, 4, 98, 9, 98, 4, 99, 9, 99, 4, 100, 9, 100, 4, 101, 9, 101,
4, 102, 9, 102, 4, 103, 9, 103, 4, 104, 9, 104, 4, 105, 9, 105, 4, 106,
9, 106, 4, 107, 9, 107, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4,
3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 8, 3, 8, 3, 9, 3, 9,
3, 9, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 12, 3, 12, 3, 13, 3, 13, 3,
14, 3, 14, 3, 15, 3, 15, 3, 16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18,
3, 19, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 23, 3,
23, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3, 26, 3, 27, 3, 27, 3, 27, 3, 27,
3, 27, 3, 27, 3, 27, 3, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 28, 3, 28, 3,
29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 5, 29, 294, 10, 29, 3, 30, 3, 30,
3, 30, 3, 30, 3, 31, 3, 31, 3, 31, 3, 31, 3, 32, 3, 32, 3, 32, 3, 32, 3,
32, 3, 32, 3, 32, 3, 32, 3, 33, 3, 33, 3, 33, 3, 33, 3, 33, 3, 34, 3, 34,
3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 34, 3, 35, 3, 35, 3, 35, 3,
35, 3, 35, 3, 35, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 37,
3, 37, 3, 37, 3, 37, 3, 38, 3, 38, 3, 38, 3, 38, 3, 38, 3, 38, 3, 39, 3,
39, 3, 39, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 40, 3, 41,
3, 41, 3, 41, 3, 41, 3, 41, 3, 41, 3, 41, 3, 42, 3, 42, 3, 42, 3, 42, 3,
42, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43,
3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 43, 3, 44, 3, 44, 3, 44, 3,
44, 3, 45, 3, 45, 3, 45, 3, 45, 3, 45, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46,
3, 46, 3, 47, 3, 47, 3, 47, 3, 47, 3, 47, 3, 48, 3, 48, 3, 48, 3, 48, 3,
48, 5, 48, 414, 10, 48, 3, 49, 3, 49, 3, 49, 3, 49, 3, 49, 3, 50, 3, 50,
3, 50, 3, 50, 3, 50, 5, 50, 426, 10, 50, 3, 51, 3, 51, 3, 51, 3, 51, 3,
51, 3, 51, 3, 51, 3, 51, 3, 51, 3, 52, 3, 52, 3, 52, 3, 52, 3, 52, 3, 52,
3, 52, 3, 53, 3, 53, 3, 53, 3, 53, 3, 53, 3, 53, 3, 53, 3, 53, 3, 54, 3,
54, 3, 54, 3, 54, 3, 54, 3, 54, 3, 54, 3, 55, 3, 55, 3, 55, 3, 55, 3, 55,
3, 55, 3, 55, 3, 55, 3, 55, 3, 55, 3, 55, 3, 55, 3, 55, 3, 55, 3, 56, 3,
56, 3, 56, 3, 56, 3, 56, 3, 57, 3, 57, 3, 57, 3, 57, 3, 57, 3, 58, 3, 58,
3, 58, 3, 58, 3, 58, 3, 58, 3, 58, 3, 59, 3, 59, 3, 59, 3, 59, 3, 59, 3,
59, 3, 59, 3, 60, 3, 60, 3, 60, 3, 60, 3, 60, 3, 61, 3, 61, 3, 61, 3, 61,
3, 61, 3, 62, 3, 62, 3, 62, 3, 62, 3, 62, 3, 62, 3, 63, 3, 63, 3, 63, 3,
63, 3, 63, 3, 63, 3, 63, 3, 63, 3, 64, 3, 64, 3, 64, 3, 64, 3, 64, 3, 64,
3, 65, 3, 65, 3, 65, 3, 65, 3, 65, 3, 65, 3, 65, 3, 66, 3, 66, 3, 66, 3,
67, 3, 67, 3, 67, 3, 67, 3, 67, 3, 67, 3, 67, 3, 67, 3, 68, 3, 68, 3, 68,
3, 68, 3, 69, 3, 69, 3, 69, 3, 69, 3, 70, 3, 70, 7, 70, 555, 10, 70, 12,
70, 14, 70, 558, 11, 70, 3, 71, 3, 71, 7, 71, 562, 10, 71, 12, 71, 14,
71, 565, 11, 71, 3, 71, 3, 71, 3, 71, 3, 71, 3, 71, 6, 71, 572, 10, 71,
13, 71, 14, 71, 573, 3, 71, 3, 71, 3, 71, 3, 71, 6, 71, 580, 10, 71, 13,
71, 14, 71, 581, 5, 71, 584, 10, 71, 3, 72, 3, 72, 7, 72, 588, 10, 72,
12, 72, 14, 72, 591, 11, 72, 3, 72, 5, 72, 594, 10, 72, 3, 72, 3, 72, 6,
72, 598, 10, 72, 13, 72, 14, 72, 599, 3, 72, 3, 72, 5, 72, 604, 10, 72,
3, 72, 6, 72, 607, 10, 72, 13, 72, 14, 72, 608, 5, 72, 611, 10, 72, 3,
73, 3, 73, 3, 73, 3, 74, 3, 74, 3, 74, 3, 74, 3, 74, 3, 74, 7, 74, 622,
10, 74, 12, 74, 14, 74, 625, 11, 74, 3, 74, 3, 74, 3, 74, 3, 74, 3, 74,
3, 74, 3, 74, 7, 74, 634, 10, 74, 12, 74, 14, 74, 637, 11, 74, 3, 74, 5,
74, 640, 10, 74, 3, 75, 3, 75, 3, 75, 3, 75, 7, 75, 646, 10, 75, 12, 75,
14, 75, 649, 11, 75, 3, 75, 5, 75, 652, 10, 75, 3, 75, 3, 75, 5, 75, 656,
10, 75, 3, 75, 3, 75, 3, 76, 3, 76, 3, 76, 3, 76, 7, 76, 664, 10, 76, 12,
76, 14, 76, 667, 11, 76, 3, 76, 3, 76, 3, 76, 3, 76, 3, 76, 3, 77, 3, 77,
3, 77, 3, 77, 3, 78, 3, 78, 3, 79, 3, 79, 3, 80, 3, 80, 3, 81, 3, 81, 3,
82, 3, 82, 3, 83, 3, 83, 3, 84, 3, 84, 3, 85, 3, 85, 3, 86, 3, 86, 3, 87,
3, 87, 3, 88, 3, 88, 3, 89, 3, 89, 3, 90, 3, 90, 3, 91, 3, 91, 3, 92, 3,
92, 3, 93, 3, 93, 3, 94, 3, 94, 3, 95, 3, 95, 3, 96, 3, 96, 3, 97, 3, 97,
3, 98, 3, 98, 3, 99, 3, 99, 3, 100, 3, 100, 3, 101, 3, 101, 3, 102, 3,
102, 3, 103, 3, 103, 3, 104, 3, 104, 3, 105, 3, 105, 3, 106, 3, 106, 3,
107, 3, 107, 3, 107, 3, 107, 3, 665, 2, 108, 3, 3, 5, 4, 7, 5, 9, 6, 11,
7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16,
31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25,
49, 26, 51, 27, 53, 28, 55, 29, 57, 30, 59, 31, 61, 32, 63, 33, 65, 34,
67, 35, 69, 36, 71, 37, 73, 38, 75, 39, 77, 40, 79, 41, 81, 42, 83, 43,
85, 44, 87, 45, 89, 46, 91, 47, 93, 48, 95, 49, 97, 50, 99, 51, 101, 52,
103, 53, 105, 54, 107, 55, 109, 56, 111, 57, 113, 58, 115, 59, 117, 60,
119, 61, 121, 62, 123, 63, 125, 64, 127, 65, 129, 66, 131, 67, 133, 68,
135, 69, 137, 70, 139, 71, 141, 72, 143, 73, 145, 74, 147, 75, 149, 76,
151, 77, 153, 78, 155, 79, 157, 2, 159, 2, 161, 2, 163, 2, 165, 2, 167,
2, 169, 2, 171, 2, 173, 2, 175, 2, 177, 2, 179, 2, 181, 2, 183, 2, 185,
2, 187, 2, 189, 2, 191, 2, 193, 2, 195, 2, 197, 2, 199, 2, 201, 2, 203,
2, 205, 2, 207, 2, 209, 2, 211, 2, 213, 80, 3, 2, 39, 5, 2, 67, 92, 97,
97, 99, 124, 6, 2, 50, 59, 67, 92, 97, 97, 99, 124, 3, 2, 51, 59, 3, 2,
50, 51, 4, 2, 45, 45, 47, 47, 4, 2, 41, 41, 94, 94, 4, 2, 36, 36, 94, 94,
4, 2, 12, 12, 15, 15, 5, 2, 11, 13, 15, 15, 34, 34, 5, 2, 50, 59, 67, 72,
99, 104, 3, 2, 50, 59, 4, 2, 67, 67, 99, 99, 4, 2, 68, 68, 100, 100, 4,
2, 69, 69, 101, 101, 4, 2, 70, 70, 102, 102, 4, 2, 71, 71, 103, 103, 4,
2, 72, 72, 104, 104, 4, 2, 73, 73, 105, 105, 4, 2, 74, 74, 106, 106, 4,
2, 75, 75, 107, 107, 4, 2, 76, 76, 108, 108, 4, 2, 77, 77, 109, 109, 4,
2, 78, 78, 110, 110, 4, 2, 79, 79, 111, 111, 4, 2, 80, 80, 112, 112, 4,
2, 81, 81, 113, 113, 4, 2, 82, 82, 114, 114, 4, 2, 83, 83, 115, 115, 4,
2, 84, 84, 116, 116, 4, 2, 85, 85, 117, 117, 4, 2, 86, 86, 118, 118, 4,
2, 87, 87, 119, 119, 4, 2, 88, 88, 120, 120, 4, 2, 89, 89, 121, 121, 4,
2, 90, 90, 122, 122, 4, 2, 91, 91, 123, 123, 4, 2, 92, 92, 124, 124, 2,
738, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2,
2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3,
2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25,
3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2,
33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2,
2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2,
2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2,
2, 2, 2, 57, 3, 2, 2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, 63, 3,
2, 2, 2, 2, 65, 3, 2, 2, 2, 2, 67, 3, 2, 2, 2, 2, 69, 3, 2, 2, 2, 2, 71,
3, 2, 2, 2, 2, 73, 3, 2, 2, 2, 2, 75, 3, 2, 2, 2, 2, 77, 3, 2, 2, 2, 2,
79, 3, 2, 2, 2, 2, 81, 3, 2, 2, 2, 2, 83, 3, 2, 2, 2, 2, 85, 3, 2, 2, 2,
2, 87, 3, 2, 2, 2, 2, 89, 3, 2, 2, 2, 2, 91, 3, 2, 2, 2, 2, 93, 3, 2, 2,
2, 2, 95, 3, 2, 2, 2, 2, 97, 3, 2, 2, 2, 2, 99, 3, 2, 2, 2, 2, 101, 3,
2, 2, 2, 2, 103, 3, 2, 2, 2, 2, 105, 3, 2, 2, 2, 2, 107, 3, 2, 2, 2, 2,
109, 3, 2, 2, 2, 2, 111, 3, 2, 2, 2, 2, 113, 3, 2, 2, 2, 2, 115, 3, 2,
2, 2, 2, 117, 3, 2, 2, 2, 2, 119, 3, 2, 2, 2, 2, 121, 3, 2, 2, 2, 2, 123,
3, 2, 2, 2, 2, 125, 3, 2, 2, 2, 2, 127, 3, 2, 2, 2, 2, 129, 3, 2, 2, 2,
2, 131, 3, 2, 2, 2, 2, 133, 3, 2, 2, 2, 2, 135, 3, 2, 2, 2, 2, 137, 3,
2, 2, 2, 2, 139, 3, 2, 2, 2, 2, 141, 3, 2, 2, 2, 2, 143, 3, 2, 2, 2, 2,
145, 3, 2, 2, 2, 2, 147, 3, 2, 2, 2, 2, 149, 3, 2, 2, 2, 2, 151, 3, 2,
2, 2, 2, 153, 3, 2, 2, 2, 2, 155, 3, 2, 2, 2, 2, 213, 3, 2, 2, 2, 3, 215,
3, 2, 2, 2, 5, 217, 3, 2, 2, 2, 7, 220, 3, 2, 2, 2, 9, 223, 3, 2, 2, 2,
11, 226, 3, 2, 2, 2, 13, 229, 3, 2, 2, 2, 15, 231, 3, 2, 2, 2, 17, 233,
3, 2, 2, 2, 19, 236, 3, 2, 2, 2, 21, 239, 3, 2, 2, 2, 23, 241, 3, 2, 2,
2, 25, 243, 3, 2, 2, 2, 27, 245, 3, 2, 2, 2, 29, 247, 3, 2, 2, 2, 31, 249,
3, 2, 2, 2, 33, 251, 3, 2, 2, 2, 35, 253, 3, 2, 2, 2, 37, 256, 3, 2, 2,
2, 39, 259, 3, 2, 2, 2, 41, 261, 3, 2, 2, 2, 43, 263, 3, 2, 2, 2, 45, 265,
3, 2, 2, 2, 47, 267, 3, 2, 2, 2, 49, 269, 3, 2, 2, 2, 51, 271, 3, 2, 2,
2, 53, 273, 3, 2, 2, 2, 55, 283, 3, 2, 2, 2, 57, 293, 3, 2, 2, 2, 59, 295,
3, 2, 2, 2, 61, 299, 3, 2, 2, 2, 63, 303, 3, 2, 2, 2, 65, 311, 3, 2, 2,
2, 67, 316, 3, 2, 2, 2, 69, 325, 3, 2, 2, 2, 71, 331, 3, 2, 2, 2, 73, 338,
3, 2, 2, 2, 75, 342, 3, 2, 2, 2, 77, 348, 3, 2, 2, 2, 79, 351, 3, 2, 2,
2, 81, 359, 3, 2, 2, 2, 83, 366, 3, 2, 2, 2, 85, 371, 3, 2, 2, 2, 87, 388,
3, 2, 2, 2, 89, 392, 3, 2, 2, 2, 91, 397, 3, 2, 2, 2, 93, 403, 3, 2, 2,
2, 95, 413, 3, 2, 2, 2, 97, 415, 3, 2, 2, 2, 99, 425, 3, 2, 2, 2, 101,
427, 3, 2, 2, 2, 103, 436, 3, 2, 2, 2, 105, 443, 3, 2, 2, 2, 107, 451,
3, 2, 2, 2, 109, 458, 3, 2, 2, 2, 111, 472, 3, 2, 2, 2, 113, 477, 3, 2,
2, 2, 115, 482, 3, 2, 2, 2, 117, 489, 3, 2, 2, 2, 119, 496, 3, 2, 2, 2,
121, 501, 3, 2, 2, 2, 123, 506, 3, 2, 2, 2, 125, 512, 3, 2, 2, 2, 127,
520, 3, 2, 2, 2, 129, 526, 3, 2, 2, 2, 131, 533, 3, 2, 2, 2, 133, 536,
3, 2, 2, 2, 135, 544, 3, 2, 2, 2, 137, 548, 3, 2, 2, 2, 139, 552, 3, 2,
2, 2, 141, 583, 3, 2, 2, 2, 143, 593, 3, 2, 2, 2, 145, 612, 3, 2, 2, 2,
147, 639, 3, 2, 2, 2, 149, 641, 3, 2, 2, 2, 151, 659, 3, 2, 2, 2, 153,
673, 3, 2, 2, 2, 155, 677, 3, 2, 2, 2, 157, 679, 3, 2, 2, 2, 159, 681,
3, 2, 2, 2, 161, 683, 3, 2, 2, 2, 163, 685, 3, 2, 2, 2, 165, 687, 3, 2,
2, 2, 167, 689, 3, 2, 2, 2, 169, 691, 3, 2, 2, 2, 171, 693, 3, 2, 2, 2,
173, 695, 3, 2, 2, 2, 175, 697, 3, 2, 2, 2, 177, 699, 3, 2, 2, 2, 179,
701, 3, 2, 2, 2, 181, 703, 3, 2, 2, 2, 183, 705, 3, 2, 2, 2, 185, 707,
3, 2, 2, 2, 187, 709, 3, 2, 2, 2, 189, 711, 3, 2, 2, 2, 191, 713, 3, 2,
2, 2, 193, 715, 3, 2, 2, 2, 195, 717, 3, 2, 2, 2, 197, 719, 3, 2, 2, 2,
199, 721, 3, 2, 2, 2, 201, 723, 3, 2, 2, 2, 203, 725, 3, 2, 2, 2, 205,
727, 3, 2, 2, 2, 207, 729, 3, 2, 2, 2, 209, 731, 3, 2, 2, 2, 211, 733,
3, 2, 2, 2, 213, 735, 3, 2, 2, 2, 215, 216, 7, 48, 2, 2, 216, 4, 3, 2,
2, 2, 217, 218, 7, 63, 2, 2, 218, 219, 7, 128, 2, 2, 219, 6, 3, 2, 2, 2,
220, 221, 7, 35, 2, 2, 221, 222, 7, 128, 2, 2, 222, 8, 3, 2, 2, 2, 223,
224, 7, 63, 2, 2, 224, 225, 7, 63, 2, 2, 225, 10, 3, 2, 2, 2, 226, 227,
7, 35, 2, 2, 227, 228, 7, 63, 2, 2, 228, 12, 3, 2, 2, 2, 229, 230, 7, 62,
2, 2, 230, 14, 3, 2, 2, 2, 231, 232, 7, 64, 2, 2, 232, 16, 3, 2, 2, 2,
233, 234, 7, 62, 2, 2, 234, 235, 7, 63, 2, 2, 235, 18, 3, 2, 2, 2, 236,
237, 7, 64, 2, 2, 237, 238, 7, 63, 2, 2, 238, 20, 3, 2, 2, 2, 239, 240,
7, 45, 2, 2, 240, 22, 3, 2, 2, 2, 241, 242, 7, 47, 2, 2, 242, 24, 3, 2,
2, 2, 243, 244, 7, 44, 2, 2, 244, 26, 3, 2, 2, 2, 245, 246, 7, 49, 2, 2,
246, 28, 3, 2, 2, 2, 247, 248, 7, 39, 2, 2, 248, 30, 3, 2, 2, 2, 249, 250,
7, 65, 2, 2, 250, 32, 3, 2, 2, 2, 251, 252, 7, 60, 2, 2, 252, 34, 3, 2,
2, 2, 253, 254, 7, 60, 2, 2, 254, 255, 7, 60, 2, 2, 255, 36, 3, 2, 2, 2,
256, 257, 7, 48, 2, 2, 257, 258, 7, 48, 2, 2, 258, 38, 3, 2, 2, 2, 259,
260, 7, 46, 2, 2, 260, 40, 3, 2, 2, 2, 261, 262, 7, 42, 2, 2, 262, 42,
3, 2, 2, 2, 263, 264, 7, 43, 2, 2, 264, 44, 3, 2, 2, 2, 265, 266, 7, 125,
2, 2, 266, 46, 3, 2, 2, 2, 267, 268, 7, 127, 2, 2, 268, 48, 3, 2, 2, 2,
269, 270, 7, 93, 2, 2, 270, 50, 3, 2, 2, 2, 271, 272, 7, 95, 2, 2, 272,
52, 3, 2, 2, 2, 273, 274, 5, 161, 81, 2, 274, 275, 5, 173, 87, 2, 275,
276, 5, 173, 87, 2, 276, 277, 5, 195, 98, 2, 277, 278, 5, 169, 85, 2, 278,
279, 5, 173, 87, 2, 279, 280, 5, 161, 81, 2, 280, 281, 5, 199, 100, 2,
281, 282, 5, 169, 85, 2, 282, 54, 3, 2, 2, 2, 283, 284, 5, 161, 81, 2,
284, 285, 5, 183, 92, 2, 285, 286, 5, 183, 92, 2, 286, 56, 3, 2, 2, 2,
287, 288, 5, 161, 81, 2, 288, 289, 5, 187, 94, 2, 289, 290, 5, 167, 84,
2, 290, 294, 3, 2, 2, 2, 291, 292, 7, 40, 2, 2, 292, 294, 7, 40, 2, 2,
293, 287, 3, 2, 2, 2, 293, 291, 3, 2, 2, 2, 294, 58, 3, 2, 2, 2, 295, 296,
5, 161, 81, 2, 296, 297, 5, 187, 94, 2, 297, 298, 5, 209, 105, 2, 298,
60, 3, 2, 2, 2, 299, 300, 5, 161, 81, 2, 300, 301, 5, 197, 99, 2, 301,
302, 5, 165, 83, 2, 302, 62, 3, 2, 2, 2, 303, 304, 5, 165, 83, 2, 304,
305, 5, 189, 95, 2, 305, 306, 5, 183, 92, 2, 306, 307, 5, 183, 92, 2, 307,
308, 5, 169, 85, 2, 308, 309, 5, 165, 83, 2, 309, 310, 5, 199, 100, 2,
310, 64, 3, 2, 2, 2, 311, 312, 5, 167, 84, 2, 312, 313, 5, 169, 85, 2,
313, 314, 5, 197, 99, 2, 314, 315, 5, 165, 83, 2, 315, 66, 3, 2, 2, 2,
316, 317, 5, 167, 84, 2, 317, 318, 5, 177, 89, 2, 318, 319, 5, 197, 99,
2, 319, 320, 5, 199, 100, 2, 320, 321, 5, 177, 89, 2, 321, 322, 5, 187,
94, 2, 322, 323, 5, 165, 83, 2, 323, 324, 5, 199, 100, 2, 324, 68, 3, 2,
2, 2, 325, 326, 5, 171, 86, 2, 326, 327, 5, 161, 81, 2, 327, 328, 5, 183,
92, 2, 328, 329, 5, 197, 99, 2, 329, 330, 5, 169, 85, 2, 330, 70, 3, 2,
2, 2, 331, 332, 5, 171, 86, 2, 332, 333, 5, 177, 89, 2, 333, 334, 5, 183,
92, 2, 334, 335, 5, 199, 100, 2, 335, 336, 5, 169, 85, 2, 336, 337, 5,
195, 98, 2, 337, 72, 3, 2, 2, 2, 338, 339, 5, 171, 86, 2, 339, 340, 5,
189, 95, 2, 340, 341, 5, 195, 98, 2, 341, 74, 3, 2, 2, 2, 342, 343, 5,
173, 87, 2, 343, 344, 5, 195, 98, 2, 344, 345, 5, 161, 81, 2, 345, 346,
5, 191, 96, 2, 346, 347, 5, 175, 88, 2, 347, 76, 3, 2, 2, 2, 348, 349,
5, 177, 89, 2, 349, 350, 5, 187, 94, 2, 350, 78, 3, 2, 2, 2, 351, 352,
5, 177, 89, 2, 352, 353, 5, 187, 94, 2, 353, 354, 5, 163, 82, 2, 354, 355,
5, 189, 95, 2, 355, 356, 5, 201, 101, 2, 356, 357, 5, 187, 94, 2, 357,
358, 5, 167, 84, 2, 358, 80, 3, 2, 2, 2, 359, 360, 5, 177, 89, 2, 360,
361, 5, 187, 94, 2, 361, 362, 5, 197, 99, 2, 362, 363, 5, 169, 85, 2, 363,
364, 5, 195, 98, 2, 364, 365, 5, 199, 100, 2, 365, 82, 3, 2, 2, 2, 366,
367, 5, 177, 89, 2, 367, 368, 5, 187, 94, 2, 368, 369, 5, 199, 100, 2,
369, 370, 5, 189, 95, 2, 370, 84, 3, 2, 2, 2, 371, 372, 5, 181, 91, 2,
372, 373, 7, 97, 2, 2, 373, 374, 5, 197, 99, 2, 374, 375, 5, 175, 88, 2,
375, 376, 5, 189, 95, 2, 376, 377, 5, 195, 98, 2, 377, 378, 5, 199, 100,
2, 378, 379, 5, 169, 85, 2, 379, 380, 5, 197, 99, 2, 380, 381, 5, 199,
100, 2, 381, 382, 7, 97, 2, 2, 382, 383, 5, 191, 96, 2, 383, 384, 5, 161,
81, 2, 384, 385, 5, 199, 100, 2, 385, 386, 5, 175, 88, 2, 386, 387, 5,
197, 99, 2, 387, 86, 3, 2, 2, 2, 388, 389, 5, 183, 92, 2, 389, 390, 5,
169, 85, 2, 390, 391, 5, 199, 100, 2, 391, 88, 3, 2, 2, 2, 392, 393, 5,
183, 92, 2, 393, 394, 5, 177, 89, 2, 394, 395, 5, 181, 91, 2, 395, 396,
5, 169, 85, 2, 396, 90, 3, 2, 2, 2, 397, 398, 5, 183, 92, 2, 398, 399,
5, 177, 89, 2, 399, 400, 5, 185, 93, 2, 400, 401, 5, 177, 89, 2, 401, 402,
5, 199, 100, 2, 402, 92, 3, 2, 2, 2, 403, 404, 5, 187, 94, 2, 404, 405,
5, 189, 95, 2, 405, 406, 5, 187, 94, 2, 406, 407, 5, 169, 85, 2, 407, 94,
3, 2, 2, 2, 408, 409, 5, 187, 94, 2, 409, 410, 5, 189, 95, 2, 410, 411,
5, 199, 100, 2, 411, 414, 3, 2, 2, 2, 412, 414, 7, 35, 2, 2, 413, 408,
3, 2, 2, 2, 413, 412, 3, 2, 2, 2, 414, 96, 3, 2, 2, 2, 415, 416, 5, 187,
94, 2, 416, 417, 5, 201, 101, 2, 417, 418, 5, 183, 92, 2, 418, 419, 5,
183, 92, 2, 419, 98, 3, 2, 2, 2, 420, 421, 5, 189, 95, 2, 421, 422, 5,
195, 98, 2, 422, 426, 3, 2, 2, 2, 423, 424, 7, 126, 2, 2, 424, 426, 7,
126, 2, 2, 425, 420, 3, 2, 2, 2, 425, 423, 3, 2, 2, 2, 426, 100, 3, 2,
2, 2, 427, 428, 5, 189, 95, 2, 428, 429, 5, 201, 101, 2, 429, 430, 5, 199,
100, 2, 430, 431, 5, 163, 82, 2, 431, 432, 5, 189, 95, 2, 432, 433, 5,
201, 101, 2, 433, 434, 5, 187, 94, 2, 434, 435, 5, 167, 84, 2, 435, 102,
3, 2, 2, 2, 436, 437, 5, 195, 98, 2, 437, 438, 5, 169, 85, 2, 438, 439,
5, 185, 93, 2, 439, 440, 5, 189, 95, 2, 440, 441, 5, 203, 102, 2, 441,
442, 5, 169, 85, 2, 442, 104, 3, 2, 2, 2, 443, 444, 5, 195, 98, 2, 444,
445, 5, 169, 85, 2, 445, 446, 5, 191, 96, 2, 446, 447, 5, 183, 92, 2, 447,
448, 5, 161, 81, 2, 448, 449, 5, 165, 83, 2, 449, 450, 5, 169, 85, 2, 450,
106, 3, 2, 2, 2, 451, 452, 5, 195, 98, 2, 452, 453, 5, 169, 85, 2, 453,
454, 5, 199, 100, 2, 454, 455, 5, 201, 101, 2, 455, 456, 5, 195, 98, 2,
456, 457, 5, 187, 94, 2, 457, 108, 3, 2, 2, 2, 458, 459, 5, 197, 99, 2,
459, 460, 5, 175, 88, 2, 460, 461, 5, 189, 95, 2, 461, 462, 5, 195, 98,
2, 462, 463, 5, 199, 100, 2, 463, 464, 5, 169, 85, 2, 464, 465, 5, 197,
99, 2, 465, 466, 5, 199, 100, 2, 466, 467, 7, 97, 2, 2, 467, 468, 5, 191,
96, 2, 468, 469, 5, 161, 81, 2, 469, 470, 5, 199, 100, 2, 470, 471, 5,
175, 88, 2, 471, 110, 3, 2, 2, 2, 472, 473, 5, 197, 99, 2, 473, 474, 5,
189, 95, 2, 474, 475, 5, 195, 98, 2, 475, 476, 5, 199, 100, 2, 476, 112,
3, 2, 2, 2, 477, 478, 5, 199, 100, 2, 478, 479, 5, 195, 98, 2, 479, 480,
5, 201, 101, 2, 480, 481, 5, 169, 85, 2, 481, 114, 3, 2, 2, 2, 482, 483,
5, 201, 101, 2, 483, 484, 5, 191, 96, 2, 484, 485, 5, 167, 84, 2, 485,
486, 5, 161, 81, 2, 486, 487, 5, 199, 100, 2, 487, 488, 5, 169, 85, 2,
488, 116, 3, 2, 2, 2, 489, 490, 5, 201, 101, 2, 490, 491, 5, 191, 96, 2,
491, 492, 5, 197, 99, 2, 492, 493, 5, 169, 85, 2, 493, 494, 5, 195, 98,
2, 494, 495, 5, 199, 100, 2, 495, 118, 3, 2, 2, 2, 496, 497, 5, 205, 103,
2, 497, 498, 5, 177, 89, 2, 498, 499, 5, 199, 100, 2, 499, 500, 5, 175,
88, 2, 500, 120, 3, 2, 2, 2, 501, 502, 5, 181, 91, 2, 502, 503, 5, 169,
85, 2, 503, 504, 5, 169, 85, 2, 504, 505, 5, 191, 96, 2, 505, 122, 3, 2,
2, 2, 506, 507, 5, 165, 83, 2, 507, 508, 5, 189, 95, 2, 508, 509, 5, 201,
101, 2, 509, 510, 5, 187, 94, 2, 510, 511, 5, 199, 100, 2, 511, 124, 3,
2, 2, 2, 512, 513, 5, 189, 95, 2, 513, 514, 5, 191, 96, 2, 514, 515, 5,
199, 100, 2, 515, 516, 5, 177, 89, 2, 516, 517, 5, 189, 95, 2, 517, 518,
5, 187, 94, 2, 518, 519, 5, 197, 99, 2, 519, 126, 3, 2, 2, 2, 520, 521,
5, 191, 96, 2, 521, 522, 5, 195, 98, 2, 522, 523, 5, 201, 101, 2, 523,
524, 5, 187, 94, 2, 524, 525, 5, 169, 85, 2, 525, 128, 3, 2, 2, 2, 526,
527, 5, 197, 99, 2, 527, 528, 5, 169, 85, 2, 528, 529, 5, 161, 81, 2, 529,
530, 5, 195, 98, 2, 530, 531, 5, 165, 83, 2, 531, 532, 5, 175, 88, 2, 532,
130, 3, 2, 2, 2, 533, 534, 5, 199, 100, 2, 534, 535, 5, 189, 95, 2, 535,
132, 3, 2, 2, 2, 536, 537, 5, 165, 83, 2, 537, 538, 5, 201, 101, 2, 538,
539, 5, 195, 98, 2, 539, 540, 5, 195, 98, 2, 540, 541, 5, 169, 85, 2, 541,
542, 5, 187, 94, 2, 542, 543, 5, 199, 100, 2, 543, 134, 3, 2, 2, 2, 544,
545, 5, 187, 94, 2, 545, 546, 5, 169, 85, 2, 546, 547, 5, 205, 103, 2,
547, 136, 3, 2, 2, 2, 548, 549, 5, 189, 95, 2, 549, 550, 5, 183, 92, 2,
550, 551, 5, 167, 84, 2, 551, 138, 3, 2, 2, 2, 552, 556, 9, 2, 2, 2, 553,
555, 9, 3, 2, 2, 554, 553, 3, 2, 2, 2, 555, 558, 3, 2, 2, 2, 556, 554,
3, 2, 2, 2, 556, 557, 3, 2, 2, 2, 557, 140, 3, 2, 2, 2, 558, 556, 3, 2,
2, 2, 559, 563, 9, 4, 2, 2, 560, 562, 5, 159, 80, 2, 561, 560, 3, 2, 2,
2, 562, 565, 3, 2, 2, 2, 563, 561, 3, 2, 2, 2, 563, 564, 3, 2, 2, 2, 564,
584, 3, 2, 2, 2, 565, 563, 3, 2, 2, 2, 566, 584, 7, 50, 2, 2, 567, 568,
7, 50, 2, 2, 568, 569, 7, 122, 2, 2, 569, 571, 3, 2, 2, 2, 570, 572, 5,
157, 79, 2, 571, 570, 3, 2, 2, 2, 572, 573, 3, 2, 2, 2, 573, 571, 3, 2,
2, 2, 573, 574, 3, 2, 2, 2, 574, 584, 3, 2, 2, 2, 575, 576, 7, 50, 2, 2,
576, 577, 7, 100, 2, 2, 577, 579, 3, 2, 2, 2, 578, 580, 9, 5, 2, 2, 579,
578, 3, 2, 2, 2, 580, 581, 3, 2, 2, 2, 581, 579, 3, 2, 2, 2, 581, 582,
3, 2, 2, 2, 582, 584, 3, 2, 2, 2, 583, 559, 3, 2, 2, 2, 583, 566, 3, 2,
2, 2, 583, 567, 3, 2, 2, 2, 583, 575, 3, 2, 2, 2, 584, 142, 3, 2, 2, 2,
585, 589, 9, 4, 2, 2, 586, 588, 5, 159, 80, 2, 587, 586, 3, 2, 2, 2, 588,
591, 3, 2, 2, 2, 589, 587, 3, 2, 2, 2, 589, 590, 3, 2, 2, 2, 590, 594,
3, 2, 2, 2, 591, 589, 3, 2, 2, 2, 592, 594, 7, 50, 2, 2, 593, 585, 3, 2,
2, 2, 593, 592, 3, 2, 2, 2, 593, 594, 3, 2, 2, 2, 594, 595, 3, 2, 2, 2,
595, 597, 7, 48, 2, 2, 596, 598, 5, 159, 80, 2, 597, 596, 3, 2, 2, 2, 598,
599, 3, 2, 2, 2, 599, 597, 3, 2, 2, 2, 599, 600, 3, 2, 2, 2, 600, 610,
3, 2, 2, 2, 601, 603, 5, 169, 85, 2, 602, 604, 9, 6, 2, 2, 603, 602, 3,
2, 2, 2, 603, 604, 3, 2, 2, 2, 604, 606, 3, 2, 2, 2, 605, 607, 5, 159,
80, 2, 606, 605, 3, 2, 2, 2, 607, 608, 3, 2, 2, 2, 608, 606, 3, 2, 2, 2,
608, 609, 3, 2, 2, 2, 609, 611, 3, 2, 2, 2, 610, 601, 3, 2, 2, 2, 610,
611, 3, 2, 2, 2, 611, 144, 3, 2, 2, 2, 612, 613, 7, 66, 2, 2, 613, 614,
5, 139, 70, 2, 614, 146, 3, 2, 2, 2, 615, 623, 7, 41, 2, 2, 616, 617, 7,
94, 2, 2, 617, 622, 11, 2, 2, 2, 618, 619, 7, 41, 2, 2, 619, 622, 7, 41,
2, 2, 620, 622, 10, 7, 2, 2, 621, 616, 3, 2, 2, 2, 621, 618, 3, 2, 2, 2,
621, 620, 3, 2, 2, 2, 622, 625, 3, 2, 2, 2, 623, 621, 3, 2, 2, 2, 623,
624, 3, 2, 2, 2, 624, 626, 3, 2, 2, 2, 625, 623, 3, 2, 2, 2, 626, 640,
7, 41, 2, 2, 627, 635, 7, 36, 2, 2, 628, 629, 7, 94, 2, 2, 629, 634, 11,
2, 2, 2, 630, 631, 7, 36, 2, 2, 631, 634, 7, 36, 2, 2, 632, 634, 10, 8,
2, 2, 633, 628, 3, 2, 2, 2, 633, 630, 3, 2, 2, 2, 633, 632, 3, 2, 2, 2,
634, 637, 3, 2, 2, 2, 635, 633, 3, 2, 2, 2, 635, 636, 3, 2, 2, 2, 636,
638, 3, 2, 2, 2, 637, 635, 3, 2, 2, 2, 638, 640, 7, 36, 2, 2, 639, 615,
3, 2, 2, 2, 639, 627, 3, 2, 2, 2, 640, 148, 3, 2, 2, 2, 641, 642, 7, 49,
2, 2, 642, 643, 7, 49, 2, 2, 643, 647, 3, 2, 2, 2, 644, 646, 10, 9, 2,
2, 645, 644, 3, 2, 2, 2, 646, 649, 3, 2, 2, 2, 647, 645, 3, 2, 2, 2, 647,
648, 3, 2, 2, 2, 648, 655, 3, 2, 2, 2, 649, 647, 3, 2, 2, 2, 650, 652,
7, 15, 2, 2, 651, 650, 3, 2, 2, 2, 651, 652, 3, 2, 2, 2, 652, 653, 3, 2,
2, 2, 653, 656, 7, 12, 2, 2, 654, 656, 7, 2, 2, 3, 655, 651, 3, 2, 2, 2,
655, 654, 3, 2, 2, 2, 656, 657, 3, 2, 2, 2, 657, 658, 8, 75, 2, 2, 658,
150, 3, 2, 2, 2, 659, 660, 7, 49, 2, 2, 660, 661, 7, 44, 2, 2, 661, 665,
3, 2, 2, 2, 662, 664, 11, 2, 2, 2, 663, 662, 3, 2, 2, 2, 664, 667, 3, 2,
2, 2, 665, 666, 3, 2, 2, 2, 665, 663, 3, 2, 2, 2, 666, 668, 3, 2, 2, 2,
667, 665, 3, 2, 2, 2, 668, 669, 7, 44, 2, 2, 669, 670, 7, 49, 2, 2, 670,
671, 3, 2, 2, 2, 671, 672, 8, 76, 2, 2, 672, 152, 3, 2, 2, 2, 673, 674,
9, 10, 2, 2, 674, 675, 3, 2, 2, 2, 675, 676, 8, 77, 2, 2, 676, 154, 3,
2, 2, 2, 677, 678, 11, 2, 2, 2, 678, 156, 3, 2, 2, 2, 679, 680, 9, 11,
2, 2, 680, 158, 3, 2, 2, 2, 681, 682, 9, 12, 2, 2, 682, 160, 3, 2, 2, 2,
683, 684, 9, 13, 2, 2, 684, 162, 3, 2, 2, 2, 685, 686, 9, 14, 2, 2, 686,
164, 3, 2, 2, 2, 687, 688, 9, 15, 2, 2, 688, 166, 3, 2, 2, 2, 689, 690,
9, 16, 2, 2, 690, 168, 3, 2, 2, 2, 691, 692, 9, 17, 2, 2, 692, 170, 3,
2, 2, 2, 693, 694, 9, 18, 2, 2, 694, 172, 3, 2, 2, 2, 695, 696, 9, 19,
2, 2, 696, 174, 3, 2, 2, 2, 697, 698, 9, 20, 2, 2, 698, 176, 3, 2, 2, 2,
699, 700, 9, 21, 2, 2, 700, 178, 3, 2, 2, 2, 701, 702, 9, 22, 2, 2, 702,
180, 3, 2, 2, 2, 703, 704, 9, 23, 2, 2, 704, 182, 3, 2, 2, 2, 705, 706,
9, 24, 2, 2, 706, 184, 3, 2, 2, 2, 707, 708, 9, 25, 2, 2, 708, 186, 3,
2, 2, 2, 709, 710, 9, 26, 2, 2, 710, 188, 3, 2, 2, 2, 711, 712, 9, 27,
2, 2, 712, 190, 3, 2, 2, 2, 713, 714, 9, 28, 2, 2, 714, 192, 3, 2, 2, 2,
715, 716, 9, 29, 2, 2, 716, 194, 3, 2, 2, 2, 717, 718, 9, 30, 2, 2, 718,
196, 3, 2, 2, 2, 719, 720, 9, 31, 2, 2, 720, 198, 3, 2, 2, 2, 721, 722,
9, 32, 2, 2, 722, 200, 3, 2, 2, 2, 723, 724, 9, 33, 2, 2, 724, 202, 3,
2, 2, 2, 725, 726, 9, 34, 2, 2, 726, 204, 3, 2, 2, 2, 727, 728, 9, 35,
2, 2, 728, 206, 3, 2, 2, 2, 729, 730, 9, 36, 2, 2, 730, 208, 3, 2, 2, 2,
731, 732, 9, 37, 2, 2, 732, 210, 3, 2, 2, 2, 733, 734, 9, 38, 2, 2, 734,
212, 3, 2, 2, 2, 735, 736, 11, 2, 2, 2, 736, 737, 3, 2, 2, 2, 737, 738,
8, 107, 3, 2, 738, 214, 3, 2, 2, 2, 26, 2, 293, 413, 425, 556, 563, 573,
581, 583, 589, 593, 599, 603, 608, 610, 621, 623, 633, 635, 639, 647, 651,
655, 665, 4, 2, 3, 2, 2, 4, 2,
}
var lexerChannelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN", "ERRORCHANNEL",
}
var lexerModeNames = []string{
"DEFAULT_MODE",
}
var lexerLiteralNames = []string{
"", "'.'", "'=~'", "'!~'", "'=='", "'!='", "'<'", "'>'", "'<='", "'>='",
"'+'", "'-'", "'*'", "'/'", "'%'", "'?'", "':'", "'::'", "'..'", "','",
"'('", "')'", "'{'", "'}'", "'['", "']'",
}
var lexerSymbolicNames = []string{
"", "DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT",
"T_GT", "T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD",
"T_QUESTION", "T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN", "T_CLOSE",
"T_OBJECT_OPEN", "T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE", "T_AGGREGATE",
"T_ALL", "T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC", "T_DISTINCT",
"T_FALSE", "T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND", "T_INSERT",
"T_INTO", "T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT", "T_NONE",
"T_NOT", "T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE", "T_RETURN",
"T_SHORTEST_PATH", "T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT", "T_WITH",
"T_KEEP", "T_COUNT", "T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO", "T_CURRENT",
"T_NEW", "T_OLD", "T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER", "T_QUOTED_STRING",
"SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR",
"ERROR_RECONGNIGION",
}
var lexerRuleNames = []string{
"DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT", "T_GT",
"T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD", "T_QUESTION",
"T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN", "T_CLOSE", "T_OBJECT_OPEN",
"T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE", "T_AGGREGATE", "T_ALL",
"T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC", "T_DISTINCT", "T_FALSE",
"T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND", "T_INSERT", "T_INTO",
"T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT", "T_NONE", "T_NOT",
"T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE", "T_RETURN", "T_SHORTEST_PATH",
"T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT", "T_WITH", "T_KEEP", "T_COUNT",
"T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO", "T_CURRENT", "T_NEW", "T_OLD",
"T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER", "T_QUOTED_STRING", "SINGLE_LINE_COMMENT",
"MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR", "HEX_DIGIT", "DIGIT",
"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O",
"P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "ERROR_RECONGNIGION",
}
type CAQLLexer struct {
*antlr.BaseLexer
channelNames []string
@@ -403,28 +22,427 @@ type CAQLLexer struct {
// TODO: EOF string
}
// NewCAQLLexer produces a new lexer instance for the optional input antlr.CharStream.
//
// The *CAQLLexer instance produced may be reused by calling the SetInputStream method.
// The initial lexer configuration is expensive to construct, and the object is not thread-safe;
// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
// objects can be used in a thread-safe manner.
func NewCAQLLexer(input antlr.CharStream) *CAQLLexer {
l := new(CAQLLexer)
lexerDeserializer := antlr.NewATNDeserializer(nil)
lexerAtn := lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
lexerDecisionToDFA := make([]*antlr.DFA, len(lexerAtn.DecisionToState))
for index, ds := range lexerAtn.DecisionToState {
lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
}
l.BaseLexer = antlr.NewBaseLexer(input)
l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
var caqllexerLexerStaticData struct {
once sync.Once
serializedATN []int32
channelNames []string
modeNames []string
literalNames []string
symbolicNames []string
ruleNames []string
predictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
l.channelNames = lexerChannelNames
l.modeNames = lexerModeNames
l.RuleNames = lexerRuleNames
l.LiteralNames = lexerLiteralNames
l.SymbolicNames = lexerSymbolicNames
func caqllexerLexerInit() {
staticData := &caqllexerLexerStaticData
staticData.channelNames = []string{
"DEFAULT_TOKEN_CHANNEL", "HIDDEN", "ERRORCHANNEL",
}
staticData.modeNames = []string{
"DEFAULT_MODE",
}
staticData.literalNames = []string{
"", "'.'", "'=~'", "'!~'", "'=='", "'!='", "'<'", "'>'", "'<='", "'>='",
"'+'", "'-'", "'*'", "'/'", "'%'", "'?'", "':'", "'::'", "'..'", "','",
"'('", "')'", "'{'", "'}'", "'['", "']'",
}
staticData.symbolicNames = []string{
"", "DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT",
"T_GT", "T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD",
"T_QUESTION", "T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN",
"T_CLOSE", "T_OBJECT_OPEN", "T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE",
"T_AGGREGATE", "T_ALL", "T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC",
"T_DISTINCT", "T_FALSE", "T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND",
"T_INSERT", "T_INTO", "T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT",
"T_NONE", "T_NOT", "T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE",
"T_RETURN", "T_SHORTEST_PATH", "T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT",
"T_WITH", "T_KEEP", "T_COUNT", "T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO",
"T_CURRENT", "T_NEW", "T_OLD", "T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER",
"T_QUOTED_STRING", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES",
"UNEXPECTED_CHAR", "ERROR_RECONGNIGION",
}
staticData.ruleNames = []string{
"DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT",
"T_GT", "T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD",
"T_QUESTION", "T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN",
"T_CLOSE", "T_OBJECT_OPEN", "T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE",
"T_AGGREGATE", "T_ALL", "T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC",
"T_DISTINCT", "T_FALSE", "T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND",
"T_INSERT", "T_INTO", "T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT",
"T_NONE", "T_NOT", "T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE",
"T_RETURN", "T_SHORTEST_PATH", "T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT",
"T_WITH", "T_KEEP", "T_COUNT", "T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO",
"T_CURRENT", "T_NEW", "T_OLD", "T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER",
"T_QUOTED_STRING", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES",
"UNEXPECTED_CHAR", "HEX_DIGIT", "DIGIT", "A", "B", "C", "D", "E", "F",
"G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T",
"U", "V", "W", "X", "Y", "Z", "ERROR_RECONGNIGION",
}
staticData.predictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 0, 78, 737, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2,
4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2,
10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15,
7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7,
20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25,
2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2,
31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36,
7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7,
41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46,
2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2,
52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57,
7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7,
62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67,
2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2,
73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78,
7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7,
83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88,
2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2,
94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99,
7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103,
2, 104, 7, 104, 2, 105, 7, 105, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1,
2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1,
7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11,
1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1,
16, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21,
1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1,
25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26,
1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 3, 27, 292, 8, 27, 1, 28, 1,
28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 30,
1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 1, 31, 1, 32, 1,
32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33,
1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1,
35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 36, 1, 37,
1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1,
39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40,
1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1,
41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42,
1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1,
44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46,
1, 46, 3, 46, 412, 8, 46, 1, 47, 1, 47, 1, 47, 1, 47, 1, 47, 1, 48, 1,
48, 1, 48, 1, 48, 1, 48, 3, 48, 424, 8, 48, 1, 49, 1, 49, 1, 49, 1, 49,
1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1,
50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52,
1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 53, 1,
53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 53, 1, 54,
1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1,
56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57,
1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1,
59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61,
1, 61, 1, 61, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1,
62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64,
1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1,
66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 5, 68, 553, 8, 68,
10, 68, 12, 68, 556, 9, 68, 1, 69, 1, 69, 5, 69, 560, 8, 69, 10, 69, 12,
69, 563, 9, 69, 1, 69, 1, 69, 1, 69, 1, 69, 1, 69, 4, 69, 570, 8, 69, 11,
69, 12, 69, 571, 1, 69, 1, 69, 1, 69, 1, 69, 4, 69, 578, 8, 69, 11, 69,
12, 69, 579, 3, 69, 582, 8, 69, 1, 70, 1, 70, 5, 70, 586, 8, 70, 10, 70,
12, 70, 589, 9, 70, 1, 70, 3, 70, 592, 8, 70, 1, 70, 1, 70, 4, 70, 596,
8, 70, 11, 70, 12, 70, 597, 1, 70, 1, 70, 3, 70, 602, 8, 70, 1, 70, 4,
70, 605, 8, 70, 11, 70, 12, 70, 606, 3, 70, 609, 8, 70, 1, 71, 1, 71, 1,
71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 5, 72, 620, 8, 72, 10, 72,
12, 72, 623, 9, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 5,
72, 632, 8, 72, 10, 72, 12, 72, 635, 9, 72, 1, 72, 3, 72, 638, 8, 72, 1,
73, 1, 73, 1, 73, 1, 73, 5, 73, 644, 8, 73, 10, 73, 12, 73, 647, 9, 73,
1, 73, 3, 73, 650, 8, 73, 1, 73, 1, 73, 3, 73, 654, 8, 73, 1, 73, 1, 73,
1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 662, 8, 74, 10, 74, 12, 74, 665, 9,
74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76,
1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 79, 1, 79, 1, 80, 1, 80, 1, 81, 1,
81, 1, 82, 1, 82, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 1, 85, 1, 86, 1, 86,
1, 87, 1, 87, 1, 88, 1, 88, 1, 89, 1, 89, 1, 90, 1, 90, 1, 91, 1, 91, 1,
92, 1, 92, 1, 93, 1, 93, 1, 94, 1, 94, 1, 95, 1, 95, 1, 96, 1, 96, 1, 97,
1, 97, 1, 98, 1, 98, 1, 99, 1, 99, 1, 100, 1, 100, 1, 101, 1, 101, 1, 102,
1, 102, 1, 103, 1, 103, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105,
1, 663, 0, 106, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17,
9, 19, 10, 21, 11, 23, 12, 25, 13, 27, 14, 29, 15, 31, 16, 33, 17, 35,
18, 37, 19, 39, 20, 41, 21, 43, 22, 45, 23, 47, 24, 49, 25, 51, 26, 53,
27, 55, 28, 57, 29, 59, 30, 61, 31, 63, 32, 65, 33, 67, 34, 69, 35, 71,
36, 73, 37, 75, 38, 77, 39, 79, 40, 81, 41, 83, 42, 85, 43, 87, 44, 89,
45, 91, 46, 93, 47, 95, 48, 97, 49, 99, 50, 101, 51, 103, 52, 105, 53,
107, 54, 109, 55, 111, 56, 113, 57, 115, 58, 117, 59, 119, 60, 121, 61,
123, 62, 125, 63, 127, 64, 129, 65, 131, 66, 133, 67, 135, 68, 137, 69,
139, 70, 141, 71, 143, 72, 145, 73, 147, 74, 149, 75, 151, 76, 153, 77,
155, 0, 157, 0, 159, 0, 161, 0, 163, 0, 165, 0, 167, 0, 169, 0, 171, 0,
173, 0, 175, 0, 177, 0, 179, 0, 181, 0, 183, 0, 185, 0, 187, 0, 189, 0,
191, 0, 193, 0, 195, 0, 197, 0, 199, 0, 201, 0, 203, 0, 205, 0, 207, 0,
209, 0, 211, 78, 1, 0, 37, 3, 0, 65, 90, 95, 95, 97, 122, 4, 0, 48, 57,
65, 90, 95, 95, 97, 122, 1, 0, 49, 57, 1, 0, 48, 49, 2, 0, 43, 43, 45,
45, 2, 0, 39, 39, 92, 92, 2, 0, 34, 34, 92, 92, 2, 0, 10, 10, 13, 13, 3,
0, 9, 11, 13, 13, 32, 32, 3, 0, 48, 57, 65, 70, 97, 102, 1, 0, 48, 57,
2, 0, 65, 65, 97, 97, 2, 0, 66, 66, 98, 98, 2, 0, 67, 67, 99, 99, 2, 0,
68, 68, 100, 100, 2, 0, 69, 69, 101, 101, 2, 0, 70, 70, 102, 102, 2, 0,
71, 71, 103, 103, 2, 0, 72, 72, 104, 104, 2, 0, 73, 73, 105, 105, 2, 0,
74, 74, 106, 106, 2, 0, 75, 75, 107, 107, 2, 0, 76, 76, 108, 108, 2, 0,
77, 77, 109, 109, 2, 0, 78, 78, 110, 110, 2, 0, 79, 79, 111, 111, 2, 0,
80, 80, 112, 112, 2, 0, 81, 81, 113, 113, 2, 0, 82, 82, 114, 114, 2, 0,
83, 83, 115, 115, 2, 0, 84, 84, 116, 116, 2, 0, 85, 85, 117, 117, 2, 0,
86, 86, 118, 118, 2, 0, 87, 87, 119, 119, 2, 0, 88, 88, 120, 120, 2, 0,
89, 89, 121, 121, 2, 0, 90, 90, 122, 122, 736, 0, 1, 1, 0, 0, 0, 0, 3,
1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11,
1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0,
19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0,
0, 27, 1, 0, 0, 0, 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0,
0, 0, 35, 1, 0, 0, 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0,
0, 0, 0, 43, 1, 0, 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1,
0, 0, 0, 0, 51, 1, 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 57,
1, 0, 0, 0, 0, 59, 1, 0, 0, 0, 0, 61, 1, 0, 0, 0, 0, 63, 1, 0, 0, 0, 0,
65, 1, 0, 0, 0, 0, 67, 1, 0, 0, 0, 0, 69, 1, 0, 0, 0, 0, 71, 1, 0, 0, 0,
0, 73, 1, 0, 0, 0, 0, 75, 1, 0, 0, 0, 0, 77, 1, 0, 0, 0, 0, 79, 1, 0, 0,
0, 0, 81, 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0,
0, 0, 0, 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 0, 95, 1,
0, 0, 0, 0, 97, 1, 0, 0, 0, 0, 99, 1, 0, 0, 0, 0, 101, 1, 0, 0, 0, 0, 103,
1, 0, 0, 0, 0, 105, 1, 0, 0, 0, 0, 107, 1, 0, 0, 0, 0, 109, 1, 0, 0, 0,
0, 111, 1, 0, 0, 0, 0, 113, 1, 0, 0, 0, 0, 115, 1, 0, 0, 0, 0, 117, 1,
0, 0, 0, 0, 119, 1, 0, 0, 0, 0, 121, 1, 0, 0, 0, 0, 123, 1, 0, 0, 0, 0,
125, 1, 0, 0, 0, 0, 127, 1, 0, 0, 0, 0, 129, 1, 0, 0, 0, 0, 131, 1, 0,
0, 0, 0, 133, 1, 0, 0, 0, 0, 135, 1, 0, 0, 0, 0, 137, 1, 0, 0, 0, 0, 139,
1, 0, 0, 0, 0, 141, 1, 0, 0, 0, 0, 143, 1, 0, 0, 0, 0, 145, 1, 0, 0, 0,
0, 147, 1, 0, 0, 0, 0, 149, 1, 0, 0, 0, 0, 151, 1, 0, 0, 0, 0, 153, 1,
0, 0, 0, 0, 211, 1, 0, 0, 0, 1, 213, 1, 0, 0, 0, 3, 215, 1, 0, 0, 0, 5,
218, 1, 0, 0, 0, 7, 221, 1, 0, 0, 0, 9, 224, 1, 0, 0, 0, 11, 227, 1, 0,
0, 0, 13, 229, 1, 0, 0, 0, 15, 231, 1, 0, 0, 0, 17, 234, 1, 0, 0, 0, 19,
237, 1, 0, 0, 0, 21, 239, 1, 0, 0, 0, 23, 241, 1, 0, 0, 0, 25, 243, 1,
0, 0, 0, 27, 245, 1, 0, 0, 0, 29, 247, 1, 0, 0, 0, 31, 249, 1, 0, 0, 0,
33, 251, 1, 0, 0, 0, 35, 254, 1, 0, 0, 0, 37, 257, 1, 0, 0, 0, 39, 259,
1, 0, 0, 0, 41, 261, 1, 0, 0, 0, 43, 263, 1, 0, 0, 0, 45, 265, 1, 0, 0,
0, 47, 267, 1, 0, 0, 0, 49, 269, 1, 0, 0, 0, 51, 271, 1, 0, 0, 0, 53, 281,
1, 0, 0, 0, 55, 291, 1, 0, 0, 0, 57, 293, 1, 0, 0, 0, 59, 297, 1, 0, 0,
0, 61, 301, 1, 0, 0, 0, 63, 309, 1, 0, 0, 0, 65, 314, 1, 0, 0, 0, 67, 323,
1, 0, 0, 0, 69, 329, 1, 0, 0, 0, 71, 336, 1, 0, 0, 0, 73, 340, 1, 0, 0,
0, 75, 346, 1, 0, 0, 0, 77, 349, 1, 0, 0, 0, 79, 357, 1, 0, 0, 0, 81, 364,
1, 0, 0, 0, 83, 369, 1, 0, 0, 0, 85, 386, 1, 0, 0, 0, 87, 390, 1, 0, 0,
0, 89, 395, 1, 0, 0, 0, 91, 401, 1, 0, 0, 0, 93, 411, 1, 0, 0, 0, 95, 413,
1, 0, 0, 0, 97, 423, 1, 0, 0, 0, 99, 425, 1, 0, 0, 0, 101, 434, 1, 0, 0,
0, 103, 441, 1, 0, 0, 0, 105, 449, 1, 0, 0, 0, 107, 456, 1, 0, 0, 0, 109,
470, 1, 0, 0, 0, 111, 475, 1, 0, 0, 0, 113, 480, 1, 0, 0, 0, 115, 487,
1, 0, 0, 0, 117, 494, 1, 0, 0, 0, 119, 499, 1, 0, 0, 0, 121, 504, 1, 0,
0, 0, 123, 510, 1, 0, 0, 0, 125, 518, 1, 0, 0, 0, 127, 524, 1, 0, 0, 0,
129, 531, 1, 0, 0, 0, 131, 534, 1, 0, 0, 0, 133, 542, 1, 0, 0, 0, 135,
546, 1, 0, 0, 0, 137, 550, 1, 0, 0, 0, 139, 581, 1, 0, 0, 0, 141, 591,
1, 0, 0, 0, 143, 610, 1, 0, 0, 0, 145, 637, 1, 0, 0, 0, 147, 639, 1, 0,
0, 0, 149, 657, 1, 0, 0, 0, 151, 671, 1, 0, 0, 0, 153, 675, 1, 0, 0, 0,
155, 677, 1, 0, 0, 0, 157, 679, 1, 0, 0, 0, 159, 681, 1, 0, 0, 0, 161,
683, 1, 0, 0, 0, 163, 685, 1, 0, 0, 0, 165, 687, 1, 0, 0, 0, 167, 689,
1, 0, 0, 0, 169, 691, 1, 0, 0, 0, 171, 693, 1, 0, 0, 0, 173, 695, 1, 0,
0, 0, 175, 697, 1, 0, 0, 0, 177, 699, 1, 0, 0, 0, 179, 701, 1, 0, 0, 0,
181, 703, 1, 0, 0, 0, 183, 705, 1, 0, 0, 0, 185, 707, 1, 0, 0, 0, 187,
709, 1, 0, 0, 0, 189, 711, 1, 0, 0, 0, 191, 713, 1, 0, 0, 0, 193, 715,
1, 0, 0, 0, 195, 717, 1, 0, 0, 0, 197, 719, 1, 0, 0, 0, 199, 721, 1, 0,
0, 0, 201, 723, 1, 0, 0, 0, 203, 725, 1, 0, 0, 0, 205, 727, 1, 0, 0, 0,
207, 729, 1, 0, 0, 0, 209, 731, 1, 0, 0, 0, 211, 733, 1, 0, 0, 0, 213,
214, 5, 46, 0, 0, 214, 2, 1, 0, 0, 0, 215, 216, 5, 61, 0, 0, 216, 217,
5, 126, 0, 0, 217, 4, 1, 0, 0, 0, 218, 219, 5, 33, 0, 0, 219, 220, 5, 126,
0, 0, 220, 6, 1, 0, 0, 0, 221, 222, 5, 61, 0, 0, 222, 223, 5, 61, 0, 0,
223, 8, 1, 0, 0, 0, 224, 225, 5, 33, 0, 0, 225, 226, 5, 61, 0, 0, 226,
10, 1, 0, 0, 0, 227, 228, 5, 60, 0, 0, 228, 12, 1, 0, 0, 0, 229, 230, 5,
62, 0, 0, 230, 14, 1, 0, 0, 0, 231, 232, 5, 60, 0, 0, 232, 233, 5, 61,
0, 0, 233, 16, 1, 0, 0, 0, 234, 235, 5, 62, 0, 0, 235, 236, 5, 61, 0, 0,
236, 18, 1, 0, 0, 0, 237, 238, 5, 43, 0, 0, 238, 20, 1, 0, 0, 0, 239, 240,
5, 45, 0, 0, 240, 22, 1, 0, 0, 0, 241, 242, 5, 42, 0, 0, 242, 24, 1, 0,
0, 0, 243, 244, 5, 47, 0, 0, 244, 26, 1, 0, 0, 0, 245, 246, 5, 37, 0, 0,
246, 28, 1, 0, 0, 0, 247, 248, 5, 63, 0, 0, 248, 30, 1, 0, 0, 0, 249, 250,
5, 58, 0, 0, 250, 32, 1, 0, 0, 0, 251, 252, 5, 58, 0, 0, 252, 253, 5, 58,
0, 0, 253, 34, 1, 0, 0, 0, 254, 255, 5, 46, 0, 0, 255, 256, 5, 46, 0, 0,
256, 36, 1, 0, 0, 0, 257, 258, 5, 44, 0, 0, 258, 38, 1, 0, 0, 0, 259, 260,
5, 40, 0, 0, 260, 40, 1, 0, 0, 0, 261, 262, 5, 41, 0, 0, 262, 42, 1, 0,
0, 0, 263, 264, 5, 123, 0, 0, 264, 44, 1, 0, 0, 0, 265, 266, 5, 125, 0,
0, 266, 46, 1, 0, 0, 0, 267, 268, 5, 91, 0, 0, 268, 48, 1, 0, 0, 0, 269,
270, 5, 93, 0, 0, 270, 50, 1, 0, 0, 0, 271, 272, 3, 159, 79, 0, 272, 273,
3, 171, 85, 0, 273, 274, 3, 171, 85, 0, 274, 275, 3, 193, 96, 0, 275, 276,
3, 167, 83, 0, 276, 277, 3, 171, 85, 0, 277, 278, 3, 159, 79, 0, 278, 279,
3, 197, 98, 0, 279, 280, 3, 167, 83, 0, 280, 52, 1, 0, 0, 0, 281, 282,
3, 159, 79, 0, 282, 283, 3, 181, 90, 0, 283, 284, 3, 181, 90, 0, 284, 54,
1, 0, 0, 0, 285, 286, 3, 159, 79, 0, 286, 287, 3, 185, 92, 0, 287, 288,
3, 165, 82, 0, 288, 292, 1, 0, 0, 0, 289, 290, 5, 38, 0, 0, 290, 292, 5,
38, 0, 0, 291, 285, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 56, 1, 0, 0,
0, 293, 294, 3, 159, 79, 0, 294, 295, 3, 185, 92, 0, 295, 296, 3, 207,
103, 0, 296, 58, 1, 0, 0, 0, 297, 298, 3, 159, 79, 0, 298, 299, 3, 195,
97, 0, 299, 300, 3, 163, 81, 0, 300, 60, 1, 0, 0, 0, 301, 302, 3, 163,
81, 0, 302, 303, 3, 187, 93, 0, 303, 304, 3, 181, 90, 0, 304, 305, 3, 181,
90, 0, 305, 306, 3, 167, 83, 0, 306, 307, 3, 163, 81, 0, 307, 308, 3, 197,
98, 0, 308, 62, 1, 0, 0, 0, 309, 310, 3, 165, 82, 0, 310, 311, 3, 167,
83, 0, 311, 312, 3, 195, 97, 0, 312, 313, 3, 163, 81, 0, 313, 64, 1, 0,
0, 0, 314, 315, 3, 165, 82, 0, 315, 316, 3, 175, 87, 0, 316, 317, 3, 195,
97, 0, 317, 318, 3, 197, 98, 0, 318, 319, 3, 175, 87, 0, 319, 320, 3, 185,
92, 0, 320, 321, 3, 163, 81, 0, 321, 322, 3, 197, 98, 0, 322, 66, 1, 0,
0, 0, 323, 324, 3, 169, 84, 0, 324, 325, 3, 159, 79, 0, 325, 326, 3, 181,
90, 0, 326, 327, 3, 195, 97, 0, 327, 328, 3, 167, 83, 0, 328, 68, 1, 0,
0, 0, 329, 330, 3, 169, 84, 0, 330, 331, 3, 175, 87, 0, 331, 332, 3, 181,
90, 0, 332, 333, 3, 197, 98, 0, 333, 334, 3, 167, 83, 0, 334, 335, 3, 193,
96, 0, 335, 70, 1, 0, 0, 0, 336, 337, 3, 169, 84, 0, 337, 338, 3, 187,
93, 0, 338, 339, 3, 193, 96, 0, 339, 72, 1, 0, 0, 0, 340, 341, 3, 171,
85, 0, 341, 342, 3, 193, 96, 0, 342, 343, 3, 159, 79, 0, 343, 344, 3, 189,
94, 0, 344, 345, 3, 173, 86, 0, 345, 74, 1, 0, 0, 0, 346, 347, 3, 175,
87, 0, 347, 348, 3, 185, 92, 0, 348, 76, 1, 0, 0, 0, 349, 350, 3, 175,
87, 0, 350, 351, 3, 185, 92, 0, 351, 352, 3, 161, 80, 0, 352, 353, 3, 187,
93, 0, 353, 354, 3, 199, 99, 0, 354, 355, 3, 185, 92, 0, 355, 356, 3, 165,
82, 0, 356, 78, 1, 0, 0, 0, 357, 358, 3, 175, 87, 0, 358, 359, 3, 185,
92, 0, 359, 360, 3, 195, 97, 0, 360, 361, 3, 167, 83, 0, 361, 362, 3, 193,
96, 0, 362, 363, 3, 197, 98, 0, 363, 80, 1, 0, 0, 0, 364, 365, 3, 175,
87, 0, 365, 366, 3, 185, 92, 0, 366, 367, 3, 197, 98, 0, 367, 368, 3, 187,
93, 0, 368, 82, 1, 0, 0, 0, 369, 370, 3, 179, 89, 0, 370, 371, 5, 95, 0,
0, 371, 372, 3, 195, 97, 0, 372, 373, 3, 173, 86, 0, 373, 374, 3, 187,
93, 0, 374, 375, 3, 193, 96, 0, 375, 376, 3, 197, 98, 0, 376, 377, 3, 167,
83, 0, 377, 378, 3, 195, 97, 0, 378, 379, 3, 197, 98, 0, 379, 380, 5, 95,
0, 0, 380, 381, 3, 189, 94, 0, 381, 382, 3, 159, 79, 0, 382, 383, 3, 197,
98, 0, 383, 384, 3, 173, 86, 0, 384, 385, 3, 195, 97, 0, 385, 84, 1, 0,
0, 0, 386, 387, 3, 181, 90, 0, 387, 388, 3, 167, 83, 0, 388, 389, 3, 197,
98, 0, 389, 86, 1, 0, 0, 0, 390, 391, 3, 181, 90, 0, 391, 392, 3, 175,
87, 0, 392, 393, 3, 179, 89, 0, 393, 394, 3, 167, 83, 0, 394, 88, 1, 0,
0, 0, 395, 396, 3, 181, 90, 0, 396, 397, 3, 175, 87, 0, 397, 398, 3, 183,
91, 0, 398, 399, 3, 175, 87, 0, 399, 400, 3, 197, 98, 0, 400, 90, 1, 0,
0, 0, 401, 402, 3, 185, 92, 0, 402, 403, 3, 187, 93, 0, 403, 404, 3, 185,
92, 0, 404, 405, 3, 167, 83, 0, 405, 92, 1, 0, 0, 0, 406, 407, 3, 185,
92, 0, 407, 408, 3, 187, 93, 0, 408, 409, 3, 197, 98, 0, 409, 412, 1, 0,
0, 0, 410, 412, 5, 33, 0, 0, 411, 406, 1, 0, 0, 0, 411, 410, 1, 0, 0, 0,
412, 94, 1, 0, 0, 0, 413, 414, 3, 185, 92, 0, 414, 415, 3, 199, 99, 0,
415, 416, 3, 181, 90, 0, 416, 417, 3, 181, 90, 0, 417, 96, 1, 0, 0, 0,
418, 419, 3, 187, 93, 0, 419, 420, 3, 193, 96, 0, 420, 424, 1, 0, 0, 0,
421, 422, 5, 124, 0, 0, 422, 424, 5, 124, 0, 0, 423, 418, 1, 0, 0, 0, 423,
421, 1, 0, 0, 0, 424, 98, 1, 0, 0, 0, 425, 426, 3, 187, 93, 0, 426, 427,
3, 199, 99, 0, 427, 428, 3, 197, 98, 0, 428, 429, 3, 161, 80, 0, 429, 430,
3, 187, 93, 0, 430, 431, 3, 199, 99, 0, 431, 432, 3, 185, 92, 0, 432, 433,
3, 165, 82, 0, 433, 100, 1, 0, 0, 0, 434, 435, 3, 193, 96, 0, 435, 436,
3, 167, 83, 0, 436, 437, 3, 183, 91, 0, 437, 438, 3, 187, 93, 0, 438, 439,
3, 201, 100, 0, 439, 440, 3, 167, 83, 0, 440, 102, 1, 0, 0, 0, 441, 442,
3, 193, 96, 0, 442, 443, 3, 167, 83, 0, 443, 444, 3, 189, 94, 0, 444, 445,
3, 181, 90, 0, 445, 446, 3, 159, 79, 0, 446, 447, 3, 163, 81, 0, 447, 448,
3, 167, 83, 0, 448, 104, 1, 0, 0, 0, 449, 450, 3, 193, 96, 0, 450, 451,
3, 167, 83, 0, 451, 452, 3, 197, 98, 0, 452, 453, 3, 199, 99, 0, 453, 454,
3, 193, 96, 0, 454, 455, 3, 185, 92, 0, 455, 106, 1, 0, 0, 0, 456, 457,
3, 195, 97, 0, 457, 458, 3, 173, 86, 0, 458, 459, 3, 187, 93, 0, 459, 460,
3, 193, 96, 0, 460, 461, 3, 197, 98, 0, 461, 462, 3, 167, 83, 0, 462, 463,
3, 195, 97, 0, 463, 464, 3, 197, 98, 0, 464, 465, 5, 95, 0, 0, 465, 466,
3, 189, 94, 0, 466, 467, 3, 159, 79, 0, 467, 468, 3, 197, 98, 0, 468, 469,
3, 173, 86, 0, 469, 108, 1, 0, 0, 0, 470, 471, 3, 195, 97, 0, 471, 472,
3, 187, 93, 0, 472, 473, 3, 193, 96, 0, 473, 474, 3, 197, 98, 0, 474, 110,
1, 0, 0, 0, 475, 476, 3, 197, 98, 0, 476, 477, 3, 193, 96, 0, 477, 478,
3, 199, 99, 0, 478, 479, 3, 167, 83, 0, 479, 112, 1, 0, 0, 0, 480, 481,
3, 199, 99, 0, 481, 482, 3, 189, 94, 0, 482, 483, 3, 165, 82, 0, 483, 484,
3, 159, 79, 0, 484, 485, 3, 197, 98, 0, 485, 486, 3, 167, 83, 0, 486, 114,
1, 0, 0, 0, 487, 488, 3, 199, 99, 0, 488, 489, 3, 189, 94, 0, 489, 490,
3, 195, 97, 0, 490, 491, 3, 167, 83, 0, 491, 492, 3, 193, 96, 0, 492, 493,
3, 197, 98, 0, 493, 116, 1, 0, 0, 0, 494, 495, 3, 203, 101, 0, 495, 496,
3, 175, 87, 0, 496, 497, 3, 197, 98, 0, 497, 498, 3, 173, 86, 0, 498, 118,
1, 0, 0, 0, 499, 500, 3, 179, 89, 0, 500, 501, 3, 167, 83, 0, 501, 502,
3, 167, 83, 0, 502, 503, 3, 189, 94, 0, 503, 120, 1, 0, 0, 0, 504, 505,
3, 163, 81, 0, 505, 506, 3, 187, 93, 0, 506, 507, 3, 199, 99, 0, 507, 508,
3, 185, 92, 0, 508, 509, 3, 197, 98, 0, 509, 122, 1, 0, 0, 0, 510, 511,
3, 187, 93, 0, 511, 512, 3, 189, 94, 0, 512, 513, 3, 197, 98, 0, 513, 514,
3, 175, 87, 0, 514, 515, 3, 187, 93, 0, 515, 516, 3, 185, 92, 0, 516, 517,
3, 195, 97, 0, 517, 124, 1, 0, 0, 0, 518, 519, 3, 189, 94, 0, 519, 520,
3, 193, 96, 0, 520, 521, 3, 199, 99, 0, 521, 522, 3, 185, 92, 0, 522, 523,
3, 167, 83, 0, 523, 126, 1, 0, 0, 0, 524, 525, 3, 195, 97, 0, 525, 526,
3, 167, 83, 0, 526, 527, 3, 159, 79, 0, 527, 528, 3, 193, 96, 0, 528, 529,
3, 163, 81, 0, 529, 530, 3, 173, 86, 0, 530, 128, 1, 0, 0, 0, 531, 532,
3, 197, 98, 0, 532, 533, 3, 187, 93, 0, 533, 130, 1, 0, 0, 0, 534, 535,
3, 163, 81, 0, 535, 536, 3, 199, 99, 0, 536, 537, 3, 193, 96, 0, 537, 538,
3, 193, 96, 0, 538, 539, 3, 167, 83, 0, 539, 540, 3, 185, 92, 0, 540, 541,
3, 197, 98, 0, 541, 132, 1, 0, 0, 0, 542, 543, 3, 185, 92, 0, 543, 544,
3, 167, 83, 0, 544, 545, 3, 203, 101, 0, 545, 134, 1, 0, 0, 0, 546, 547,
3, 187, 93, 0, 547, 548, 3, 181, 90, 0, 548, 549, 3, 165, 82, 0, 549, 136,
1, 0, 0, 0, 550, 554, 7, 0, 0, 0, 551, 553, 7, 1, 0, 0, 552, 551, 1, 0,
0, 0, 553, 556, 1, 0, 0, 0, 554, 552, 1, 0, 0, 0, 554, 555, 1, 0, 0, 0,
555, 138, 1, 0, 0, 0, 556, 554, 1, 0, 0, 0, 557, 561, 7, 2, 0, 0, 558,
560, 3, 157, 78, 0, 559, 558, 1, 0, 0, 0, 560, 563, 1, 0, 0, 0, 561, 559,
1, 0, 0, 0, 561, 562, 1, 0, 0, 0, 562, 582, 1, 0, 0, 0, 563, 561, 1, 0,
0, 0, 564, 582, 5, 48, 0, 0, 565, 566, 5, 48, 0, 0, 566, 567, 5, 120, 0,
0, 567, 569, 1, 0, 0, 0, 568, 570, 3, 155, 77, 0, 569, 568, 1, 0, 0, 0,
570, 571, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 572,
582, 1, 0, 0, 0, 573, 574, 5, 48, 0, 0, 574, 575, 5, 98, 0, 0, 575, 577,
1, 0, 0, 0, 576, 578, 7, 3, 0, 0, 577, 576, 1, 0, 0, 0, 578, 579, 1, 0,
0, 0, 579, 577, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 582, 1, 0, 0, 0,
581, 557, 1, 0, 0, 0, 581, 564, 1, 0, 0, 0, 581, 565, 1, 0, 0, 0, 581,
573, 1, 0, 0, 0, 582, 140, 1, 0, 0, 0, 583, 587, 7, 2, 0, 0, 584, 586,
3, 157, 78, 0, 585, 584, 1, 0, 0, 0, 586, 589, 1, 0, 0, 0, 587, 585, 1,
0, 0, 0, 587, 588, 1, 0, 0, 0, 588, 592, 1, 0, 0, 0, 589, 587, 1, 0, 0,
0, 590, 592, 5, 48, 0, 0, 591, 583, 1, 0, 0, 0, 591, 590, 1, 0, 0, 0, 591,
592, 1, 0, 0, 0, 592, 593, 1, 0, 0, 0, 593, 595, 5, 46, 0, 0, 594, 596,
3, 157, 78, 0, 595, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 595, 1,
0, 0, 0, 597, 598, 1, 0, 0, 0, 598, 608, 1, 0, 0, 0, 599, 601, 3, 167,
83, 0, 600, 602, 7, 4, 0, 0, 601, 600, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0,
602, 604, 1, 0, 0, 0, 603, 605, 3, 157, 78, 0, 604, 603, 1, 0, 0, 0, 605,
606, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 609,
1, 0, 0, 0, 608, 599, 1, 0, 0, 0, 608, 609, 1, 0, 0, 0, 609, 142, 1, 0,
0, 0, 610, 611, 5, 64, 0, 0, 611, 612, 3, 137, 68, 0, 612, 144, 1, 0, 0,
0, 613, 621, 5, 39, 0, 0, 614, 615, 5, 92, 0, 0, 615, 620, 9, 0, 0, 0,
616, 617, 5, 39, 0, 0, 617, 620, 5, 39, 0, 0, 618, 620, 8, 5, 0, 0, 619,
614, 1, 0, 0, 0, 619, 616, 1, 0, 0, 0, 619, 618, 1, 0, 0, 0, 620, 623,
1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 624, 1, 0,
0, 0, 623, 621, 1, 0, 0, 0, 624, 638, 5, 39, 0, 0, 625, 633, 5, 34, 0,
0, 626, 627, 5, 92, 0, 0, 627, 632, 9, 0, 0, 0, 628, 629, 5, 34, 0, 0,
629, 632, 5, 34, 0, 0, 630, 632, 8, 6, 0, 0, 631, 626, 1, 0, 0, 0, 631,
628, 1, 0, 0, 0, 631, 630, 1, 0, 0, 0, 632, 635, 1, 0, 0, 0, 633, 631,
1, 0, 0, 0, 633, 634, 1, 0, 0, 0, 634, 636, 1, 0, 0, 0, 635, 633, 1, 0,
0, 0, 636, 638, 5, 34, 0, 0, 637, 613, 1, 0, 0, 0, 637, 625, 1, 0, 0, 0,
638, 146, 1, 0, 0, 0, 639, 640, 5, 47, 0, 0, 640, 641, 5, 47, 0, 0, 641,
645, 1, 0, 0, 0, 642, 644, 8, 7, 0, 0, 643, 642, 1, 0, 0, 0, 644, 647,
1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 653, 1, 0,
0, 0, 647, 645, 1, 0, 0, 0, 648, 650, 5, 13, 0, 0, 649, 648, 1, 0, 0, 0,
649, 650, 1, 0, 0, 0, 650, 651, 1, 0, 0, 0, 651, 654, 5, 10, 0, 0, 652,
654, 5, 0, 0, 1, 653, 649, 1, 0, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655,
1, 0, 0, 0, 655, 656, 6, 73, 0, 0, 656, 148, 1, 0, 0, 0, 657, 658, 5, 47,
0, 0, 658, 659, 5, 42, 0, 0, 659, 663, 1, 0, 0, 0, 660, 662, 9, 0, 0, 0,
661, 660, 1, 0, 0, 0, 662, 665, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 663,
661, 1, 0, 0, 0, 664, 666, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 666, 667,
5, 42, 0, 0, 667, 668, 5, 47, 0, 0, 668, 669, 1, 0, 0, 0, 669, 670, 6,
74, 0, 0, 670, 150, 1, 0, 0, 0, 671, 672, 7, 8, 0, 0, 672, 673, 1, 0, 0,
0, 673, 674, 6, 75, 0, 0, 674, 152, 1, 0, 0, 0, 675, 676, 9, 0, 0, 0, 676,
154, 1, 0, 0, 0, 677, 678, 7, 9, 0, 0, 678, 156, 1, 0, 0, 0, 679, 680,
7, 10, 0, 0, 680, 158, 1, 0, 0, 0, 681, 682, 7, 11, 0, 0, 682, 160, 1,
0, 0, 0, 683, 684, 7, 12, 0, 0, 684, 162, 1, 0, 0, 0, 685, 686, 7, 13,
0, 0, 686, 164, 1, 0, 0, 0, 687, 688, 7, 14, 0, 0, 688, 166, 1, 0, 0, 0,
689, 690, 7, 15, 0, 0, 690, 168, 1, 0, 0, 0, 691, 692, 7, 16, 0, 0, 692,
170, 1, 0, 0, 0, 693, 694, 7, 17, 0, 0, 694, 172, 1, 0, 0, 0, 695, 696,
7, 18, 0, 0, 696, 174, 1, 0, 0, 0, 697, 698, 7, 19, 0, 0, 698, 176, 1,
0, 0, 0, 699, 700, 7, 20, 0, 0, 700, 178, 1, 0, 0, 0, 701, 702, 7, 21,
0, 0, 702, 180, 1, 0, 0, 0, 703, 704, 7, 22, 0, 0, 704, 182, 1, 0, 0, 0,
705, 706, 7, 23, 0, 0, 706, 184, 1, 0, 0, 0, 707, 708, 7, 24, 0, 0, 708,
186, 1, 0, 0, 0, 709, 710, 7, 25, 0, 0, 710, 188, 1, 0, 0, 0, 711, 712,
7, 26, 0, 0, 712, 190, 1, 0, 0, 0, 713, 714, 7, 27, 0, 0, 714, 192, 1,
0, 0, 0, 715, 716, 7, 28, 0, 0, 716, 194, 1, 0, 0, 0, 717, 718, 7, 29,
0, 0, 718, 196, 1, 0, 0, 0, 719, 720, 7, 30, 0, 0, 720, 198, 1, 0, 0, 0,
721, 722, 7, 31, 0, 0, 722, 200, 1, 0, 0, 0, 723, 724, 7, 32, 0, 0, 724,
202, 1, 0, 0, 0, 725, 726, 7, 33, 0, 0, 726, 204, 1, 0, 0, 0, 727, 728,
7, 34, 0, 0, 728, 206, 1, 0, 0, 0, 729, 730, 7, 35, 0, 0, 730, 208, 1,
0, 0, 0, 731, 732, 7, 36, 0, 0, 732, 210, 1, 0, 0, 0, 733, 734, 9, 0, 0,
0, 734, 735, 1, 0, 0, 0, 735, 736, 6, 105, 1, 0, 736, 212, 1, 0, 0, 0,
24, 0, 291, 411, 423, 554, 561, 571, 579, 581, 587, 591, 597, 601, 606,
608, 619, 621, 631, 633, 637, 645, 649, 653, 663, 2, 0, 1, 0, 0, 2, 0,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// CAQLLexerInit initializes any static state used to implement CAQLLexer. By default the
// static state used to implement the lexer is lazily initialized during the first call to
// NewCAQLLexer(). You can call this function if you wish to initialize the static state ahead
// of time.
func CAQLLexerInit() {
staticData := &caqllexerLexerStaticData
staticData.once.Do(caqllexerLexerInit)
}
// NewCAQLLexer produces a new lexer instance for the optional input antlr.CharStream.
func NewCAQLLexer(input antlr.CharStream) *CAQLLexer {
CAQLLexerInit()
l := new(CAQLLexer)
l.BaseLexer = antlr.NewBaseLexer(input)
staticData := &caqllexerLexerStaticData
l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
l.channelNames = staticData.channelNames
l.modeNames = staticData.modeNames
l.RuleNames = staticData.ruleNames
l.LiteralNames = staticData.literalNames
l.SymbolicNames = staticData.symbolicNames
l.GrammarFileName = "CAQLLexer.g4"
// TODO: l.EOF = antlr.TokenEOF

View File

@@ -1,161 +1,180 @@
// Code generated from CAQLParser.g4 by ANTLR 4.9.2. DO NOT EDIT.
// Code generated from CAQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package parser // CAQLParser
import (
"fmt"
"reflect"
"strconv"
"sync"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// Suppress unused import errors
var _ = fmt.Printf
var _ = reflect.Copy
var _ = strconv.Itoa
var parserATN = []uint16{
3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 3, 80, 192,
4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7,
4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 3, 2,
3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 32, 10, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3,
48, 10, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 3, 66, 10, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 7, 3, 86, 10, 3, 12, 3, 14, 3, 89, 11, 3, 3, 4, 3, 4,
3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 97, 10, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5,
3, 5, 3, 5, 3, 5, 5, 5, 107, 10, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5,
3, 5, 3, 5, 7, 5, 117, 10, 5, 12, 5, 14, 5, 120, 11, 5, 3, 6, 3, 6, 5,
6, 124, 10, 6, 3, 7, 3, 7, 3, 7, 5, 7, 129, 10, 7, 3, 7, 3, 7, 7, 7, 133,
10, 7, 12, 7, 14, 7, 136, 11, 7, 3, 7, 5, 7, 139, 10, 7, 3, 7, 3, 7, 3,
8, 3, 8, 3, 9, 3, 9, 5, 9, 147, 10, 9, 3, 9, 3, 9, 7, 9, 151, 10, 9, 12,
9, 14, 9, 154, 11, 9, 3, 9, 5, 9, 157, 10, 9, 3, 9, 3, 9, 3, 10, 3, 10,
5, 10, 163, 10, 10, 3, 10, 3, 10, 7, 10, 167, 10, 10, 12, 10, 14, 10, 170,
11, 10, 3, 10, 5, 10, 173, 10, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3,
11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 5, 11, 188, 10, 11,
3, 12, 3, 12, 3, 12, 4, 134, 152, 4, 4, 8, 13, 2, 4, 6, 8, 10, 12, 14,
16, 18, 20, 22, 2, 11, 3, 2, 12, 13, 3, 2, 14, 16, 3, 2, 8, 11, 3, 2, 6,
7, 5, 2, 29, 29, 31, 31, 48, 48, 4, 2, 6, 11, 40, 40, 4, 2, 4, 5, 46, 46,
7, 2, 36, 36, 50, 50, 58, 58, 72, 73, 75, 75, 4, 2, 71, 71, 75, 75, 2,
216, 2, 24, 3, 2, 2, 2, 4, 31, 3, 2, 2, 2, 6, 96, 3, 2, 2, 2, 8, 106, 3,
2, 2, 2, 10, 123, 3, 2, 2, 2, 12, 125, 3, 2, 2, 2, 14, 142, 3, 2, 2, 2,
16, 144, 3, 2, 2, 2, 18, 160, 3, 2, 2, 2, 20, 187, 3, 2, 2, 2, 22, 189,
3, 2, 2, 2, 24, 25, 5, 4, 3, 2, 25, 26, 7, 2, 2, 3, 26, 3, 3, 2, 2, 2,
27, 28, 8, 3, 1, 2, 28, 32, 5, 14, 8, 2, 29, 32, 5, 8, 5, 2, 30, 32, 5,
6, 4, 2, 31, 27, 3, 2, 2, 2, 31, 29, 3, 2, 2, 2, 31, 30, 3, 2, 2, 2, 32,
87, 3, 2, 2, 2, 33, 34, 12, 15, 2, 2, 34, 35, 9, 2, 2, 2, 35, 86, 5, 4,
3, 16, 36, 37, 12, 14, 2, 2, 37, 38, 9, 3, 2, 2, 38, 86, 5, 4, 3, 15, 39,
40, 12, 13, 2, 2, 40, 41, 7, 20, 2, 2, 41, 86, 5, 4, 3, 14, 42, 43, 12,
12, 2, 2, 43, 44, 9, 4, 2, 2, 44, 86, 5, 4, 3, 13, 45, 47, 12, 11, 2, 2,
46, 48, 7, 49, 2, 2, 47, 46, 3, 2, 2, 2, 47, 48, 3, 2, 2, 2, 48, 49, 3,
2, 2, 2, 49, 50, 7, 40, 2, 2, 50, 86, 5, 4, 3, 12, 51, 52, 12, 10, 2, 2,
52, 53, 9, 5, 2, 2, 53, 86, 5, 4, 3, 11, 54, 55, 12, 9, 2, 2, 55, 56, 9,
6, 2, 2, 56, 57, 9, 7, 2, 2, 57, 86, 5, 4, 3, 10, 58, 59, 12, 8, 2, 2,
59, 60, 9, 6, 2, 2, 60, 61, 7, 49, 2, 2, 61, 62, 7, 40, 2, 2, 62, 86, 5,
4, 3, 9, 63, 65, 12, 7, 2, 2, 64, 66, 7, 49, 2, 2, 65, 64, 3, 2, 2, 2,
65, 66, 3, 2, 2, 2, 66, 67, 3, 2, 2, 2, 67, 68, 9, 8, 2, 2, 68, 86, 5,
4, 3, 8, 69, 70, 12, 6, 2, 2, 70, 71, 7, 30, 2, 2, 71, 86, 5, 4, 3, 7,
72, 73, 12, 5, 2, 2, 73, 74, 7, 51, 2, 2, 74, 86, 5, 4, 3, 6, 75, 76, 12,
4, 2, 2, 76, 77, 7, 17, 2, 2, 77, 78, 5, 4, 3, 2, 78, 79, 7, 18, 2, 2,
79, 80, 5, 4, 3, 5, 80, 86, 3, 2, 2, 2, 81, 82, 12, 3, 2, 2, 82, 83, 7,
17, 2, 2, 83, 84, 7, 18, 2, 2, 84, 86, 5, 4, 3, 4, 85, 33, 3, 2, 2, 2,
85, 36, 3, 2, 2, 2, 85, 39, 3, 2, 2, 2, 85, 42, 3, 2, 2, 2, 85, 45, 3,
2, 2, 2, 85, 51, 3, 2, 2, 2, 85, 54, 3, 2, 2, 2, 85, 58, 3, 2, 2, 2, 85,
63, 3, 2, 2, 2, 85, 69, 3, 2, 2, 2, 85, 72, 3, 2, 2, 2, 85, 75, 3, 2, 2,
2, 85, 81, 3, 2, 2, 2, 86, 89, 3, 2, 2, 2, 87, 85, 3, 2, 2, 2, 87, 88,
3, 2, 2, 2, 88, 5, 3, 2, 2, 2, 89, 87, 3, 2, 2, 2, 90, 91, 7, 12, 2, 2,
91, 97, 5, 4, 3, 2, 92, 93, 7, 13, 2, 2, 93, 97, 5, 4, 3, 2, 94, 95, 7,
49, 2, 2, 95, 97, 5, 4, 3, 2, 96, 90, 3, 2, 2, 2, 96, 92, 3, 2, 2, 2, 96,
94, 3, 2, 2, 2, 97, 7, 3, 2, 2, 2, 98, 99, 8, 5, 1, 2, 99, 107, 7, 71,
2, 2, 100, 107, 5, 10, 6, 2, 101, 107, 5, 12, 7, 2, 102, 103, 7, 22, 2,
2, 103, 104, 5, 4, 3, 2, 104, 105, 7, 23, 2, 2, 105, 107, 3, 2, 2, 2, 106,
98, 3, 2, 2, 2, 106, 100, 3, 2, 2, 2, 106, 101, 3, 2, 2, 2, 106, 102, 3,
2, 2, 2, 107, 118, 3, 2, 2, 2, 108, 109, 12, 4, 2, 2, 109, 110, 7, 3, 2,
2, 110, 117, 7, 71, 2, 2, 111, 112, 12, 3, 2, 2, 112, 113, 7, 26, 2, 2,
113, 114, 5, 4, 3, 2, 114, 115, 7, 27, 2, 2, 115, 117, 3, 2, 2, 2, 116,
108, 3, 2, 2, 2, 116, 111, 3, 2, 2, 2, 117, 120, 3, 2, 2, 2, 118, 116,
3, 2, 2, 2, 118, 119, 3, 2, 2, 2, 119, 9, 3, 2, 2, 2, 120, 118, 3, 2, 2,
2, 121, 124, 5, 16, 9, 2, 122, 124, 5, 18, 10, 2, 123, 121, 3, 2, 2, 2,
123, 122, 3, 2, 2, 2, 124, 11, 3, 2, 2, 2, 125, 126, 7, 71, 2, 2, 126,
128, 7, 22, 2, 2, 127, 129, 5, 4, 3, 2, 128, 127, 3, 2, 2, 2, 128, 129,
3, 2, 2, 2, 129, 134, 3, 2, 2, 2, 130, 131, 7, 21, 2, 2, 131, 133, 5, 4,
3, 2, 132, 130, 3, 2, 2, 2, 133, 136, 3, 2, 2, 2, 134, 135, 3, 2, 2, 2,
134, 132, 3, 2, 2, 2, 135, 138, 3, 2, 2, 2, 136, 134, 3, 2, 2, 2, 137,
139, 7, 21, 2, 2, 138, 137, 3, 2, 2, 2, 138, 139, 3, 2, 2, 2, 139, 140,
3, 2, 2, 2, 140, 141, 7, 23, 2, 2, 141, 13, 3, 2, 2, 2, 142, 143, 9, 9,
2, 2, 143, 15, 3, 2, 2, 2, 144, 146, 7, 26, 2, 2, 145, 147, 5, 4, 3, 2,
146, 145, 3, 2, 2, 2, 146, 147, 3, 2, 2, 2, 147, 152, 3, 2, 2, 2, 148,
149, 7, 21, 2, 2, 149, 151, 5, 4, 3, 2, 150, 148, 3, 2, 2, 2, 151, 154,
3, 2, 2, 2, 152, 153, 3, 2, 2, 2, 152, 150, 3, 2, 2, 2, 153, 156, 3, 2,
2, 2, 154, 152, 3, 2, 2, 2, 155, 157, 7, 21, 2, 2, 156, 155, 3, 2, 2, 2,
156, 157, 3, 2, 2, 2, 157, 158, 3, 2, 2, 2, 158, 159, 7, 27, 2, 2, 159,
17, 3, 2, 2, 2, 160, 162, 7, 24, 2, 2, 161, 163, 5, 20, 11, 2, 162, 161,
3, 2, 2, 2, 162, 163, 3, 2, 2, 2, 163, 168, 3, 2, 2, 2, 164, 165, 7, 21,
2, 2, 165, 167, 5, 20, 11, 2, 166, 164, 3, 2, 2, 2, 167, 170, 3, 2, 2,
2, 168, 166, 3, 2, 2, 2, 168, 169, 3, 2, 2, 2, 169, 172, 3, 2, 2, 2, 170,
168, 3, 2, 2, 2, 171, 173, 7, 21, 2, 2, 172, 171, 3, 2, 2, 2, 172, 173,
3, 2, 2, 2, 173, 174, 3, 2, 2, 2, 174, 175, 7, 25, 2, 2, 175, 19, 3, 2,
2, 2, 176, 188, 7, 71, 2, 2, 177, 178, 5, 22, 12, 2, 178, 179, 7, 18, 2,
2, 179, 180, 5, 4, 3, 2, 180, 188, 3, 2, 2, 2, 181, 182, 7, 26, 2, 2, 182,
183, 5, 4, 3, 2, 183, 184, 7, 27, 2, 2, 184, 185, 7, 18, 2, 2, 185, 186,
5, 4, 3, 2, 186, 188, 3, 2, 2, 2, 187, 176, 3, 2, 2, 2, 187, 177, 3, 2,
2, 2, 187, 181, 3, 2, 2, 2, 188, 21, 3, 2, 2, 2, 189, 190, 9, 10, 2, 2,
190, 23, 3, 2, 2, 2, 22, 31, 47, 65, 85, 87, 96, 106, 116, 118, 123, 128,
134, 138, 146, 152, 156, 162, 168, 172, 187,
}
var literalNames = []string{
"", "'.'", "'=~'", "'!~'", "'=='", "'!='", "'<'", "'>'", "'<='", "'>='",
"'+'", "'-'", "'*'", "'/'", "'%'", "'?'", "':'", "'::'", "'..'", "','",
"'('", "')'", "'{'", "'}'", "'['", "']'",
}
var symbolicNames = []string{
"", "DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT",
"T_GT", "T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD",
"T_QUESTION", "T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN", "T_CLOSE",
"T_OBJECT_OPEN", "T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE", "T_AGGREGATE",
"T_ALL", "T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC", "T_DISTINCT",
"T_FALSE", "T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND", "T_INSERT",
"T_INTO", "T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT", "T_NONE",
"T_NOT", "T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE", "T_RETURN",
"T_SHORTEST_PATH", "T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT", "T_WITH",
"T_KEEP", "T_COUNT", "T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO", "T_CURRENT",
"T_NEW", "T_OLD", "T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER", "T_QUOTED_STRING",
"SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES", "UNEXPECTED_CHAR",
"ERROR_RECONGNIGION",
}
var ruleNames = []string{
"parse", "expression", "operator_unary", "reference", "compound_value",
"function_call", "value_literal", "array", "object", "object_element",
"object_element_name",
}
var _ = sync.Once{}
type CAQLParser struct {
*antlr.BaseParser
}
// NewCAQLParser produces a new parser instance for the optional input antlr.TokenStream.
//
// The *CAQLParser instance produced may be reused by calling the SetInputStream method.
// The initial parser configuration is expensive to construct, and the object is not thread-safe;
// however, if used within a Golang sync.Pool, the construction cost amortizes well and the
// objects can be used in a thread-safe manner.
func NewCAQLParser(input antlr.TokenStream) *CAQLParser {
this := new(CAQLParser)
deserializer := antlr.NewATNDeserializer(nil)
deserializedATN := deserializer.DeserializeFromUInt16(parserATN)
decisionToDFA := make([]*antlr.DFA, len(deserializedATN.DecisionToState))
for index, ds := range deserializedATN.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(ds, index)
}
this.BaseParser = antlr.NewBaseParser(input)
var caqlparserParserStaticData struct {
once sync.Once
serializedATN []int32
literalNames []string
symbolicNames []string
ruleNames []string
predictionContextCache *antlr.PredictionContextCache
atn *antlr.ATN
decisionToDFA []*antlr.DFA
}
this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache())
this.RuleNames = ruleNames
this.LiteralNames = literalNames
this.SymbolicNames = symbolicNames
func caqlparserParserInit() {
staticData := &caqlparserParserStaticData
staticData.literalNames = []string{
"", "'.'", "'=~'", "'!~'", "'=='", "'!='", "'<'", "'>'", "'<='", "'>='",
"'+'", "'-'", "'*'", "'/'", "'%'", "'?'", "':'", "'::'", "'..'", "','",
"'('", "')'", "'{'", "'}'", "'['", "']'",
}
staticData.symbolicNames = []string{
"", "DOT", "T_REGEX_MATCH", "T_REGEX_NON_MATCH", "T_EQ", "T_NE", "T_LT",
"T_GT", "T_LE", "T_GE", "T_PLUS", "T_MINUS", "T_TIMES", "T_DIV", "T_MOD",
"T_QUESTION", "T_COLON", "T_SCOPE", "T_RANGE", "T_COMMA", "T_OPEN",
"T_CLOSE", "T_OBJECT_OPEN", "T_OBJECT_CLOSE", "T_ARRAY_OPEN", "T_ARRAY_CLOSE",
"T_AGGREGATE", "T_ALL", "T_AND", "T_ANY", "T_ASC", "T_COLLECT", "T_DESC",
"T_DISTINCT", "T_FALSE", "T_FILTER", "T_FOR", "T_GRAPH", "T_IN", "T_INBOUND",
"T_INSERT", "T_INTO", "T_K_SHORTEST_PATHS", "T_LET", "T_LIKE", "T_LIMIT",
"T_NONE", "T_NOT", "T_NULL", "T_OR", "T_OUTBOUND", "T_REMOVE", "T_REPLACE",
"T_RETURN", "T_SHORTEST_PATH", "T_SORT", "T_TRUE", "T_UPDATE", "T_UPSERT",
"T_WITH", "T_KEEP", "T_COUNT", "T_OPTIONS", "T_PRUNE", "T_SEARCH", "T_TO",
"T_CURRENT", "T_NEW", "T_OLD", "T_STRING", "T_INT", "T_FLOAT", "T_PARAMETER",
"T_QUOTED_STRING", "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "SPACES",
"UNEXPECTED_CHAR", "ERROR_RECONGNIGION",
}
staticData.ruleNames = []string{
"parse", "expression", "operator_unary", "reference", "compound_value",
"function_call", "value_literal", "array", "object", "object_element",
"object_element_name",
}
staticData.predictionContextCache = antlr.NewPredictionContextCache()
staticData.serializedATN = []int32{
4, 1, 78, 190, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7,
4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7,
10, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 30, 8, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 3, 1, 46, 8, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 64, 8, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 5, 1, 84, 8, 1, 10, 1, 12, 1, 87, 9, 1, 1, 2, 1, 2,
1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 95, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1,
3, 1, 3, 1, 3, 3, 3, 105, 8, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1,
3, 1, 3, 5, 3, 115, 8, 3, 10, 3, 12, 3, 118, 9, 3, 1, 4, 1, 4, 3, 4, 122,
8, 4, 1, 5, 1, 5, 1, 5, 3, 5, 127, 8, 5, 1, 5, 1, 5, 5, 5, 131, 8, 5, 10,
5, 12, 5, 134, 9, 5, 1, 5, 3, 5, 137, 8, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1,
7, 1, 7, 3, 7, 145, 8, 7, 1, 7, 1, 7, 5, 7, 149, 8, 7, 10, 7, 12, 7, 152,
9, 7, 1, 7, 3, 7, 155, 8, 7, 1, 7, 1, 7, 1, 8, 1, 8, 3, 8, 161, 8, 8, 1,
8, 1, 8, 5, 8, 165, 8, 8, 10, 8, 12, 8, 168, 9, 8, 1, 8, 3, 8, 171, 8,
8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1,
9, 1, 9, 3, 9, 186, 8, 9, 1, 10, 1, 10, 1, 10, 2, 132, 150, 2, 2, 6, 11,
0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 0, 9, 1, 0, 10, 11, 1, 0, 12, 14,
1, 0, 6, 9, 1, 0, 4, 5, 3, 0, 27, 27, 29, 29, 46, 46, 2, 0, 4, 9, 38, 38,
2, 0, 2, 3, 44, 44, 5, 0, 34, 34, 48, 48, 56, 56, 70, 71, 73, 73, 2, 0,
69, 69, 73, 73, 214, 0, 22, 1, 0, 0, 0, 2, 29, 1, 0, 0, 0, 4, 94, 1, 0,
0, 0, 6, 104, 1, 0, 0, 0, 8, 121, 1, 0, 0, 0, 10, 123, 1, 0, 0, 0, 12,
140, 1, 0, 0, 0, 14, 142, 1, 0, 0, 0, 16, 158, 1, 0, 0, 0, 18, 185, 1,
0, 0, 0, 20, 187, 1, 0, 0, 0, 22, 23, 3, 2, 1, 0, 23, 24, 5, 0, 0, 1, 24,
1, 1, 0, 0, 0, 25, 26, 6, 1, -1, 0, 26, 30, 3, 12, 6, 0, 27, 30, 3, 6,
3, 0, 28, 30, 3, 4, 2, 0, 29, 25, 1, 0, 0, 0, 29, 27, 1, 0, 0, 0, 29, 28,
1, 0, 0, 0, 30, 85, 1, 0, 0, 0, 31, 32, 10, 13, 0, 0, 32, 33, 7, 0, 0,
0, 33, 84, 3, 2, 1, 14, 34, 35, 10, 12, 0, 0, 35, 36, 7, 1, 0, 0, 36, 84,
3, 2, 1, 13, 37, 38, 10, 11, 0, 0, 38, 39, 5, 18, 0, 0, 39, 84, 3, 2, 1,
12, 40, 41, 10, 10, 0, 0, 41, 42, 7, 2, 0, 0, 42, 84, 3, 2, 1, 11, 43,
45, 10, 9, 0, 0, 44, 46, 5, 47, 0, 0, 45, 44, 1, 0, 0, 0, 45, 46, 1, 0,
0, 0, 46, 47, 1, 0, 0, 0, 47, 48, 5, 38, 0, 0, 48, 84, 3, 2, 1, 10, 49,
50, 10, 8, 0, 0, 50, 51, 7, 3, 0, 0, 51, 84, 3, 2, 1, 9, 52, 53, 10, 7,
0, 0, 53, 54, 7, 4, 0, 0, 54, 55, 7, 5, 0, 0, 55, 84, 3, 2, 1, 8, 56, 57,
10, 6, 0, 0, 57, 58, 7, 4, 0, 0, 58, 59, 5, 47, 0, 0, 59, 60, 5, 38, 0,
0, 60, 84, 3, 2, 1, 7, 61, 63, 10, 5, 0, 0, 62, 64, 5, 47, 0, 0, 63, 62,
1, 0, 0, 0, 63, 64, 1, 0, 0, 0, 64, 65, 1, 0, 0, 0, 65, 66, 7, 6, 0, 0,
66, 84, 3, 2, 1, 6, 67, 68, 10, 4, 0, 0, 68, 69, 5, 28, 0, 0, 69, 84, 3,
2, 1, 5, 70, 71, 10, 3, 0, 0, 71, 72, 5, 49, 0, 0, 72, 84, 3, 2, 1, 4,
73, 74, 10, 2, 0, 0, 74, 75, 5, 15, 0, 0, 75, 76, 3, 2, 1, 0, 76, 77, 5,
16, 0, 0, 77, 78, 3, 2, 1, 3, 78, 84, 1, 0, 0, 0, 79, 80, 10, 1, 0, 0,
80, 81, 5, 15, 0, 0, 81, 82, 5, 16, 0, 0, 82, 84, 3, 2, 1, 2, 83, 31, 1,
0, 0, 0, 83, 34, 1, 0, 0, 0, 83, 37, 1, 0, 0, 0, 83, 40, 1, 0, 0, 0, 83,
43, 1, 0, 0, 0, 83, 49, 1, 0, 0, 0, 83, 52, 1, 0, 0, 0, 83, 56, 1, 0, 0,
0, 83, 61, 1, 0, 0, 0, 83, 67, 1, 0, 0, 0, 83, 70, 1, 0, 0, 0, 83, 73,
1, 0, 0, 0, 83, 79, 1, 0, 0, 0, 84, 87, 1, 0, 0, 0, 85, 83, 1, 0, 0, 0,
85, 86, 1, 0, 0, 0, 86, 3, 1, 0, 0, 0, 87, 85, 1, 0, 0, 0, 88, 89, 5, 10,
0, 0, 89, 95, 3, 2, 1, 0, 90, 91, 5, 11, 0, 0, 91, 95, 3, 2, 1, 0, 92,
93, 5, 47, 0, 0, 93, 95, 3, 2, 1, 0, 94, 88, 1, 0, 0, 0, 94, 90, 1, 0,
0, 0, 94, 92, 1, 0, 0, 0, 95, 5, 1, 0, 0, 0, 96, 97, 6, 3, -1, 0, 97, 105,
5, 69, 0, 0, 98, 105, 3, 8, 4, 0, 99, 105, 3, 10, 5, 0, 100, 101, 5, 20,
0, 0, 101, 102, 3, 2, 1, 0, 102, 103, 5, 21, 0, 0, 103, 105, 1, 0, 0, 0,
104, 96, 1, 0, 0, 0, 104, 98, 1, 0, 0, 0, 104, 99, 1, 0, 0, 0, 104, 100,
1, 0, 0, 0, 105, 116, 1, 0, 0, 0, 106, 107, 10, 2, 0, 0, 107, 108, 5, 1,
0, 0, 108, 115, 5, 69, 0, 0, 109, 110, 10, 1, 0, 0, 110, 111, 5, 24, 0,
0, 111, 112, 3, 2, 1, 0, 112, 113, 5, 25, 0, 0, 113, 115, 1, 0, 0, 0, 114,
106, 1, 0, 0, 0, 114, 109, 1, 0, 0, 0, 115, 118, 1, 0, 0, 0, 116, 114,
1, 0, 0, 0, 116, 117, 1, 0, 0, 0, 117, 7, 1, 0, 0, 0, 118, 116, 1, 0, 0,
0, 119, 122, 3, 14, 7, 0, 120, 122, 3, 16, 8, 0, 121, 119, 1, 0, 0, 0,
121, 120, 1, 0, 0, 0, 122, 9, 1, 0, 0, 0, 123, 124, 5, 69, 0, 0, 124, 126,
5, 20, 0, 0, 125, 127, 3, 2, 1, 0, 126, 125, 1, 0, 0, 0, 126, 127, 1, 0,
0, 0, 127, 132, 1, 0, 0, 0, 128, 129, 5, 19, 0, 0, 129, 131, 3, 2, 1, 0,
130, 128, 1, 0, 0, 0, 131, 134, 1, 0, 0, 0, 132, 133, 1, 0, 0, 0, 132,
130, 1, 0, 0, 0, 133, 136, 1, 0, 0, 0, 134, 132, 1, 0, 0, 0, 135, 137,
5, 19, 0, 0, 136, 135, 1, 0, 0, 0, 136, 137, 1, 0, 0, 0, 137, 138, 1, 0,
0, 0, 138, 139, 5, 21, 0, 0, 139, 11, 1, 0, 0, 0, 140, 141, 7, 7, 0, 0,
141, 13, 1, 0, 0, 0, 142, 144, 5, 24, 0, 0, 143, 145, 3, 2, 1, 0, 144,
143, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 150, 1, 0, 0, 0, 146, 147,
5, 19, 0, 0, 147, 149, 3, 2, 1, 0, 148, 146, 1, 0, 0, 0, 149, 152, 1, 0,
0, 0, 150, 151, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0,
152, 150, 1, 0, 0, 0, 153, 155, 5, 19, 0, 0, 154, 153, 1, 0, 0, 0, 154,
155, 1, 0, 0, 0, 155, 156, 1, 0, 0, 0, 156, 157, 5, 25, 0, 0, 157, 15,
1, 0, 0, 0, 158, 160, 5, 22, 0, 0, 159, 161, 3, 18, 9, 0, 160, 159, 1,
0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 166, 1, 0, 0, 0, 162, 163, 5, 19, 0,
0, 163, 165, 3, 18, 9, 0, 164, 162, 1, 0, 0, 0, 165, 168, 1, 0, 0, 0, 166,
164, 1, 0, 0, 0, 166, 167, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166,
1, 0, 0, 0, 169, 171, 5, 19, 0, 0, 170, 169, 1, 0, 0, 0, 170, 171, 1, 0,
0, 0, 171, 172, 1, 0, 0, 0, 172, 173, 5, 23, 0, 0, 173, 17, 1, 0, 0, 0,
174, 186, 5, 69, 0, 0, 175, 176, 3, 20, 10, 0, 176, 177, 5, 16, 0, 0, 177,
178, 3, 2, 1, 0, 178, 186, 1, 0, 0, 0, 179, 180, 5, 24, 0, 0, 180, 181,
3, 2, 1, 0, 181, 182, 5, 25, 0, 0, 182, 183, 5, 16, 0, 0, 183, 184, 3,
2, 1, 0, 184, 186, 1, 0, 0, 0, 185, 174, 1, 0, 0, 0, 185, 175, 1, 0, 0,
0, 185, 179, 1, 0, 0, 0, 186, 19, 1, 0, 0, 0, 187, 188, 7, 8, 0, 0, 188,
21, 1, 0, 0, 0, 20, 29, 45, 63, 83, 85, 94, 104, 114, 116, 121, 126, 132,
136, 144, 150, 154, 160, 166, 170, 185,
}
deserializer := antlr.NewATNDeserializer(nil)
staticData.atn = deserializer.Deserialize(staticData.serializedATN)
atn := staticData.atn
staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState))
decisionToDFA := staticData.decisionToDFA
for index, state := range atn.DecisionToState {
decisionToDFA[index] = antlr.NewDFA(state, index)
}
}
// CAQLParserInit initializes any static state used to implement CAQLParser. By default the
// static state used to implement the parser is lazily initialized during the first call to
// NewCAQLParser(). You can call this function if you wish to initialize the static state ahead
// of time.
func CAQLParserInit() {
staticData := &caqlparserParserStaticData
staticData.once.Do(caqlparserParserInit)
}
// NewCAQLParser produces a new parser instance for the optional input antlr.TokenStream.
func NewCAQLParser(input antlr.TokenStream) *CAQLParser {
CAQLParserInit()
this := new(CAQLParser)
this.BaseParser = antlr.NewBaseParser(input)
staticData := &caqlparserParserStaticData
this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache)
this.RuleNames = staticData.ruleNames
this.LiteralNames = staticData.literalNames
this.SymbolicNames = staticData.symbolicNames
this.GrammarFileName = "CAQLParser.g4"
return this
@@ -298,7 +317,13 @@ func NewParseContext(parser antlr.Parser, parent antlr.ParserRuleContext, invoki
func (s *ParseContext) GetParser() antlr.Parser { return s.parser }
func (s *ParseContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -332,6 +357,9 @@ func (s *ParseContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Parse() (localctx IParseContext) {
this := p
_ = this
localctx = NewParseContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 0, CAQLParserRULE_parse)
@@ -414,7 +442,13 @@ func (s *ExpressionContext) GetEq_op() antlr.Token { return s.eq_op }
func (s *ExpressionContext) SetEq_op(v antlr.Token) { s.eq_op = v }
func (s *ExpressionContext) Value_literal() IValue_literalContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IValue_literalContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IValue_literalContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -424,7 +458,13 @@ func (s *ExpressionContext) Value_literal() IValue_literalContext {
}
func (s *ExpressionContext) Reference() IReferenceContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IReferenceContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IReferenceContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -434,7 +474,13 @@ func (s *ExpressionContext) Reference() IReferenceContext {
}
func (s *ExpressionContext) Operator_unary() IOperator_unaryContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IOperator_unaryContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IOperator_unaryContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -444,12 +490,20 @@ func (s *ExpressionContext) Operator_unary() IOperator_unaryContext {
}
func (s *ExpressionContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@@ -457,7 +511,17 @@ func (s *ExpressionContext) AllExpression() []IExpressionContext {
}
func (s *ExpressionContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@@ -587,6 +651,9 @@ func (p *CAQLParser) Expression() (localctx IExpressionContext) {
}
func (p *CAQLParser) expression(_p int) (localctx IExpressionContext) {
this := p
_ = this
var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
_parentState := p.GetState()
localctx = NewExpressionContext(p, p.GetParserRuleContext(), _parentState)
@@ -1038,7 +1105,13 @@ func (s *Operator_unaryContext) T_PLUS() antlr.TerminalNode {
}
func (s *Operator_unaryContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1076,6 +1149,9 @@ func (s *Operator_unaryContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Operator_unary() (localctx IOperator_unaryContext) {
this := p
_ = this
localctx = NewOperator_unaryContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 4, CAQLParserRULE_operator_unary)
@@ -1180,7 +1256,13 @@ func (s *ReferenceContext) T_STRING() antlr.TerminalNode {
}
func (s *ReferenceContext) Compound_value() ICompound_valueContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*ICompound_valueContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(ICompound_valueContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1190,7 +1272,13 @@ func (s *ReferenceContext) Compound_value() ICompound_valueContext {
}
func (s *ReferenceContext) Function_call() IFunction_callContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IFunction_callContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IFunction_callContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1204,7 +1292,13 @@ func (s *ReferenceContext) T_OPEN() antlr.TerminalNode {
}
func (s *ReferenceContext) Expression() IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1218,7 +1312,13 @@ func (s *ReferenceContext) T_CLOSE() antlr.TerminalNode {
}
func (s *ReferenceContext) Reference() IReferenceContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IReferenceContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IReferenceContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1264,6 +1364,9 @@ func (p *CAQLParser) Reference() (localctx IReferenceContext) {
}
func (p *CAQLParser) reference(_p int) (localctx IReferenceContext) {
this := p
_ = this
var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext()
_parentState := p.GetState()
localctx = NewReferenceContext(p, p.GetParserRuleContext(), _parentState)
@@ -1429,7 +1532,13 @@ func NewCompound_valueContext(parser antlr.Parser, parent antlr.ParserRuleContex
func (s *Compound_valueContext) GetParser() antlr.Parser { return s.parser }
func (s *Compound_valueContext) Array() IArrayContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IArrayContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IArrayContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1439,7 +1548,13 @@ func (s *Compound_valueContext) Array() IArrayContext {
}
func (s *Compound_valueContext) Object() IObjectContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IObjectContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObjectContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -1469,6 +1584,9 @@ func (s *Compound_valueContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Compound_value() (localctx ICompound_valueContext) {
this := p
_ = this
localctx = NewCompound_valueContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 8, CAQLParserRULE_compound_value)
@@ -1563,12 +1681,20 @@ func (s *Function_callContext) T_CLOSE() antlr.TerminalNode {
}
func (s *Function_callContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@@ -1576,7 +1702,17 @@ func (s *Function_callContext) AllExpression() []IExpressionContext {
}
func (s *Function_callContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@@ -1614,6 +1750,9 @@ func (s *Function_callContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Function_call() (localctx IFunction_callContext) {
this := p
_ = this
localctx = NewFunction_callContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 10, CAQLParserRULE_function_call)
var _la int
@@ -1778,6 +1917,9 @@ func (s *Value_literalContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Value_literal() (localctx IValue_literalContext) {
this := p
_ = this
localctx = NewValue_literalContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 12, CAQLParserRULE_value_literal)
var _la int
@@ -1861,12 +2003,20 @@ func (s *ArrayContext) T_ARRAY_CLOSE() antlr.TerminalNode {
}
func (s *ArrayContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@@ -1874,7 +2024,17 @@ func (s *ArrayContext) AllExpression() []IExpressionContext {
}
func (s *ArrayContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@@ -1912,6 +2072,9 @@ func (s *ArrayContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Array() (localctx IArrayContext) {
this := p
_ = this
localctx = NewArrayContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 14, CAQLParserRULE_array)
var _la int
@@ -2036,12 +2199,20 @@ func (s *ObjectContext) T_OBJECT_CLOSE() antlr.TerminalNode {
}
func (s *ObjectContext) AllObject_element() []IObject_elementContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IObject_elementContext)(nil)).Elem())
var tst = make([]IObject_elementContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IObject_elementContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IObject_elementContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IObject_elementContext); ok {
tst[i] = t.(IObject_elementContext)
i++
}
}
@@ -2049,7 +2220,17 @@ func (s *ObjectContext) AllObject_element() []IObject_elementContext {
}
func (s *ObjectContext) Object_element(i int) IObject_elementContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IObject_elementContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObject_elementContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@@ -2087,6 +2268,9 @@ func (s *ObjectContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Object() (localctx IObjectContext) {
this := p
_ = this
localctx = NewObjectContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 16, CAQLParserRULE_object)
var _la int
@@ -2207,7 +2391,13 @@ func (s *Object_elementContext) T_STRING() antlr.TerminalNode {
}
func (s *Object_elementContext) Object_element_name() IObject_element_nameContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IObject_element_nameContext)(nil)).Elem(), 0)
var t antlr.RuleContext
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IObject_element_nameContext); ok {
t = ctx.(antlr.RuleContext)
break
}
}
if t == nil {
return nil
@@ -2221,12 +2411,20 @@ func (s *Object_elementContext) T_COLON() antlr.TerminalNode {
}
func (s *Object_elementContext) AllExpression() []IExpressionContext {
var ts = s.GetTypedRuleContexts(reflect.TypeOf((*IExpressionContext)(nil)).Elem())
var tst = make([]IExpressionContext, len(ts))
children := s.GetChildren()
len := 0
for _, ctx := range children {
if _, ok := ctx.(IExpressionContext); ok {
len++
}
}
for i, t := range ts {
if t != nil {
tst := make([]IExpressionContext, len)
i := 0
for _, ctx := range children {
if t, ok := ctx.(IExpressionContext); ok {
tst[i] = t.(IExpressionContext)
i++
}
}
@@ -2234,7 +2432,17 @@ func (s *Object_elementContext) AllExpression() []IExpressionContext {
}
func (s *Object_elementContext) Expression(i int) IExpressionContext {
var t = s.GetTypedRuleContext(reflect.TypeOf((*IExpressionContext)(nil)).Elem(), i)
var t antlr.RuleContext
j := 0
for _, ctx := range s.GetChildren() {
if _, ok := ctx.(IExpressionContext); ok {
if j == i {
t = ctx.(antlr.RuleContext)
break
}
j++
}
}
if t == nil {
return nil
@@ -2272,6 +2480,9 @@ func (s *Object_elementContext) ExitRule(listener antlr.ParseTreeListener) {
}
func (p *CAQLParser) Object_element() (localctx IObject_elementContext) {
this := p
_ = this
localctx = NewObject_elementContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 18, CAQLParserRULE_object_element)
@@ -2409,6 +2620,9 @@ func (s *Object_element_nameContext) ExitRule(listener antlr.ParseTreeListener)
}
func (p *CAQLParser) Object_element_name() (localctx IObject_element_nameContext) {
this := p
_ = this
localctx = NewObject_element_nameContext(p, p.GetParserRuleContext(), p.GetState())
p.EnterRule(localctx, 20, CAQLParserRULE_object_element_name)
var _la int
@@ -2467,6 +2681,9 @@ func (p *CAQLParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex in
}
func (p *CAQLParser) Expression_Sempred(localctx antlr.RuleContext, predIndex int) bool {
this := p
_ = this
switch predIndex {
case 0:
return p.Precpred(p.GetParserRuleContext(), 13)
@@ -2513,6 +2730,9 @@ func (p *CAQLParser) Expression_Sempred(localctx antlr.RuleContext, predIndex in
}
func (p *CAQLParser) Reference_Sempred(localctx antlr.RuleContext, predIndex int) bool {
this := p
_ = this
switch predIndex {
case 13:
return p.Precpred(p.GetParserRuleContext(), 2)

View File

@@ -1,4 +1,4 @@
// Code generated from CAQLParser.g4 by ANTLR 4.9.2. DO NOT EDIT.
// Code generated from CAQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package parser // CAQLParser

View File

@@ -1,4 +1,4 @@
// Code generated from CAQLParser.g4 by ANTLR 4.9.2. DO NOT EDIT.
// Code generated from CAQLParser.g4 by ANTLR 4.10.1. DO NOT EDIT.
package parser // CAQLParser

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More