Compare commits

...

10 Commits

Author SHA1 Message Date
Jonas Plum
619c5c65ce refactor: improve setup and maintainability (#1067) 2024-07-08 00:16:37 +02:00
Jonas Plum
f5fcee0096 Enforce semantic PR titles (#1070) 2024-07-08 00:04:37 +02:00
dependabot[bot]
451f200612 Bump google.golang.org/protobuf from 1.31.0 to 1.33.0 (#1058)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-03-20 22:03:56 +01:00
dependabot[bot]
639b5a8b1e Bump github.com/go-jose/go-jose/v3 from 3.0.1 to 3.0.3 (#1056)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-03-20 22:03:48 +01:00
dependabot[bot]
81931383fd Bump golang.org/x/crypto from 0.14.0 to 0.17.0 (#1050)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-23 23:11:58 +01:00
dependabot[bot]
233b5451b7 Bump ip from 1.1.8 to 1.1.9 in /ui (#1055)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-23 23:11:43 +01:00
dependabot[bot]
ccba3f81c0 Bump github.com/containerd/containerd from 1.6.8 to 1.6.26 (#1051)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-23 23:11:36 +01:00
dependabot[bot]
82e68a9a44 Bump follow-redirects from 1.15.2 to 1.15.4 in /ui (#1053)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-02-23 23:11:27 +01:00
dependabot[bot]
99360cc387 Bump github.com/go-jose/go-jose/v3 from 3.0.0 to 3.0.1 (#1049)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-11-30 21:36:18 +01:00
dependabot[bot]
4af2ad7644 Bump browserify-sign from 4.2.1 to 4.2.2 in /ui (#1045)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2023-10-28 00:28:36 +02:00
554 changed files with 11300 additions and 91664 deletions

View File

@@ -4,10 +4,6 @@ on:
pull_request:
release: { types: [ published ] }
env:
REGISTRY: ghcr.io
IMAGE_NAME: ${{ github.repository }}
jobs:
lint:
name: Lint
@@ -15,116 +11,23 @@ jobs:
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with: { go-version: '1.21' }
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- uses: golangci/golangci-lint-action@v3
with: { version: 'v1.54' }
with: { go-version: '1.22' }
- uses: oven-sh/setup-bun@v1
- run: make build-ui
- uses: golangci/golangci-lint-action@v6
with: { version: 'v1.59' }
test:
name: Test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with: { node-version: '14', cache: 'yarn', cache-dependency-path: 'ui/yarn.lock' }
- uses: actions/setup-go@v4
with: { go-version: '1.21' }
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- run: docker compose up --quiet-pull --detach
working-directory: dev
- run: go test -coverprofile=cover.out -coverpkg=./... ./...
- run: go tool cover -func=cover.out
- uses: codecov/codecov-action@v3
with: { go-version: '1.22' }
- uses: oven-sh/setup-bun@v1
cypress:
strategy:
matrix:
test: [ tickets, templates, playbooks ]
auth: [ authelia ]
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with: { go-version: '1.21' }
- uses: actions/setup-node@v4
with: { node-version: '14' }
# run UI
- run: |
yarn install
yarn serve &
working-directory: ui
- run: curl --head -X GET --retry 60 --retry-all-errors --retry-delay 10 http://localhost:8080
# run containers
- run: sed -i 's/host.docker.internal/172.17.0.1/g' dev/nginx.conf
- run: docker compose up --quiet-pull --detach
working-directory: dev
if: matrix.auth == 'authelia'
- run: curl --head -X GET --retry 60 --retry-all-errors --retry-delay 10 http://localhost:8082
if: matrix.auth == 'authelia'
# run catalyst
- run: |
mkdir -p ui/dist/img
touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
- run: go mod download
- run: bash start_dev.sh &
working-directory: dev
if: matrix.auth == 'authelia'
- run: curl --head -X GET --retry 60 --retry-all-errors --retry-delay 10 http://localhost:8000
# run cypress
- uses: cypress-io/github-action@v4
env:
CYPRESS_AUTH: ${{ matrix.auth }}
CYPRESS_TEST: ${{ matrix.test }}
with:
browser: chrome
working-directory: ui
- run: make build-ui
build-npm:
name: Build npm
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-node@v4
with: { node-version: '14', cache: 'yarn', cache-dependency-path: 'ui/yarn.lock' }
- run: yarn install && yarn build
working-directory: ui
- uses: actions/upload-artifact@v3
with: { name: ui, path: ui/dist, retention-days: 1 }
build:
if: github.event_name != 'pull_request'
name: Build
runs-on: ubuntu-latest
needs: [ build-npm, test ]
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v4
with: { go-version: '1.21' }
- uses: actions/download-artifact@v3
with: { name: ui, path: ui/dist }
- name: Version
if: github.ref_type == 'tag' && github.ref_name != ''
run: |
echo ${{ github.ref_name }}
echo ${{ github.ref_name }} > VERSION
- run: go build -o catalyst ./cmd/catalyst/.
- uses: docker/login-action@v3
with:
registry: ${{ env.REGISTRY }}
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
- uses: docker/build-push-action@v5
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
- run: make test

32
.github/workflows/goreleaser.yml vendored Normal file
View File

@@ -0,0 +1,32 @@
name: goreleaser
on:
pull_request:
push:
tags:
- "*"
permissions:
contents: write
jobs:
goreleaser:
runs-on: ubuntu-latest
steps:
-
name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
-
name: Set up Go
uses: actions/setup-go@v5
-
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: '~> v2'
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View File

@@ -0,0 +1,24 @@
name: Semantic Pull Request
on:
pull_request_target:
types:
- opened
- edited
- synchronize
- reopened
permissions:
pull-requests: read
jobs:
main:
name: Validate PR title
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v5
with:
disallowScopes: ".*"
subjectPattern: "^(?![A-Z]).+$"
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

113
.gitignore vendored
View File

@@ -1,87 +1,36 @@
.idea
.antlr
.DS_Store
uploads
gen
*.bleve
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
venv/
.venv/
.python-version
.pytest_cache
# Translations
*.mo
*.pot
# Django stuff:
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# Sphinx documentation
docs/_build/
# PyBuilder
target/
#Ipython Notebook
.ipynb_checkpoints
# npm
wwwroot/*.js
typings
dist
node_modules
.DS_Store
dist-ssr
coverage
*.local
profile.cov
cover.out
/cypress/videos/
/cypress/screenshots/
generated/caql/parser/*.interp
generated/caql/parser/*.tokens
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
*.tsbuildinfo
bin
dist
pb_data
catalyst
catalyst_data

View File

@@ -1,67 +1,9 @@
run:
go: "1.19"
go: "1.22"
timeout: 5m
skip-dirs:
- generated
- internal
linters:
enable:
- asciicheck
- containedctx
- decorder
- dogsled
- durationcheck
- errchkjson
- errname
- errorlint
- exhaustive
- exportloopref
- forbidigo
- forcetypeassert
- gci
- gocritic
- godot
- gofmt
- gofumpt
- goheader
- goimports
- gomodguard
- goprintffuncname
- gosec
- grouper
- importas
- ireturn
- misspell
- nakedret
- nilnil
- nlreturn
- nolintlint
- paralleltest
- predeclared
- promlinter
- revive
- tenv
- thelper
- unconvert
- whitespace
enable-all: true
disable:
# go 1.18
- bodyclose
- contextcheck
- gosimple
- ifshort
- nilerr
- noctx
- rowserrcheck
- sqlclosecheck
- staticcheck
- stylecheck
- tparallel
- unparam
- unused
- wastedassign
# complexity
- cyclop
- gocognit
@@ -72,19 +14,22 @@ linters:
# disable
- depguard
- dupl
- exhaustivestruct
- err113
- execinquery
- exhaustruct
- funlen
- gochecknoglobals
- gochecknoinits
- goconst
- godox
- goerr113
- gomnd
- gomoddirectives
- ireturn
- lll
- makezero
- mnd
- prealloc
- structcheck
- tagalign
- tagliatelle
- testpackage
- varnamelen
@@ -96,28 +41,3 @@ linters-settings:
- standard
- default
- prefix(github.com/SecurityBrewery/catalyst)
ireturn:
allow:
- error
- context.Context
- go-driver.Cursor
- go-driver.Collection
- go-driver.Database
- go-driver.Client
- chi.Router
issues:
exclude-rules:
- path: caql
text: "var-naming: don't use underscores"
- path: database/user.go
text: "G404"
linters: [ gosec ]
- path: caql/function.go
text: "G404"
linters: [ gosec ]
- path: caql
linters: [ forcetypeassert ]
- text: github.com/go-chi/chi/v5.Router
linters: [ ireturn ]
- path: ui/ui.go
linters: [ typecheck ]

30
.goreleaser.yaml Normal file
View File

@@ -0,0 +1,30 @@
version: 2
before:
hooks:
- go mod tidy
builds:
- env:
- CGO_ENABLED=0
goos:
- linux
- darwin
archives:
- format: tar.gz
# this name template makes the OS and Arch compatible with the results of `uname`.
name_template: >-
{{ .ProjectName }}_
{{- title .Os }}_
{{- if eq .Arch "amd64" }}x86_64
{{- else if eq .Arch "386" }}i386
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end }}
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
changelog:
sort: asc

View File

@@ -1 +0,0 @@
* @cugu

View File

@@ -1,12 +0,0 @@
FROM ubuntu:23.04
RUN apt-get update -y && apt-get -y install curl gnupg2 software-properties-common
RUN curl -OL https://download.arangodb.com/arangodb34/DEBIAN/Release.key
RUN apt-key add Release.key
RUN apt-add-repository 'deb https://download.arangodb.com/arangodb34/DEBIAN/ /'
RUN apt-get update -y && apt-get -y install arangodb3
COPY catalyst /app/catalyst
CMD /app/catalyst
EXPOSE 8000

View File

@@ -1,9 +1,47 @@
.PHONY: lint
lint:
golangci-lint run ./...
.PHONY: install
install:
@echo "Installing..."
go install github.com/bombsimon/wsl/v4/cmd...@master
go install mvdan.cc/gofumpt@latest
go install github.com/daixiang0/gci@latest
.PHONY: fmt
fmt:
@echo "Formatting..."
go mod tidy
go fmt ./...
gci write -s standard -s default -s "prefix(github.com/SecurityBrewery/catalyst)" .
# gofumpt -l -w .
# wsl --fix ./...
gofumpt -l -w .
wsl -fix ./... || true
cd ui && bun format
.PHONY: lint
lint:
golangci-lint version
golangci-lint run ./...
.PHONY: test
test:
@echo "Testing..."
go test -v ./...
cd ui && bun test
.PHONY: build-ui
build-ui:
@echo "Building..."
cd ui && bun install
cd ui && bun build-only
.PHONY: dev
dev:
@echo "Running..."
rm -rf catalyst_data
go run . bootstrap
go run . admin create admin@catalyst-soar.com 1234567890
go run . set-feature-flags dev
go run . fake-data
go run . serve
.PHONY: dev-ui
serve-ui:
cd ui && bun dev --port 3000

16
NOTICE
View File

@@ -1,16 +0,0 @@
The following components are included in this product:
Badgerodon Collections
https://github.com/badgerodon/collections
Copyright (c) 2012 Caleb Doxsey
Licensed under the MIT License
go-toposort
https://github.com/philopon/go-toposort
Copyright (c) 2017 Hirotomo Moriwaki
Licensed under the MIT License
The Go programming language
https://go.dev/
Copyright (c) 2009 The Go Authors
See https://go.dev/LICENSE for license details.

111
README.md
View File

@@ -1,97 +1,84 @@
<h1 align="center">
<img width="30" alt="Screenshot of the playbook part of a ticket" src="ui/public/flask_white.svg" />
<picture>
<source media="(prefers-color-scheme: dark)" srcset="ui/src/assets/flask_white.svg">
<img width="30" alt="Shows an illustrated sun in light color mode and a moon with stars in dark color mode." src="ui/src/assets/flask.svg">
</picture>
Catalyst</h1>
<h3 align="center">Speed up your reactions</h3>
<h4 align="center">
<a href="https://catalyst-soar.com">Website</a>
<a href="https://catalyst.security-brewery.com/">Website</a>
-
<a href="https://catalyst-soar.com/docs/category/catalyst-handbook">The Catalyst Handbook (Documentation)</a>
<a href="https://catalyst.security-brewery.com/docs/category/catalyst-handbook">The Catalyst Handbook (Documentation)</a>
-
<a href="https://try.catalyst-soar.com">Try online</a>
<a href="https://try.catalyst.security-brewery.com/">Demo</a>
</h4>
Catalyst is an incident response platform or SOAR (Security Orchestration, Automation and Response) system. It can help
you to automate your alert handling and incident response procedures.
Catalyst is an incident response platform.
It can help to automate your alert handling and incident response procedures.
## Features
### Ticket (Alert & Incident) Management
![Screenshot of a ticket](docs/screenshots/ticket.png)
Tickets are the core of Catalyst. They represent alerts, incidents, forensics
investigations, threat hunts or any other event you want to handle in your
organisation.
### Ticket Templates
Tickets are the core of Catalyst.
They represent alerts, incidents, forensics investigations,
threat hunts or any other event you want to handle in your organisation.
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/details.png" />
<a href="docs/screenshots/ticket.png">
<img alt="Screenshot of a ticket" src="docs/screenshots/ticket.png" />
</a>
</center>
Templates define the custom information for tickets. The core information for
tickets like title, creation date or closing status is kept quite minimal and other
information like criticality, description or MITRE ATT&CK information can be
added individually.
### Ticket Types
### Conditional Custom Fields
Templates define the custom information for tickets.
The core information for tickets like title, creation date or closing status is kept quite minimal
and other information like criticality, description or MITRE ATT&CK information can be added individually.
### Timelines
Timelines are used to document the progress of an investigation.
They can be used to document the steps taken during an investigation, the findings or the results of the investigation.
### Tasks
Tasks are the smallest unit of work in Catalyst. They can be assigned to users and have a status.
Tasks can be used to document the progress of an investigation or to assign work to different users.
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/conditional_custom_field_a.png" />
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/conditional_custom_field_b.png" />
<a href="docs/screenshots/tasks.png">
<img alt="Screenshot of the tasks part of a ticket" src="docs/screenshots/tasks.png" />
</a>
</center>
Custom Fields can be dependent on each other. So if you, for example choose
"malware" as an incident type a custom field ask you to define it further as
ransomware, worm, etc. which a "phishing" incident would ask for the number
of received mails in that campaign.
### Custom Fields
### Playbooks
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/phishing_playbook.png" />
</center>
Playbooks represent processes that can be attached to tickets. Playbooks can
contain manual and automated tasks. Complex workflows with different workflow
branches, parallel tasks and task dependencies can be modeled.
### Automations
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/script.png" />
</center>
Automations are scripts that automate tasks or enrich artifacts. Automations are
run in their own Docker containers. This enables them to be created in different
scripting languages and run securely in their own environment.
Custom fields can be added to tickets to store additional information.
They can be used to store information like the affected system, the attacker's IP address or the type of malware.
Custom fields can be added to ticket types and are then available for all tickets of this type.
### Dashboards
Catalyst comes with a dashboard that presents the most important information at a glance.
<center>
<img alt="Screenshot of the dashboard editor" src="docs/screenshots/dashboard.png" />
<a href="docs/screenshots/dashboard.png">
<img alt="Screenshot of the dashboard" src="docs/screenshots/dashboard.png" />
</a>
</center>
Catalyst comes with a dashboard editor that allows you to create custom dashboards
for your organisation. Dashboards can be created with line, bar, and pie charts.
### Webhooks
Catalyst can send webhooks to other systems.
This can be used to trigger actions in other systems and create automated workflows.
### Users
Catalyst has two different types of users, normal users accessing the platform
via OIDC authentication and API keys for external script. A
fine-grained access model is available for both types and allows to define
possible actions for each user.
Catalyst supports authentication via username and password
or via OAuth2 with an external identity provider like Google, GitHub or GitLab.
## License
### More
Copyright (c) 2021-present Jonas Plum
Portions of this software are licensed as follows:
* All third party components incorporated into Catalyst are licensed under the
original license provided by the owner of the applicable component. Those
files contain a license notice on top of the file and are listed in the
[NOTICE](NOTICE) file.
* Content outside the above-mentioned files above is
available under the [GNU Affero General Public License v3.0](LICENSE).
Catalyst supports a lot more features like: Links, Files, or Comments on tickets.

View File

@@ -1 +0,0 @@
0.0.0-dev

108
auth.go
View File

@@ -1,108 +0,0 @@
package catalyst
import (
"context"
"crypto/sha256"
"errors"
"fmt"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/model"
)
type catalystResolver struct {
database *database.Database
}
func newCatalystResolver(db *database.Database) *catalystResolver {
return &catalystResolver{
database: db,
}
}
func (c *catalystResolver) UserCreateIfNotExists(ctx context.Context, user *maut.User, password string) (err error) {
if user != nil {
if _, err := c.database.UserGet(ctx, user.ID); err == nil {
return nil
}
}
if user == nil || user.APIKey {
// ignore error, because we don't care if the user already exists
_, _ = c.database.UserCreateSetupAPIKey(ctx, password)
} else {
_, err = c.database.UserCreate(ctx, &model.UserForm{
Apikey: user.APIKey,
Blocked: user.Blocked,
ID: user.ID,
Roles: user.Roles,
})
if err != nil {
return err
}
err = c.database.UserDataCreate(ctx, user.ID, &model.UserData{
Email: user.Email,
Image: nil,
Name: user.Name,
Timeformat: nil,
})
}
return err
}
func (c *catalystResolver) User(ctx context.Context, userID string) (*maut.User, error) {
user, err := c.database.UserGet(ctx, userID)
if err != nil {
return nil, err
}
return mapMautUser(user), nil
}
func (c *catalystResolver) UserAPIKeyByHash(ctx context.Context, key string) (*maut.User, error) {
sha256Hash := fmt.Sprintf("%x", sha256.Sum256([]byte(key)))
user, err := c.database.UserAPIKeyByHash(ctx, sha256Hash)
if err != nil {
return nil, err
}
return mapMautUser(user), nil
}
func (c *catalystResolver) UserByIDAndPassword(ctx context.Context, username string, password string) (*maut.User, error) {
user, err := c.database.UserByIDAndPassword(ctx, username, password)
if err != nil {
return nil, err
}
return mapMautUser(user), nil
}
func (c *catalystResolver) Role(_ context.Context, roleID string) (r *maut.Role, err error) {
switch roleID {
case "admin":
return Admin, nil
case "engineer":
return engineer, nil
case "analyst":
return analyst, nil
}
return nil, errors.New("role not found")
}
func mapMautUser(user *model.UserResponse) *maut.User {
return &maut.User{
ID: user.ID,
APIKey: user.Apikey,
Blocked: user.Blocked,
// Email: user.Email, // TODO
// Groups: user.Groups, // TODO
// Name: user.Name, // TODO
Roles: user.Roles,
}
}

50
bootstrapcmd.go Normal file
View File

@@ -0,0 +1,50 @@
package main
import (
"log"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase"
"github.com/pocketbase/pocketbase/migrations"
"github.com/pocketbase/pocketbase/migrations/logs"
"github.com/pocketbase/pocketbase/tools/migrate"
"github.com/spf13/cobra"
)
func bootstrapCmd(app *pocketbase.PocketBase) *cobra.Command {
return &cobra.Command{
Use: "bootstrap",
Run: func(_ *cobra.Command, _ []string) {
if err := app.Bootstrap(); err != nil {
log.Fatal(err)
}
if err := migrateDBs(app); err != nil {
log.Fatal(err)
}
},
}
}
type migration struct {
db *dbx.DB
migrations migrate.MigrationsList
}
func migrateDBs(app *pocketbase.PocketBase) error {
for _, m := range []migration{
{db: app.DB(), migrations: migrations.AppMigrations},
{db: app.LogsDB(), migrations: logs.LogsMigrations},
} {
runner, err := migrate.NewRunner(m.db, m.migrations)
if err != nil {
return err
}
if _, err := runner.Up(); err != nil {
return err
}
}
return nil
}

View File

@@ -1,69 +0,0 @@
package bus
import (
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/generated/model"
)
type ResultMsg struct {
Automation string `json:"automation"`
Data map[string]any `json:"data,omitempty"`
Target *model.Origin `json:"target"`
}
type RequestMsg struct {
IDs []driver.DocumentID `json:"ids"`
Function string `json:"function"`
User string `json:"user"`
}
type JobMsg struct {
ID string `json:"id"`
Automation string `json:"automation"`
Origin *model.Origin `json:"origin"`
Message *model.Message `json:"message"`
}
type DatabaseUpdateType string
const (
DatabaseEntryRead DatabaseUpdateType = "read"
DatabaseEntryCreated DatabaseUpdateType = "created"
DatabaseEntryUpdated DatabaseUpdateType = "updated"
)
type DatabaseUpdateMsg struct {
IDs []driver.DocumentID `json:"ids"`
Type DatabaseUpdateType `json:"type"`
}
type Bus struct {
ResultChannel *Channel[*ResultMsg]
RequestChannel *Channel[*RequestMsg]
JobChannel *Channel[*JobMsg]
DatabaseChannel *Channel[*DatabaseUpdateMsg]
}
func New() *Bus {
return &Bus{
ResultChannel: &Channel[*ResultMsg]{},
RequestChannel: &Channel[*RequestMsg]{},
JobChannel: &Channel[*JobMsg]{},
DatabaseChannel: &Channel[*DatabaseUpdateMsg]{},
}
}
type Channel[T any] struct {
Subscriber []func(T)
}
func (c *Channel[T]) Publish(msg T) {
for _, s := range c.Subscriber {
go s(msg)
}
}
func (c *Channel[T]) Subscribe(handler func(T)) {
c.Subscriber = append(c.Subscriber, handler)
}

View File

@@ -1,53 +0,0 @@
package busservice
import (
"context"
"log"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
type busService struct {
db *database.Database
apiURL string
apiKey string
catalystBus *bus.Bus
network string
}
func New(apiURL, apikey, network string, catalystBus *bus.Bus, db *database.Database) {
h := &busService{db: db, apiURL: apiURL, apiKey: apikey, network: network, catalystBus: catalystBus}
catalystBus.RequestChannel.Subscribe(h.logRequest)
catalystBus.ResultChannel.Subscribe(h.handleResult)
catalystBus.JobChannel.Subscribe(h.handleJob)
}
func busContext() context.Context {
// TODO: change roles?
bot := &maut.User{ID: "bot", Roles: []string{maut.AdminRole}}
return maut.UserContext(context.Background(), bot, nil) // TODO add permissions ?
}
func (h *busService) logRequest(msg *bus.RequestMsg) {
var logEntries []*model.LogEntry
for _, i := range msg.IDs {
logEntries = append(logEntries, &model.LogEntry{
Type: "request",
Reference: i.String(),
Creator: msg.User,
Message: msg.Function,
Created: time.Now().UTC(),
})
}
if err := h.db.LogBatchCreate(busContext(), logEntries); err != nil {
log.Println(err)
}
}

View File

@@ -1,203 +0,0 @@
package busservice
import (
"archive/tar"
"bufio"
"bytes"
"context"
"fmt"
"io"
"log"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/client"
"github.com/docker/docker/pkg/stdcopy"
"github.com/SecurityBrewery/catalyst/database"
)
func createContainer(ctx context.Context, image, script, data, network string) (string, string, error) {
cli, err := client.NewClientWithOpts(client.FromEnv)
if err != nil {
return "", "", err
}
logs, err := pullImage(ctx, cli, image)
if err != nil {
return "", logs, err
}
config := &container.Config{
Image: image,
Cmd: []string{"/script", data},
WorkingDir: "/home",
AttachStderr: true,
AttachStdout: true,
}
hostConfig := &container.HostConfig{
NetworkMode: container.NetworkMode(network),
}
resp, err := cli.ContainerCreate(ctx, config, hostConfig, nil, "")
if err != nil {
return "", logs, err
}
if err := copyFile(ctx, cli, "/script", script, resp.ID); err != nil {
return "", logs, err
}
return resp.ID, logs, nil
}
func pullImage(ctx context.Context, cli *client.Client, image string) (string, error) {
reader, err := cli.ImagePull(ctx, image, types.ImagePullOptions{})
if err != nil {
return "", err
}
defer reader.Close()
buf := &bytes.Buffer{}
_, err = io.Copy(buf, reader)
return buf.String(), err
}
func copyFile(ctx context.Context, cli *client.Client, path string, contentString string, id string) error {
tarBuf := &bytes.Buffer{}
tw := tar.NewWriter(tarBuf)
header := &tar.Header{Name: path, Mode: 0o755, Size: int64(len(contentString))}
if err := tw.WriteHeader(header); err != nil {
return err
}
if _, err := tw.Write([]byte(contentString)); err != nil {
return err
}
if err := tw.Close(); err != nil {
return err
}
return cli.CopyToContainer(ctx, id, "/", tarBuf, types.CopyToContainerOptions{})
}
func runDocker(ctx context.Context, jobID, containerID string, db *database.Database) (stdout []byte, stderr []byte, err error) {
cli, err := client.NewClientWithOpts(client.FromEnv)
if err != nil {
return nil, nil, err
}
defer func(cli *client.Client, ctx context.Context, containerID string, options types.ContainerRemoveOptions) {
err := cli.ContainerRemove(ctx, containerID, options)
if err != nil {
log.Println(err)
}
}(cli, ctx, containerID, types.ContainerRemoveOptions{Force: true})
if err := cli.ContainerStart(ctx, containerID, types.ContainerStartOptions{}); err != nil {
return nil, nil, err
}
stderrBuf, err := streamStdErr(ctx, cli, jobID, containerID, db)
if err != nil {
return nil, nil, err
}
if err := waitForContainer(ctx, cli, containerID, stderrBuf); err != nil {
return nil, nil, err
}
output, err := getStdOut(ctx, cli, containerID)
if err != nil {
log.Println(err)
}
return output.Bytes(), stderrBuf.Bytes(), nil
}
func streamStdErr(ctx context.Context, cli *client.Client, jobID, containerID string, db *database.Database) (*bytes.Buffer, error) {
stderrBuf := &bytes.Buffer{}
containerLogs, err := cli.ContainerLogs(ctx, containerID, types.ContainerLogsOptions{ShowStderr: true, Follow: true})
if err != nil {
return nil, err
}
go func() {
err := scanLines(ctx, jobID, containerLogs, stderrBuf, db)
if err != nil {
log.Println(err)
return
}
if err := containerLogs.Close(); err != nil {
log.Println(err)
return
}
}()
return stderrBuf, nil
}
func scanLines(ctx context.Context, jobID string, input io.ReadCloser, output io.Writer, db *database.Database) error {
r, w := io.Pipe()
go func() {
_, err := stdcopy.StdCopy(w, w, input)
if err != nil {
log.Println(err)
return
}
if err := w.Close(); err != nil {
log.Println(err)
return
}
}()
s := bufio.NewScanner(r)
for s.Scan() {
b := s.Bytes()
_, _ = output.Write(b)
_, _ = output.Write([]byte("\n"))
if err := db.JobLogAppend(ctx, jobID, string(b)+"\n"); err != nil {
log.Println(err)
continue
}
}
return s.Err()
}
func waitForContainer(ctx context.Context, cli *client.Client, containerID string, stderrBuf *bytes.Buffer) error {
statusCh, errCh := cli.ContainerWait(ctx, containerID, container.WaitConditionNotRunning)
select {
case err := <-errCh:
if err != nil {
return err
}
case exitStatus := <-statusCh:
if exitStatus.StatusCode != 0 {
return fmt.Errorf("container returned status code %d: stderr: %s", exitStatus.StatusCode, stderrBuf.String())
}
}
return nil
}
func getStdOut(ctx context.Context, cli *client.Client, containerID string) (*bytes.Buffer, error) {
output := &bytes.Buffer{}
containerLogs, err := cli.ContainerLogs(ctx, containerID, types.ContainerLogsOptions{ShowStdout: true, Follow: true})
if err != nil {
return nil, err
}
defer containerLogs.Close()
_, err = stdcopy.StdCopy(output, output, containerLogs)
if err != nil {
return nil, err
}
return output, nil
}

View File

@@ -1,120 +0,0 @@
package busservice
import (
"encoding/json"
"fmt"
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func (h *busService) handleJob(automationMsg *bus.JobMsg) {
ctx := busContext()
job, err := h.db.JobCreate(ctx, automationMsg.ID, &model.JobForm{
Automation: automationMsg.Automation,
Payload: automationMsg.Message.Payload,
Origin: automationMsg.Origin,
})
if err != nil {
log.Println(err)
return
}
automation, err := h.db.AutomationGet(ctx, automationMsg.Automation)
if err != nil {
log.Println(err)
return
}
if automation.Script == "" {
log.Println("automation is empty")
return
}
if automationMsg.Message.Secrets == nil {
automationMsg.Message.Secrets = map[string]string{}
}
automationMsg.Message.Secrets["catalyst_apikey"] = h.apiKey
automationMsg.Message.Secrets["catalyst_apiurl"] = h.apiURL
scriptMessage, err := json.Marshal(automationMsg.Message)
if err != nil {
log.Println(err)
return
}
containerID, logs, err := createContainer(ctx, automation.Image, automation.Script, string(scriptMessage), h.network)
if err != nil {
log.Println(err)
return
}
if _, err := h.db.JobUpdate(ctx, automationMsg.ID, &model.JobUpdate{
Container: &containerID,
Running: true,
Output: job.Output,
Log: &logs,
Status: job.Status,
}); err != nil {
log.Println(err)
return
}
var result map[string]any
stdout, _, err := runDocker(ctx, automationMsg.ID, containerID, h.db)
if err != nil {
result = map[string]any{"error": fmt.Sprintf("error running script %s %s", err, string(stdout))}
} else {
var data map[string]any
if err := json.Unmarshal(stdout, &data); err != nil {
result = map[string]any{"error": string(stdout)}
} else {
result = data
}
}
h.catalystBus.ResultChannel.Publish(&bus.ResultMsg{Automation: automationMsg.Automation, Data: result, Target: automationMsg.Origin})
if err := h.db.JobComplete(ctx, automationMsg.ID, result); err != nil {
log.Println(err)
return
}
}
/*
func getAutomation(automationID string, config *Config) (*model.AutomationResponse, error) {
req, err := http.NewRequest(http.MethodGet, config.CatalystAPIUrl+"/automations/"+automationID, nil)
if err != nil {
return nil, err
}
req.Header.Set("PRIVATE-TOKEN", config.CatalystAPIKey)
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
b, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var automation model.AutomationResponse
if err := json.Unmarshal(b, &automation); err != nil {
return nil, err
}
return &automation, nil
}
*/

View File

@@ -1,35 +0,0 @@
package busservice
import (
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// handleResult routes an automation result back to its origin: task results
// are written back via TaskComplete, artifact results via EnrichArtifact.
// Results without a target are ignored; database errors are logged only.
func (h *busService) handleResult(resultMsg *bus.ResultMsg) {
	target := resultMsg.Target
	if target == nil {
		return
	}

	ctx := busContext()

	if origin := target.TaskOrigin; origin != nil {
		_, err := h.db.TaskComplete(
			ctx,
			origin.TicketId,
			origin.PlaybookId,
			origin.TaskId,
			resultMsg.Data,
		)
		if err != nil {
			log.Println(err)
		}

		return
	}

	if origin := target.ArtifactOrigin; origin != nil {
		form := &model.EnrichmentForm{
			Data: resultMsg.Data,
			Name: resultMsg.Automation,
		}

		if _, err := h.db.EnrichArtifact(ctx, origin.TicketId, origin.Artifact, form); err != nil {
			log.Println(err)
		}
	}
}

View File

@@ -1,155 +0,0 @@
package caql
import (
"errors"
"fmt"
"strconv"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// ErrTooComplex is reported when a CAQL query uses constructs that cannot be
// expressed as a bleve index query string.
var ErrTooComplex = errors.New("unsupported features for index queries, use advanced search instead")

// bleveBuilder translates a parsed CAQL expression tree into a bleve query
// string via parse-tree listener callbacks.
type bleveBuilder struct {
	*parser.BaseCAQLParserListener

	// stack holds rendered query fragments during tree traversal.
	stack []string
	// err records an unsupported construct encountered during traversal
	// (later assignments overwrite earlier ones).
	err error
}
// push appends a rendered query fragment to the listener stack.
func (s *bleveBuilder) push(i string) {
	s.stack = append(s.stack, i)
}
// pop removes and returns the most recently pushed fragment. It panics with
// ErrStack on underflow, which indicates a listener/grammar mismatch rather
// than a user input error.
func (s *bleveBuilder) pop() (n string) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		panic(ErrStack)
	}
	// Pop the last value from the Stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]

	return
}
// binaryPop pops the operands of a binary expression and returns them in
// (left, right) order. Operands are pushed left-first, so the right operand
// sits on top of the stack and is popped first.
//
// The return type is (string, string) for consistency with
// aqlBuilder.binaryPop — pop only ever yields strings, so the previous
// (any, any) signature added no generality.
func (s *bleveBuilder) binaryPop() (string, string) {
	right, left := s.pop(), s.pop()

	return left, right
}
// ExitExpression is called when production expression is exited.
// It renders the expression on top of the stack into bleve query-string
// syntax. Constructs that bleve cannot express (arithmetic, ranges, OR,
// ternaries, IN, unary operators) set s.err instead of pushing a fragment.
func (s *bleveBuilder) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// pass
	case ctx.Reference() != nil:
		// pass
	case ctx.Operator_unary() != nil:
		s.err = ErrTooComplex
	// arithmetic operators are not representable in an index query
	case ctx.T_PLUS() != nil:
		fallthrough
	case ctx.T_MINUS() != nil:
		fallthrough
	case ctx.T_TIMES() != nil:
		fallthrough
	case ctx.T_DIV() != nil:
		fallthrough
	case ctx.T_MOD() != nil:
		s.err = ErrTooComplex
	case ctx.T_RANGE() != nil:
		s.err = ErrTooComplex
	// comparisons map to bleve's field:<op>value syntax; the GetEq_op()
	// guard excludes the ALL/ANY/NONE array-comparison forms
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:<%s", left, right))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:>%s", left, right))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:<=%s", left, right))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:>=%s", left, right))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		s.err = ErrTooComplex
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:%s", left, right))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		// leading '-' negates a term in bleve query syntax
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("-%s:%s", left, right))
	// array comparison operators (ALL/ANY/NONE) are unsupported
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		fallthrough
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		fallthrough
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		s.err = ErrTooComplex
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		fallthrough
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		fallthrough
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		s.err = ErrTooComplex
	case ctx.T_LIKE() != nil:
		s.err = errors.New("index queries are like queries by default")
	case ctx.T_REGEX_MATCH() != nil:
		// bleve regex term syntax: field:/pattern/
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.err = ErrTooComplex
		} else {
			s.push(fmt.Sprintf("%s:/%s/", left, right))
		}
	case ctx.T_REGEX_NON_MATCH() != nil:
		s.err = errors.New("index query cannot contain regex non matches, use advanced search instead")
	case ctx.T_AND() != nil:
		// bleve treats space-separated terms as a conjunction here
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s %s", left, right))
	case ctx.T_OR() != nil:
		s.err = errors.New("index query cannot contain OR, use advanced search instead")
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
	default:
		panic("unknown expression")
	}
}
// ExitReference is called when production reference is exited.
// Simple (dotted) field references are rendered as-is; compound values,
// function calls, parentheses and array subscripts cannot appear in an
// index query and set s.err.
func (s *bleveBuilder) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		// extend the reference on the stack with one more path segment
		reference := s.pop()
		s.push(fmt.Sprintf("%s.%s", reference, ctx.T_STRING().GetText()))
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.Compound_value() != nil:
		s.err = ErrTooComplex
	case ctx.Function_call() != nil:
		s.err = ErrTooComplex
	case ctx.T_OPEN() != nil:
		s.err = ErrTooComplex
	case ctx.T_ARRAY_OPEN() != nil:
		s.err = ErrTooComplex
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitValue_literal is called when production value_literal is exited.
// Quoted strings are re-quoted with Go double-quote syntax (normalizing
// single/double quoting); all other literals are pushed verbatim.
func (s *bleveBuilder) ExitValue_literal(ctx *parser.Value_literalContext) {
	if ctx.T_QUOTED_STRING() != nil {
		// unquote is defined elsewhere in this package; failure here means
		// the lexer accepted a string the unquoter cannot parse.
		st, err := unquote(ctx.GetText())
		if err != nil {
			panic(err)
		}

		s.push(strconv.Quote(st))
	} else {
		s.push(ctx.GetText())
	}
}

View File

@@ -1,60 +0,0 @@
package caql_test
import (
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
// TestBleveBuilder checks that CAQL queries are rebuilt as bleve query
// strings, and that unsupported constructs (e.g. OR) produce rebuild errors.
func TestBleveBuilder(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name           string
		saql           string
		wantBleve      string
		wantParseErr   bool
		wantRebuildErr bool
	}{
		{name: "Search 1", saql: `"Bob"`, wantBleve: `"Bob"`},
		{name: "Search 2", saql: `"Bob" AND title == 'Name'`, wantBleve: `"Bob" title:"Name"`},
		{name: "Search 3", saql: `"Bob" OR title == 'Name'`, wantRebuildErr: true},
		{name: "Search 4", saql: `title == 'malware' AND 'wannacry'`, wantBleve: `title:"malware" "wannacry"`},
	}
	for _, tt := range tests {
		tt := tt
		parser := &caql.Parser{}

		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			expr, err := parser.Parse(tt.saql)
			if (err != nil) != tt.wantParseErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)

				if expr != nil {
					t.Error(expr.String())
				}

				return
			}

			if err != nil {
				return
			}

			got, err := expr.BleveString()
			if (err != nil) != tt.wantRebuildErr {
				t.Error(expr.String())
				// fixed: previously logged tt.wantParseErr here, which made
				// rebuild-error failures report the wrong expectation
				t.Errorf("String() error = %v, wantErr %v", err, tt.wantRebuildErr)

				return
			}

			if err != nil {
				return
			}

			if got != tt.wantBleve {
				t.Errorf("String() got = %v, want %v", got, tt.wantBleve)
			}
		})
	}
}

View File

@@ -1,322 +0,0 @@
package caql
import (
"fmt"
"strconv"
"strings"
"golang.org/x/exp/slices"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// Searcher resolves a full-text search term to the IDs of matching documents.
type Searcher interface {
	Search(term string) (ids []string, err error)
}

// aqlBuilder renders a parsed CAQL expression tree back into an AQL filter
// expression via parse-tree listener callbacks.
type aqlBuilder struct {
	*parser.BaseCAQLParserListener

	// searcher turns bare string literals into document-key membership tests
	// (see toBoolString).
	searcher Searcher
	// stack holds rendered expression fragments during tree traversal.
	stack []string
	// prefix, when set, is prepended to bare references (see ExitReference).
	prefix string
}
// push appends a rendered expression fragment to the listener stack.
func (s *aqlBuilder) push(i string) {
	s.stack = append(s.stack, i)
}
// pop removes and returns the most recently pushed fragment. It panics with
// ErrStack on underflow, which indicates a listener/grammar mismatch rather
// than a user input error.
func (s *aqlBuilder) pop() (n string) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		panic(ErrStack)
	}
	// Pop the last value from the Stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]

	return
}
// binaryPop pops the operands of a binary expression and returns them in
// (left, right) order. Operands are pushed left-first, so the right operand
// sits on top of the stack and is popped first.
func (s *aqlBuilder) binaryPop() (string, string) {
	right, left := s.pop(), s.pop()

	return left, right
}
// ExitExpression is called when production expression is exited.
// It pops the already-rendered operands of the current expression from the
// stack and pushes the rendered AQL form of the whole expression. Operands
// are pushed left-first, so the right operand is always popped first.
func (s *aqlBuilder) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// a literal at the very top of the tree is treated as a full-text
		// search term (see toBoolString)
		if ctx.GetParent().GetParent() == nil {
			s.push(s.toBoolString(s.pop()))
		}
	case ctx.Reference() != nil:
		// map the user-facing "id" field to ArangoDB's internal "_key"
		ref := s.pop()
		if ref == "d.id" {
			s.push("d._key")
		} else {
			s.push(ref)
		}
		// pass
	case ctx.Operator_unary() != nil:
		s.push(s.toBoolString(s.pop()))
	// arithmetic operators
	case ctx.T_PLUS() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s + %s", left, right))
	case ctx.T_MINUS() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s - %s", left, right))
	case ctx.T_TIMES() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s * %s", left, right))
	case ctx.T_DIV() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s / %s", left, right))
	case ctx.T_MOD() != nil:
		// %% renders a literal % through Sprintf
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s %% %s", left, right))
	case ctx.T_RANGE() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s..%s", left, right))
	// comparison operators; the GetEq_op() guard excludes the
	// ALL/ANY/NONE array-comparison forms handled further below
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s < %s", left, right))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s > %s", left, right))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s <= %s", left, right))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s >= %s", left, right))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT IN %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s IN %s", left, right))
		}
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s == %s", left, right))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s != %s", left, right))
	// array comparison operators: <array> ALL|ANY|NONE <op> <value>
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ALL %s %s", left, ctx.GetEq_op().GetText(), right))
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ANY %s %s", left, ctx.GetEq_op().GetText(), right))
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s NONE %s %s", left, ctx.GetEq_op().GetText(), right))
	// NOTE(review): the three cases below match T_NOT but render without
	// NOT ("ALL IN" etc.) — confirm against the CAQL grammar whether an
	// "ALL NOT IN" input should be preserved here
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ALL IN %s", left, right))
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ANY IN %s", left, right))
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s NONE IN %s", left, right))
	case ctx.T_LIKE() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT LIKE %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s LIKE %s", left, right))
		}
	case ctx.T_REGEX_MATCH() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT =~ %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s =~ %s", left, right))
		}
	case ctx.T_REGEX_NON_MATCH() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT !~ %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s !~ %s", left, right))
		}
	// logical operators: bare string operands become full-text searches
	case ctx.T_AND() != nil:
		left, right := s.binaryPop()
		left = s.toBoolString(left)
		right = s.toBoolString(right)
		s.push(fmt.Sprintf("%s AND %s", left, right))
	case ctx.T_OR() != nil:
		left, right := s.binaryPop()
		left = s.toBoolString(left)
		right = s.toBoolString(right)
		s.push(fmt.Sprintf("%s OR %s", left, right))
	// ternary: a ? b : c, and the two-operand short form a ? : c
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		right, middle, left := s.pop(), s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ? %s : %s", left, middle, right))
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ? : %s", left, right))
	default:
		panic("unknown expression")
	}
}
// toBoolString converts a bare string literal into a boolean AQL condition:
// if v unquotes cleanly it is treated as a full-text search term, and the
// condition becomes a membership test of d._key against the matching
// document IDs. Non-literal fragments are returned unchanged.
//
// NOTE(review): with zero search results this renders `d._key IN [""]`
// (matches nothing) — presumably intended; confirm.
func (s *aqlBuilder) toBoolString(v string) string {
	_, err := unquote(v)
	if err == nil {
		ids, err := s.searcher.Search(v)
		if err != nil {
			// searcher failures abort the rebuild; recovered by the caller
			// further up (not visible in this file)
			panic("invalid search " + err.Error())
		}

		return fmt.Sprintf(`d._key IN ["%s"]`, strings.Join(ids, `","`))
	}

	return v
}
// ExitOperator_unary is called when production operator_unary is exited.
// It replaces the operand on top of the stack with the operand rendered
// under the unary operator.
func (s *aqlBuilder) ExitOperator_unary(ctx *parser.Operator_unaryContext) {
	operand := s.pop()

	switch {
	case ctx.T_PLUS() != nil:
		// unary plus is a no-op
		s.push(operand)
	case ctx.T_MINUS() != nil:
		s.push("-" + operand)
	case ctx.T_NOT() != nil:
		s.push("NOT " + operand)
	default:
		panic(fmt.Sprintf("unexpected operation: %s", ctx.GetText()))
	}
}
// ExitReference is called when production reference is exited.
// Bare and dotted references get s.prefix prepended (unless already present);
// parenthesized and subscripted references are re-rendered around their
// already-pushed operands.
func (s *aqlBuilder) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		// extend the reference on the stack with one more path segment
		reference := s.pop()
		if s.prefix != "" && !strings.HasPrefix(reference, s.prefix) {
			reference = s.prefix + reference
		}
		s.push(fmt.Sprintf("%s.%s", reference, ctx.T_STRING().GetText()))
	case ctx.T_STRING() != nil:
		reference := ctx.T_STRING().GetText()
		if s.prefix != "" && !strings.HasPrefix(reference, s.prefix) {
			reference = s.prefix + reference
		}
		s.push(reference)
	case ctx.Compound_value() != nil:
		// pass
	case ctx.Function_call() != nil:
		// pass
	case ctx.T_OPEN() != nil:
		// wrap the inner expression in parentheses
		s.push(fmt.Sprintf("(%s)", s.pop()))
	case ctx.T_ARRAY_OPEN() != nil:
		// subscript: key was pushed after the reference, so it pops first
		key := s.pop()
		reference := s.pop()
		s.push(fmt.Sprintf("%s[%s]", reference, key))
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitCompound_value is called when production compound_value is exited.
// Nothing to do: arrays and objects already pushed their rendered form in
// ExitArray / ExitObject.
func (s *aqlBuilder) ExitCompound_value(_ *parser.Compound_valueContext) {
	// pass
}
// ExitFunction_call is called when production function_call is exited.
// It pops one rendered argument per expression (the stack yields them in
// reverse, so the slice is filled back-to-front), validates the upper-cased
// function name against the known AQL functions, and pushes the rendered
// call. Unknown functions abort the rebuild with a panic.
func (s *aqlBuilder) ExitFunction_call(ctx *parser.Function_callContext) {
	exprs := ctx.AllExpression()
	args := make([]string, len(exprs))
	for i := len(exprs) - 1; i >= 0; i-- {
		args[i] = s.pop()
	}

	name := strings.ToUpper(ctx.T_STRING().GetText())
	if !slices.Contains(functionNames, name) {
		panic("unknown function")
	}

	s.push(name + "(" + strings.Join(args, ", ") + ")")
}
// ExitValue_literal is called when production value_literal is exited.
// Quoted strings are re-quoted with Go double-quote syntax (normalizing
// single/double quoting); all other literals are pushed verbatim.
func (s *aqlBuilder) ExitValue_literal(ctx *parser.Value_literalContext) {
	if ctx.T_QUOTED_STRING() != nil {
		// unquote is defined elsewhere in this package; failure here means
		// the lexer accepted a string the unquoter cannot parse.
		st, err := unquote(ctx.GetText())
		if err != nil {
			panic(err)
		}

		s.push(strconv.Quote(st))
	} else {
		s.push(ctx.GetText())
	}
}
// ExitArray is called when production array is exited.
// Element fragments come off the stack in reverse push order, so the slice
// is filled from the end to restore source order before rendering.
func (s *aqlBuilder) ExitArray(ctx *parser.ArrayContext) {
	exprs := ctx.AllExpression()
	elements := make([]string, len(exprs))
	for i := len(exprs) - 1; i >= 0; i-- {
		elements[i] = s.pop()
	}

	s.push("[" + strings.Join(elements, ", ") + "]")
}
// ExitObject is called when production object is exited.
// ExitObject_element leaves each pair on the stack as (key below value)
// swapped to (value below key), so popping here as key-then-value restores
// the original (key, value) order — the two swaps cancel out.
func (s *aqlBuilder) ExitObject(ctx *parser.ObjectContext) {
	var elements []string
	for range ctx.AllObject_element() {
		key, value := s.pop(), s.pop()
		// prepend to restore source order (stack yields pairs reversed)
		elements = append([]string{fmt.Sprintf("%s: %v", key, value)}, elements...)
	}
	// s.push(object)
	s.push("{" + strings.Join(elements, ", ") + "}")
}
// ExitObject_element is called when production object_element is exited.
// It leaves exactly two fragments on the stack per element, in the swapped
// order that ExitObject expects (see ExitObject).
func (s *aqlBuilder) ExitObject_element(ctx *parser.Object_elementContext) {
	switch {
	case ctx.T_STRING() != nil:
		// shorthand element {x}: push the identifier as both key and value
		// NOTE(review): value is pushed as the bare text, not a prefixed
		// reference — confirm this matches AQL's {x: x} expansion
		s.push(ctx.GetText())
		s.push(ctx.GetText())
	case ctx.Object_element_name() != nil, ctx.T_ARRAY_OPEN() != nil:
		// re-push in swapped order for ExitObject
		key, value := s.pop(), s.pop()
		s.push(key)
		s.push(value)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitObject_element_name is called when production object_element_name is
// exited. It pushes the element's key text (quoted keys keep their quotes).
func (s *aqlBuilder) ExitObject_element_name(ctx *parser.Object_element_nameContext) {
	switch {
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.T_QUOTED_STRING() != nil:
		s.push(ctx.T_QUOTED_STRING().GetText())
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}

View File

@@ -1,8 +0,0 @@
package caql
import "errors"
var (
	// ErrStack reports an operator-stack underflow: a listener popped more
	// operands than were pushed (grammar/listener mismatch).
	ErrStack = errors.New("unexpected operator stack")
	// ErrUndefined reports a reference to a variable with no binding.
	ErrUndefined = errors.New("variable not defined")
)

View File

@@ -1,742 +0,0 @@
package caql
import (
"errors"
"fmt"
"math"
"math/rand"
"sort"
"strings"
"unicode/utf8"
"github.com/imdario/mergo"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
switch strings.ToUpper(ctx.T_STRING().GetText()) {
default:
s.appendErrors(errors.New("unknown function"))
// Array https://www.arangodb.com/docs/stable/aql/functions-array.html
case "APPEND":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
seen := map[any]bool{}
values, anyArray := s.pop().([]any), s.pop().([]any)
if u {
for _, e := range anyArray {
seen[e] = true
}
}
for _, e := range values {
_, ok := seen[e]
if !ok || !u {
seen[e] = true
anyArray = append(anyArray, e)
}
}
s.push(anyArray)
case "COUNT_DISTINCT", "COUNT_UNIQUE":
count := 0
seen := map[any]bool{}
array := s.pop().([]any)
for _, e := range array {
_, ok := seen[e]
if !ok {
seen[e] = true
count++
}
}
s.push(float64(count))
case "FIRST":
array := s.pop().([]any)
if len(array) == 0 {
s.push(nil)
} else {
s.push(array[0])
}
// case "FLATTEN":
// case "INTERLEAVE":
case "INTERSECTION":
iset := NewSet(s.pop().([]any)...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Intersection(NewSet(s.pop().([]any)...))
}
s.push(iset.Values())
// case "JACCARD":
case "LAST":
array := s.pop().([]any)
if len(array) == 0 {
s.push(nil)
} else {
s.push(array[len(array)-1])
}
case "COUNT", "LENGTH":
switch v := s.pop().(type) {
case nil:
s.push(float64(0))
case bool:
if v {
s.push(float64(1))
} else {
s.push(float64(0))
}
case float64:
s.push(float64(len(fmt.Sprint(v))))
case string:
s.push(float64(utf8.RuneCountInString(v)))
case []any:
s.push(float64(len(v)))
case map[string]any:
s.push(float64(len(v)))
default:
panic("unknown type")
}
case "MINUS":
var sets []*Set
for i := 0; i < len(ctx.AllExpression()); i++ {
sets = append(sets, NewSet(s.pop().([]any)...))
}
iset := sets[len(sets)-1]
// for i := len(sets)-1; i > 0; i-- {
for i := 0; i < len(sets)-1; i++ {
iset = iset.Minus(sets[i])
}
s.push(iset.Values())
case "NTH":
pos := s.pop().(float64)
array := s.pop().([]any)
if int(pos) >= len(array) || pos < 0 {
s.push(nil)
} else {
s.push(array[int64(pos)])
}
// case "OUTERSECTION":
// array := s.pop().([]interface{})
// union := NewSet(array...)
// intersection := NewSet(s.pop().([]interface{})...)
// for i := 1; i < len(ctx.AllExpression()); i++ {
// array = s.pop().([]interface{})
// union = union.Union(NewSet(array...))
// intersection = intersection.Intersection(NewSet(array...))
// }
// s.push(union.Minus(intersection).Values())
case "POP":
array := s.pop().([]any)
s.push(array[:len(array)-1])
case "POSITION", "CONTAINS_ARRAY":
returnIndex := false
if len(ctx.AllExpression()) == 3 {
returnIndex = s.pop().(bool)
}
search := s.pop()
array := s.pop().([]any)
for idx, e := range array {
if e == search {
if returnIndex {
s.push(float64(idx))
} else {
s.push(true)
}
}
}
if returnIndex {
s.push(float64(-1))
} else {
s.push(false)
}
case "PUSH":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]any)
if u && contains(array, element) {
s.push(array)
} else {
s.push(append(array, element))
}
case "REMOVE_NTH":
position := s.pop().(float64)
anyArray := s.pop().([]any)
if position < 0 {
position = float64(len(anyArray) + int(position))
}
result := []any{}
for idx, e := range anyArray {
if idx != int(position) {
result = append(result, e)
}
}
s.push(result)
case "REPLACE_NTH":
defaultPaddingValue := ""
if len(ctx.AllExpression()) == 4 {
defaultPaddingValue = s.pop().(string)
}
replaceValue := s.pop().(string)
position := s.pop().(float64)
anyArray := s.pop().([]any)
if position < 0 {
position = float64(len(anyArray) + int(position))
if position < 0 {
position = 0
}
}
switch {
case int(position) < len(anyArray):
anyArray[int(position)] = replaceValue
case int(position) == len(anyArray):
anyArray = append(anyArray, replaceValue)
default:
if defaultPaddingValue == "" {
panic("missing defaultPaddingValue")
}
for len(anyArray) < int(position) {
anyArray = append(anyArray, defaultPaddingValue)
}
anyArray = append(anyArray, replaceValue)
}
s.push(anyArray)
case "REMOVE_VALUE":
limit := math.Inf(1)
if len(ctx.AllExpression()) == 3 {
limit = s.pop().(float64)
}
value := s.pop()
array := s.pop().([]any)
result := []any{}
for idx, e := range array {
if e != value || float64(idx) > limit {
result = append(result, e)
}
}
s.push(result)
case "REMOVE_VALUES":
values := s.pop().([]any)
array := s.pop().([]any)
result := []any{}
for _, e := range array {
if !contains(values, e) {
result = append(result, e)
}
}
s.push(result)
case "REVERSE":
array := s.pop().([]any)
var reverse []any
for _, e := range array {
reverse = append([]any{e}, reverse...)
}
s.push(reverse)
case "SHIFT":
s.push(s.pop().([]any)[1:])
case "SLICE":
length := float64(-1)
full := true
if len(ctx.AllExpression()) == 3 {
length = s.pop().(float64)
full = false
}
start := int64(s.pop().(float64))
array := s.pop().([]any)
if start < 0 {
start = int64(len(array)) + start
}
if full {
length = float64(int64(len(array)) - start)
}
end := int64(0)
if length < 0 {
end = int64(len(array)) + int64(length)
} else {
end = start + int64(length)
}
s.push(array[start:end])
case "SORTED":
array := s.pop().([]any)
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "SORTED_UNIQUE":
array := s.pop().([]any)
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(unique(array))
case "UNION":
array := s.pop().([]any)
for i := 1; i < len(ctx.AllExpression()); i++ {
array = append(array, s.pop().([]any)...)
}
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "UNION_DISTINCT":
iset := NewSet(s.pop().([]any)...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Union(NewSet(s.pop().([]any)...))
}
s.push(unique(iset.Values()))
case "UNIQUE":
s.push(unique(s.pop().([]any)))
case "UNSHIFT":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]any)
if u && contains(array, element) {
s.push(array)
} else {
s.push(append([]any{element}, array...))
}
// Bit https://www.arangodb.com/docs/stable/aql/functions-bit.html
// case "BIT_AND":
// case "BIT_CONSTRUCT":
// case "BIT_DECONSTRUCT":
// case "BIT_FROM_STRING":
// case "BIT_NEGATE":
// case "BIT_OR":
// case "BIT_POPCOUNT":
// case "BIT_SHIFT_LEFT":
// case "BIT_SHIFT_RIGHT":
// case "BIT_TEST":
// case "BIT_TO_STRING":
// case "BIT_XOR":
// Date https://www.arangodb.com/docs/stable/aql/functions-date.html
// case "DATE_NOW":
// case "DATE_ISO8601":
// case "DATE_TIMESTAMP":
// case "IS_DATESTRING":
// case "DATE_DAYOFWEEK":
// case "DATE_YEAR":
// case "DATE_MONTH":
// case "DATE_DAY":
// case "DATE_HOUR":
// case "DATE_MINUTE":
// case "DATE_SECOND":
// case "DATE_MILLISECOND":
// case "DATE_DAYOFYEAR":
// case "DATE_ISOWEEK":
// case "DATE_LEAPYEAR":
// case "DATE_QUARTER":
// case "DATE_DAYS_IN_MONTH":
// case "DATE_TRUNC":
// case "DATE_ROUND":
// case "DATE_FORMAT":
// case "DATE_ADD":
// case "DATE_SUBTRACT":
// case "DATE_DIFF":
// case "DATE_COMPARE":
// Document https://www.arangodb.com/docs/stable/aql/functions-document.html
case "ATTRIBUTES":
if len(ctx.AllExpression()) == 3 {
s.pop() // always sort
}
removeInternal := false
if len(ctx.AllExpression()) >= 2 {
removeInternal = s.pop().(bool)
}
var keys []any
for k := range s.pop().(map[string]any) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
keys = append(keys, k)
}
}
sort.Slice(keys, func(i, j int) bool { return lt(keys[i], keys[j]) })
s.push(keys)
// case "COUNT":
case "HAS":
right, left := s.pop(), s.pop()
_, ok := left.(map[string]any)[right.(string)]
s.push(ok)
// case "KEEP":
// case "LENGTH":
// case "MATCHES":
case "MERGE":
var docs []map[string]any
if len(ctx.AllExpression()) == 1 {
for _, doc := range s.pop().([]any) {
docs = append([]map[string]any{doc.(map[string]any)}, docs...)
}
} else {
for i := 0; i < len(ctx.AllExpression()); i++ {
docs = append(docs, s.pop().(map[string]any))
}
}
doc := docs[len(docs)-1]
for i := len(docs) - 2; i >= 0; i-- {
for k, v := range docs[i] {
doc[k] = v
}
}
s.push(doc)
case "MERGE_RECURSIVE":
var doc map[string]any
for i := 0; i < len(ctx.AllExpression()); i++ {
err := mergo.Merge(&doc, s.pop().(map[string]any))
if err != nil {
panic(err)
}
}
s.push(doc)
// case "PARSE_IDENTIFIER":
// case "TRANSLATE":
// case "UNSET":
// case "UNSET_RECURSIVE":
case "VALUES":
removeInternal := false
if len(ctx.AllExpression()) == 2 {
removeInternal = s.pop().(bool)
}
var values []any
for k, v := range s.pop().(map[string]any) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
values = append(values, v)
}
}
sort.Slice(values, func(i, j int) bool { return lt(values[i], values[j]) })
s.push(values)
// case "ZIP":
// Numeric https://www.arangodb.com/docs/stable/aql/functions-numeric.html
case "ABS":
s.push(math.Abs(s.pop().(float64)))
case "ACOS":
v := s.pop().(float64)
asin := math.Acos(v)
if v > 1 || v < -1 {
s.push(nil)
} else {
s.push(asin)
}
case "ASIN":
v := s.pop().(float64)
asin := math.Asin(v)
if v > 1 || v < -1 {
s.push(nil)
} else {
s.push(asin)
}
case "ATAN":
s.push(math.Atan(s.pop().(float64)))
case "ATAN2":
s.push(math.Atan2(s.pop().(float64), s.pop().(float64)))
case "AVERAGE", "AVG":
count := 0
sum := float64(0)
array := s.pop().([]any)
for _, element := range array {
if element != nil {
count++
sum += toNumber(element)
}
}
if count == 0 {
s.push(nil)
} else {
s.push(sum / float64(count))
}
case "CEIL":
s.push(math.Ceil(s.pop().(float64)))
case "COS":
s.push(math.Cos(s.pop().(float64)))
case "DEGREES":
s.push(s.pop().(float64) * 180 / math.Pi)
case "EXP":
s.push(math.Exp(s.pop().(float64)))
case "EXP2":
s.push(math.Exp2(s.pop().(float64)))
case "FLOOR":
s.push(math.Floor(s.pop().(float64)))
case "LOG":
l := math.Log(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "LOG2":
l := math.Log2(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "LOG10":
l := math.Log10(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "MAX":
var set bool
var max float64
array := s.pop().([]any)
for _, element := range array {
if element != nil {
if !set || toNumber(element) > max {
max = toNumber(element)
set = true
}
}
}
if set {
s.push(max)
} else {
s.push(nil)
}
case "MEDIAN":
array := s.pop().([]any)
var numbers []float64
for _, element := range array {
if f, ok := element.(float64); ok {
numbers = append(numbers, f)
}
}
sort.Float64s(numbers) // sort the numbers
middlePos := len(numbers) / 2
switch {
case len(numbers) == 0:
s.push(nil)
case len(numbers)%2 == 1:
s.push(numbers[middlePos])
default:
s.push((numbers[middlePos-1] + numbers[middlePos]) / 2)
}
case "MIN":
var set bool
var min float64
array := s.pop().([]any)
for _, element := range array {
if element != nil {
if !set || toNumber(element) < min {
min = toNumber(element)
set = true
}
}
}
if set {
s.push(min)
} else {
s.push(nil)
}
// case "PERCENTILE":
case "PI":
s.push(math.Pi)
case "POW":
right, left := s.pop(), s.pop()
s.push(math.Pow(left.(float64), right.(float64)))
case "PRODUCT":
product := float64(1)
array := s.pop().([]any)
for _, element := range array {
if element != nil {
product *= toNumber(element)
}
}
s.push(product)
case "RADIANS":
s.push(s.pop().(float64) * math.Pi / 180)
case "RAND":
s.push(rand.Float64())
case "RANGE":
var array []any
var start, end, step float64
if len(ctx.AllExpression()) == 2 {
right, left := s.pop(), s.pop()
start = math.Trunc(left.(float64))
end = math.Trunc(right.(float64))
step = 1
} else {
middle, right, left := s.pop(), s.pop(), s.pop()
start = left.(float64)
end = right.(float64)
step = middle.(float64)
}
for i := start; i <= end; i += step {
array = append(array, i)
}
s.push(array)
case "ROUND":
x := s.pop().(float64)
t := math.Trunc(x)
if math.Abs(x-t) == 0.5 {
s.push(x + 0.5)
} else {
s.push(math.Round(x))
}
case "SIN":
s.push(math.Sin(s.pop().(float64)))
case "SQRT":
s.push(math.Sqrt(s.pop().(float64)))
// case "STDDEV_POPULATION":
// case "STDDEV_SAMPLE":
// case "STDDEV":
case "SUM":
sum := float64(0)
array := s.pop().([]any)
for _, element := range array {
sum += toNumber(element)
}
s.push(sum)
case "TAN":
s.push(math.Tan(s.pop().(float64)))
// case "VARIANCE_POPULATION", "VARIANCE":
// case "VARIANCE_SAMPLE":
// String https://www.arangodb.com/docs/stable/aql/functions-string.html
// case "CHAR_LENGTH":
// case "CONCAT":
// case "CONCAT_SEPARATOR":
// case "CONTAINS":
// case "CRC32":
// case "ENCODE_URI_COMPONENT":
// case "FIND_FIRST":
// case "FIND_LAST":
// case "FNV64":
// case "IPV4_FROM_NUMBER":
// case "IPV4_TO_NUMBER":
// case "IS_IPV4":
// case "JSON_PARSE":
// case "JSON_STRINGIFY":
// case "LEFT":
// case "LENGTH":
// case "LEVENSHTEIN_DISTANCE":
// case "LIKE":
case "LOWER":
s.push(strings.ToLower(s.pop().(string)))
// case "LTRIM":
// case "MD5":
// case "NGRAM_POSITIONAL_SIMILARITY":
// case "NGRAM_SIMILARITY":
// case "RANDOM_TOKEN":
// case "REGEX_MATCHES":
// case "REGEX_SPLIT":
// case "REGEX_TEST":
// case "REGEX_REPLACE":
// case "REVERSE":
// case "RIGHT":
// case "RTRIM":
// case "SHA1":
// case "SHA512":
// case "SOUNDEX":
// case "SPLIT":
// case "STARTS_WITH":
// case "SUBSTITUTE":
// case "SUBSTRING":
// case "TOKENS":
// case "TO_BASE64":
// case "TO_HEX":
// case "TRIM":
case "UPPER":
s.push(strings.ToUpper(s.pop().(string)))
// case "UUID":
// Type cast https://www.arangodb.com/docs/stable/aql/functions-type-cast.html
case "TO_BOOL":
s.push(toBool(s.pop()))
case "TO_NUMBER":
s.push(toNumber(s.pop()))
// case "TO_STRING":
// case "TO_ARRAY":
// case "TO_LIST":
// case "IS_NULL":
// case "IS_BOOL":
// case "IS_NUMBER":
// case "IS_STRING":
// case "IS_ARRAY":
// case "IS_LIST":
// case "IS_OBJECT":
// case "IS_DOCUMENT":
// case "IS_DATESTRING":
// case "IS_IPV4":
// case "IS_KEY":
// case "TYPENAME":
}
}
// unique returns the elements of array with duplicates removed, keeping the
// first occurrence of each value in its original order. An empty or nil
// input yields a nil slice (append-based accumulation, as before).
func unique(array []any) []any {
	var result []any

	seen := make(map[any]bool, len(array))
	for _, element := range array {
		if seen[element] {
			continue
		}

		seen[element] = true
		result = append(result, element)
	}

	return result
}
// contains reports whether e occurs in values, compared with ==.
// (Comparing incomparable dynamic types, e.g. slices, panics — same as any
// direct interface comparison.)
func contains(values []any, e any) bool {
	for _, candidate := range values {
		if candidate == e {
			return true
		}
	}

	return false
}
// functionNames lists the upper-cased AQL function names accepted by
// ExitFunction_call; calls to anything else panic with "unknown function".
// The set mirrors ArangoDB's AQL function reference.
// NOTE(review): the list contains a few duplicates (COUNT, LENGTH, REVERSE,
// IS_IPV4, IS_DATESTRING) — harmless for membership checks.
var functionNames = []string{
	"APPEND", "COUNT_DISTINCT", "COUNT_UNIQUE", "FIRST", "FLATTEN", "INTERLEAVE", "INTERSECTION", "JACCARD", "LAST",
	"COUNT", "LENGTH", "MINUS", "NTH", "OUTERSECTION", "POP", "POSITION", "CONTAINS_ARRAY", "PUSH", "REMOVE_NTH",
	"REPLACE_NTH", "REMOVE_VALUE", "REMOVE_VALUES", "REVERSE", "SHIFT", "SLICE", "SORTED", "SORTED_UNIQUE", "UNION",
	"UNION_DISTINCT", "UNIQUE", "UNSHIFT", "BIT_AND", "BIT_CONSTRUCT", "BIT_DECONSTRUCT", "BIT_FROM_STRING",
	"BIT_NEGATE", "BIT_OR", "BIT_POPCOUNT", "BIT_SHIFT_LEFT", "BIT_SHIFT_RIGHT", "BIT_TEST", "BIT_TO_STRING",
	"BIT_XOR", "DATE_NOW", "DATE_ISO8601", "DATE_TIMESTAMP", "IS_DATESTRING", "DATE_DAYOFWEEK", "DATE_YEAR",
	"DATE_MONTH", "DATE_DAY", "DATE_HOUR", "DATE_MINUTE", "DATE_SECOND", "DATE_MILLISECOND", "DATE_DAYOFYEAR",
	"DATE_ISOWEEK", "DATE_LEAPYEAR", "DATE_QUARTER", "DATE_DAYS_IN_MONTH", "DATE_TRUNC", "DATE_ROUND", "DATE_FORMAT",
	"DATE_ADD", "DATE_SUBTRACT", "DATE_DIFF", "DATE_COMPARE", "ATTRIBUTES", "COUNT", "HAS", "KEEP", "LENGTH",
	"MATCHES", "MERGE", "MERGE_RECURSIVE", "PARSE_IDENTIFIER", "TRANSLATE", "UNSET", "UNSET_RECURSIVE", "VALUES",
	"ZIP", "ABS", "ACOS", "ASIN", "ATAN", "ATAN2", "AVERAGE", "AVG", "CEIL", "COS", "DEGREES", "EXP", "EXP2", "FLOOR",
	"LOG", "LOG2", "LOG10", "MAX", "MEDIAN", "MIN", "PERCENTILE", "PI", "POW", "PRODUCT", "RADIANS", "RAND", "RANGE",
	"ROUND", "SIN", "SQRT", "STDDEV_POPULATION", "STDDEV_SAMPLE", "STDDEV", "SUM", "TAN", "VARIANCE_POPULATION",
	"VARIANCE", "VARIANCE_SAMPLE", "CHAR_LENGTH", "CONCAT", "CONCAT_SEPARATOR", "CONTAINS", "CRC32",
	"ENCODE_URI_COMPONENT", "FIND_FIRST", "FIND_LAST", "FNV64", "IPV4_FROM_NUMBER", "IPV4_TO_NUMBER", "IS_IPV4",
	"JSON_PARSE", "JSON_STRINGIFY", "LEFT", "LENGTH", "LEVENSHTEIN_DISTANCE", "LIKE", "LOWER", "LTRIM", "MD5",
	"NGRAM_POSITIONAL_SIMILARITY", "NGRAM_SIMILARITY", "RANDOM_TOKEN", "REGEX_MATCHES", "REGEX_SPLIT", "REGEX_TEST",
	"REGEX_REPLACE", "REVERSE", "RIGHT", "RTRIM", "SHA1", "SHA512", "SOUNDEX", "SPLIT", "STARTS_WITH", "SUBSTITUTE",
	"SUBSTRING", "TOKENS", "TO_BASE64", "TO_HEX", "TRIM", "UPPER", "UUID", "TO_BOOL", "TO_NUMBER", "TO_STRING",
	"TO_ARRAY", "TO_LIST", "IS_NULL", "IS_BOOL", "IS_NUMBER", "IS_STRING", "IS_ARRAY", "IS_LIST", "IS_OBJECT",
	"IS_DOCUMENT", "IS_DATESTRING", "IS_IPV4", "IS_KEY", "TYPENAME",
}

View File

@@ -1,392 +0,0 @@
package caql_test
import (
"encoding/json"
"math"
"reflect"
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
// TestFunctions exercises the CAQL function library end-to-end: every case is
// parsed, rebuilt into its canonical string form, and evaluated, with results
// checked against ArangoDB's documented AQL function behaviour.
func TestFunctions(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name           string // case label (usually the function under test)
		saql           string // raw CAQL expression handed to Parse()
		wantRebuild    string // canonical form expected from expr.String()
		wantValue      any    // expected Eval() result; ints are normalised to float64 below
		wantParseErr   bool   // expect Parse() to fail
		wantRebuildErr bool   // expect String() to fail
		wantEvalErr    bool   // expect Eval() to fail
		values         string // optional JSON object bound as the evaluation context
	}{
		// https://www.arangodb.com/docs/3.7/aql/functions-array.html
		{name: "APPEND", saql: `APPEND([1, 2, 3], [5, 6, 9])`, wantRebuild: `APPEND([1, 2, 3], [5, 6, 9])`, wantValue: jsonParse(`[1, 2, 3, 5, 6, 9]`)},
		{name: "APPEND", saql: `APPEND([1, 2, 3], [3, 4, 5, 2, 9], true)`, wantRebuild: `APPEND([1, 2, 3], [3, 4, 5, 2, 9], true)`, wantValue: jsonParse(`[1, 2, 3, 4, 5, 9]`)},
		{name: "COUNT_DISTINCT", saql: `COUNT_DISTINCT([1, 2, 3])`, wantRebuild: `COUNT_DISTINCT([1, 2, 3])`, wantValue: 3},
		{name: "COUNT_DISTINCT", saql: `COUNT_DISTINCT(["yes", "no", "yes", "sauron", "no", "yes"])`, wantRebuild: `COUNT_DISTINCT(["yes", "no", "yes", "sauron", "no", "yes"])`, wantValue: 3},
		{name: "FIRST", saql: `FIRST([1, 2, 3])`, wantRebuild: `FIRST([1, 2, 3])`, wantValue: 1},
		{name: "FIRST", saql: `FIRST([])`, wantRebuild: `FIRST([])`, wantValue: nil},
		// {name: "FLATTEN", saql: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]])`, wantRebuild: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]])`, wantValue:},
		// {name: "FLATTEN", saql: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]], 2)`, wantRebuild: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]], 2)`, wantValue:},
		// {name: "INTERLEAVE", saql: `INTERLEAVE([1, 1, 1], [2, 2, 2], [3, 3, 3])`, wantRebuild: `INTERLEAVE([1, 1, 1], [2, 2, 2], [3, 3, 3])`, wantValue:},
		// {name: "INTERLEAVE", saql: `INTERLEAVE([1], [2, 2], [3, 3, 3])`, wantRebuild: `INTERLEAVE([1], [2, 2], [3, 3, 3])`, wantValue:},
		{name: "INTERSECTION", saql: `INTERSECTION([1,2,3,4,5], [2,3,4,5,6], [3,4,5,6,7])`, wantRebuild: `INTERSECTION([1, 2, 3, 4, 5], [2, 3, 4, 5, 6], [3, 4, 5, 6, 7])`, wantValue: jsonParse(`[3, 4, 5]`)},
		{name: "INTERSECTION", saql: `INTERSECTION([2,4,6], [8,10,12], [14,16,18])`, wantRebuild: `INTERSECTION([2, 4, 6], [8, 10, 12], [14, 16, 18])`, wantValue: jsonParse(`[]`)},
		// {name: "JACCARD", saql: `JACCARD([1,2,3,4], [3,4,5,6])`, wantRebuild: `JACCARD([1,2,3,4], [3,4,5,6])`, wantValue: 0.3333333333333333},
		// {name: "JACCARD", saql: `JACCARD([1,1,2,2,2,3], [2,2,3,4])`, wantRebuild: `JACCARD([1,1,2,2,2,3], [2,2,3,4])`, wantValue: 0.5},
		// {name: "JACCARD", saql: `JACCARD([1,2,3], [])`, wantRebuild: `JACCARD([1, 2, 3], [])`, wantValue: 0},
		// {name: "JACCARD", saql: `JACCARD([], [])`, wantRebuild: `JACCARD([], [])`, wantValue: 1},
		{name: "LAST", saql: `LAST([1,2,3,4,5])`, wantRebuild: `LAST([1, 2, 3, 4, 5])`, wantValue: 5},
		{name: "LENGTH", saql: `LENGTH("🥑")`, wantRebuild: `LENGTH("🥑")`, wantValue: 1},
		{name: "LENGTH", saql: `LENGTH(1234)`, wantRebuild: `LENGTH(1234)`, wantValue: 4},
		{name: "LENGTH", saql: `LENGTH([1,2,3,4,5,6,7])`, wantRebuild: `LENGTH([1, 2, 3, 4, 5, 6, 7])`, wantValue: 7},
		{name: "LENGTH", saql: `LENGTH(false)`, wantRebuild: `LENGTH(false)`, wantValue: 0},
		{name: "LENGTH", saql: `LENGTH({a:1, b:2, c:3, d:4, e:{f:5,g:6}})`, wantRebuild: `LENGTH({a: 1, b: 2, c: 3, d: 4, e: {f: 5, g: 6}})`, wantValue: 5},
		{name: "MINUS", saql: `MINUS([1,2,3,4], [3,4,5,6], [5,6,7,8])`, wantRebuild: `MINUS([1, 2, 3, 4], [3, 4, 5, 6], [5, 6, 7, 8])`, wantValue: jsonParse(`[1, 2]`)},
		{name: "NTH", saql: `NTH(["foo", "bar", "baz"], 2)`, wantRebuild: `NTH(["foo", "bar", "baz"], 2)`, wantValue: "baz"},
		{name: "NTH", saql: `NTH(["foo", "bar", "baz"], 3)`, wantRebuild: `NTH(["foo", "bar", "baz"], 3)`, wantValue: nil},
		{name: "NTH", saql: `NTH(["foo", "bar", "baz"], -1)`, wantRebuild: `NTH(["foo", "bar", "baz"], -1)`, wantValue: nil},
		// {name: "OUTERSECTION", saql: `OUTERSECTION([1, 2, 3], [2, 3, 4], [3, 4, 5])`, wantRebuild: `OUTERSECTION([1, 2, 3], [2, 3, 4], [3, 4, 5])`, wantValue: jsonParse(`[1, 5]`)},
		{name: "POP", saql: `POP([1, 2, 3, 4])`, wantRebuild: `POP([1, 2, 3, 4])`, wantValue: jsonParse(`[1, 2, 3]`)},
		{name: "POP", saql: `POP([1])`, wantRebuild: `POP([1])`, wantValue: jsonParse(`[]`)},
		{name: "POSITION", saql: `POSITION([2,4,6,8], 4)`, wantRebuild: `POSITION([2, 4, 6, 8], 4)`, wantValue: true},
		{name: "POSITION", saql: `POSITION([2,4,6,8], 4, true)`, wantRebuild: `POSITION([2, 4, 6, 8], 4, true)`, wantValue: 1},
		{name: "PUSH", saql: `PUSH([1, 2, 3], 4)`, wantRebuild: `PUSH([1, 2, 3], 4)`, wantValue: jsonParse(`[1, 2, 3, 4]`)},
		{name: "PUSH", saql: `PUSH([1, 2, 2, 3], 2, true)`, wantRebuild: `PUSH([1, 2, 2, 3], 2, true)`, wantValue: jsonParse(`[1, 2, 2, 3]`)},
		{name: "REMOVE_NTH", saql: `REMOVE_NTH(["a", "b", "c", "d", "e"], 1)`, wantRebuild: `REMOVE_NTH(["a", "b", "c", "d", "e"], 1)`, wantValue: jsonParse(`["a", "c", "d", "e"]`)},
		{name: "REMOVE_NTH", saql: `REMOVE_NTH(["a", "b", "c", "d", "e"], -2)`, wantRebuild: `REMOVE_NTH(["a", "b", "c", "d", "e"], -2)`, wantValue: jsonParse(`["a", "b", "c", "e"]`)},
		{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 1 , "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 1, "z")`, wantValue: jsonParse(`["a", "z", "c"]`)},
		{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 3 , "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 3, "z")`, wantValue: jsonParse(`["a", "b", "c", "z"]`)},
		{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 6, "z", "y")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 6, "z", "y")`, wantValue: jsonParse(`["a", "b", "c", "y", "y", "y", "z"]`)},
		{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], -1, "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], -1, "z")`, wantValue: jsonParse(`["a", "b", "z"]`)},
		{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], -9, "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], -9, "z")`, wantValue: jsonParse(`["z", "b", "c"]`)},
		{name: "REMOVE_VALUE", saql: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a")`, wantRebuild: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a")`, wantValue: jsonParse(`["b", "b", "c"]`)},
		{name: "REMOVE_VALUE", saql: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a", 1)`, wantRebuild: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a", 1)`, wantValue: jsonParse(`["b", "b", "a", "c"]`)},
		{name: "REMOVE_VALUES", saql: `REMOVE_VALUES(["a", "a", "b", "c", "d", "e", "f"], ["a", "f", "d"])`, wantRebuild: `REMOVE_VALUES(["a", "a", "b", "c", "d", "e", "f"], ["a", "f", "d"])`, wantValue: jsonParse(`["b", "c", "e"]`)},
		{name: "REVERSE", saql: `REVERSE ([2,4,6,8,10])`, wantRebuild: `REVERSE([2, 4, 6, 8, 10])`, wantValue: jsonParse(`[10, 8, 6, 4, 2]`)},
		{name: "SHIFT", saql: `SHIFT([1, 2, 3, 4])`, wantRebuild: `SHIFT([1, 2, 3, 4])`, wantValue: jsonParse(`[2, 3, 4]`)},
		{name: "SHIFT", saql: `SHIFT([1])`, wantRebuild: `SHIFT([1])`, wantValue: jsonParse(`[]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 0, 1)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 0, 1)`, wantValue: jsonParse(`[1]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 1, 2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 1, 2)`, wantValue: jsonParse(`[2, 3]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 3)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 3)`, wantValue: jsonParse(`[4, 5]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 1, -1)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 1, -1)`, wantValue: jsonParse(`[2, 3, 4]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 0, -2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 0, -2)`, wantValue: jsonParse(`[1, 2, 3]`)},
		{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], -3, 2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], -3, 2)`, wantValue: jsonParse(`[3, 4]`)},
		{name: "SORTED", saql: `SORTED([8,4,2,10,6])`, wantRebuild: `SORTED([8, 4, 2, 10, 6])`, wantValue: jsonParse(`[2, 4, 6, 8, 10]`)},
		{name: "SORTED_UNIQUE", saql: `SORTED_UNIQUE([8,4,2,10,6,2,8,6,4])`, wantRebuild: `SORTED_UNIQUE([8, 4, 2, 10, 6, 2, 8, 6, 4])`, wantValue: jsonParse(`[2, 4, 6, 8, 10]`)},
		{name: "UNION", saql: `UNION([1, 2, 3], [1, 2])`, wantRebuild: `UNION([1, 2, 3], [1, 2])`, wantValue: jsonParse(`[1, 1, 2, 2, 3]`)},
		{name: "UNION_DISTINCT", saql: `UNION_DISTINCT([1, 2, 3], [1, 2])`, wantRebuild: `UNION_DISTINCT([1, 2, 3], [1, 2])`, wantValue: jsonParse(`[1, 2, 3]`)},
		{name: "UNIQUE", saql: `UNIQUE([1,2,2,3,3,3,4,4,4,4,5,5,5,5,5])`, wantRebuild: `UNIQUE([1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5])`, wantValue: jsonParse(`[1, 2, 3, 4, 5]`)},
		{name: "UNSHIFT", saql: `UNSHIFT([1, 2, 3], 4)`, wantRebuild: `UNSHIFT([1, 2, 3], 4)`, wantValue: jsonParse(`[4, 1, 2, 3]`)},
		{name: "UNSHIFT", saql: `UNSHIFT([1, 2, 3], 2, true)`, wantRebuild: `UNSHIFT([1, 2, 3], 2, true)`, wantValue: jsonParse(`[1, 2, 3]`)},
		// https://www.arangodb.com/docs/3.7/aql/functions-bit.html
		// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([1, 2, 3])`, wantRebuild: `BIT_CONSTRUCT([1, 2, 3])`, wantValue: 14},
		// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([0, 4, 8])`, wantRebuild: `BIT_CONSTRUCT([0, 4, 8])`, wantValue: 273},
		// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([0, 1, 10, 31])`, wantRebuild: `BIT_CONSTRUCT([0, 1, 10, 31])`, wantValue: 2147484675},
		// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(14)`, wantRebuild: `BIT_DECONSTRUCT(14) `, wantValue: []interface{}{1, 2, 3}},
		// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(273)`, wantRebuild: `BIT_DECONSTRUCT(273)`, wantValue: []interface{}{0, 4, 8}},
		// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(2147484675)`, wantRebuild: `BIT_DECONSTRUCT(2147484675)`, wantValue: []interface{}{0, 1, 10, 31}},
		// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("0111")`, wantRebuild: `BIT_FROM_STRING("0111")`, wantValue: 7},
		// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("000000000000010")`, wantRebuild: `BIT_FROM_STRING("000000000000010")`, wantValue: 2},
		// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("11010111011101")`, wantRebuild: `BIT_FROM_STRING("11010111011101")`, wantValue: 13789},
		// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("100000000000000000000")`, wantRebuild: `BIT_FROM_STRING("100000000000000000000")`, wantValue: 1048756},
		// {name: "BIT_NEGATE", saql: `BIT_NEGATE(0, 8)`, wantRebuild: `BIT_NEGATE(0, 8)`, wantValue: 255},
		// {name: "BIT_NEGATE", saql: `BIT_NEGATE(0, 10)`, wantRebuild: `BIT_NEGATE(0, 10)`, wantValue: 1023},
		// {name: "BIT_NEGATE", saql: `BIT_NEGATE(3, 4)`, wantRebuild: `BIT_NEGATE(3, 4)`, wantValue: 12},
		// {name: "BIT_NEGATE", saql: `BIT_NEGATE(446359921, 32)`, wantRebuild: `BIT_NEGATE(446359921, 32)`, wantValue: 3848607374},
		// {name: "BIT_OR", saql: `BIT_OR([1, 4, 8, 16])`, wantRebuild: `BIT_OR([1, 4, 8, 16])`, wantValue: 29},
		// {name: "BIT_OR", saql: `BIT_OR([3, 7, 63])`, wantRebuild: `BIT_OR([3, 7, 63])`, wantValue: 63},
		// {name: "BIT_OR", saql: `BIT_OR([255, 127, null, 63])`, wantRebuild: `BIT_OR([255, 127, null, 63])`, wantValue: 255},
		// {name: "BIT_OR", saql: `BIT_OR(255, 127)`, wantRebuild: `BIT_OR(255, 127)`, wantValue: 255},
		// {name: "BIT_OR", saql: `BIT_OR("foo")`, wantRebuild: `BIT_OR("foo")`, wantValue: nil},
		// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(0)`, wantRebuild: `BIT_POPCOUNT(0)`, wantValue: 0},
		// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(255)`, wantRebuild: `BIT_POPCOUNT(255)`, wantValue: 8},
		// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(69399252)`, wantRebuild: `BIT_POPCOUNT(69399252)`, wantValue: 12},
		// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT("foo")`, wantRebuild: `BIT_POPCOUNT("foo")`, wantValue: nil},
		// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(0, 1, 8)`, wantRebuild: `BIT_SHIFT_LEFT(0, 1, 8)`, wantValue: 0},
		// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(7, 1, 16)`, wantRebuild: `BIT_SHIFT_LEFT(7, 1, 16)`, wantValue: 14},
		// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(2, 10, 16)`, wantRebuild: `BIT_SHIFT_LEFT(2, 10, 16)`, wantValue: 2048},
		// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(878836, 16, 32)`, wantRebuild: `BIT_SHIFT_LEFT(878836, 16, 32)`, wantValue: 1760821248},
		// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(0, 1, 8)`, wantRebuild: `BIT_SHIFT_RIGHT(0, 1, 8)`, wantValue: 0},
		// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(33, 1, 16)`, wantRebuild: `BIT_SHIFT_RIGHT(33, 1, 16)`, wantValue: 16},
		// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(65536, 13, 16)`, wantRebuild: `BIT_SHIFT_RIGHT(65536, 13, 16)`, wantValue: 8},
		// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(878836, 4, 32)`, wantRebuild: `BIT_SHIFT_RIGHT(878836, 4, 32)`, wantValue: 54927},
		// {name: "BIT_TEST", saql: `BIT_TEST(0, 3)`, wantRebuild: `BIT_TEST(0, 3)`, wantValue: false},
		// {name: "BIT_TEST", saql: `BIT_TEST(255, 0)`, wantRebuild: `BIT_TEST(255, 0)`, wantValue: true},
		// {name: "BIT_TEST", saql: `BIT_TEST(7, 2)`, wantRebuild: `BIT_TEST(7, 2)`, wantValue: true},
		// {name: "BIT_TEST", saql: `BIT_TEST(255, 8)`, wantRebuild: `BIT_TEST(255, 8)`, wantValue: false},
		// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(7, 4)`, wantRebuild: `BIT_TO_STRING(7, 4)`, wantValue: "0111"},
		// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(255, 8)`, wantRebuild: `BIT_TO_STRING(255, 8)`, wantValue: "11111111"},
		// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(60, 8)`, wantRebuild: `BIT_TO_STRING(60, 8)`, wantValue: "00011110"},
		// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(1048576, 32)`, wantRebuild: `BIT_TO_STRING(1048576, 32)`, wantValue: "00000000000100000000000000000000"},
		// {name: "BIT_XOR", saql: `BIT_XOR([1, 4, 8, 16])`, wantRebuild: `BIT_XOR([1, 4, 8, 16])`, wantValue: 29},
		// {name: "BIT_XOR", saql: `BIT_XOR([3, 7, 63])`, wantRebuild: `BIT_XOR([3, 7, 63])`, wantValue: 59},
		// {name: "BIT_XOR", saql: `BIT_XOR([255, 127, null, 63])`, wantRebuild: `BIT_XOR([255, 127, null, 63])`, wantValue: 191},
		// {name: "BIT_XOR", saql: `BIT_XOR(255, 257)`, wantRebuild: `BIT_XOR(255, 257)`, wantValue: 510},
		// {name: "BIT_XOR", saql: `BIT_XOR("foo")`, wantRebuild: `BIT_XOR("foo")`, wantValue: nil},
		// https://www.arangodb.com/docs/3.7/aql/functions-date.html
		// DATE_TIMESTAMP("2014-05-07T14:19:09.522")
		// DATE_TIMESTAMP("2014-05-07T14:19:09.522Z")
		// DATE_TIMESTAMP("2014-05-07 14:19:09.522")
		// DATE_TIMESTAMP("2014-05-07 14:19:09.522Z")
		// DATE_TIMESTAMP(2014, 5, 7, 14, 19, 9, 522)
		// DATE_TIMESTAMP(1399472349522)
		// DATE_ISO8601("2014-05-07T14:19:09.522Z")
		// DATE_ISO8601("2014-05-07 14:19:09.522Z")
		// DATE_ISO8601(2014, 5, 7, 14, 19, 9, 522)
		// DATE_ISO8601(1399472349522)
		// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(2016, 12, -1)`, wantRebuild: `DATE_TIMESTAMP(2016, 12, -1)`, wantValue: nil},
		// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(2016, 2, 32)`, wantRebuild: `DATE_TIMESTAMP(2016, 2, 32)`, wantValue: 1456963200000},
		// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(1970, 1, 1, 26)`, wantRebuild: `DATE_TIMESTAMP(1970, 1, 1, 26)`, wantValue: 93600000},
		// {name: "DATE_TRUNC", saql: `DATE_TRUNC('2017-02-03', 'month')`, wantRebuild: `DATE_TRUNC('2017-02-03', 'month')`, wantValue: "2017-02-01T00:00:00.000Z"},
		// {name: "DATE_TRUNC", saql: `DATE_TRUNC('2017-02-03 04:05:06', 'hours')`, wantRebuild: `DATE_TRUNC('2017-02-03 04:05:06', 'hours')`, wantValue: "2017-02-03 04:00:00.000Z"},
		// {name: "DATE_ROUND", saql: `DATE_ROUND('2000-04-28T11:11:11.111Z', 1, 'day')`, wantRebuild: `DATE_ROUND('2000-04-28T11:11:11.111Z', 1, 'day')`, wantValue: "2000-04-28T00:00:00.000Z"},
		// {name: "DATE_ROUND", saql: `DATE_ROUND('2000-04-10T11:39:29Z', 15, 'minutes')`, wantRebuild: `DATE_ROUND('2000-04-10T11:39:29Z', 15, 'minutes')`, wantValue: "2000-04-10T11:30:00.000Z"},
		// {name: "DATE_FORMAT", saql: `DATE_FORMAT(DATE_NOW(), "%q/%yyyy")`, wantRebuild: `DATE_FORMAT(DATE_NOW(), "%q/%yyyy")`},
		// {name: "DATE_FORMAT", saql: `DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss,%fff")`, wantRebuild: `DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss,%fff")`, wantValue: "18.09.2015 15:30:49,374"},
		// {name: "DATE_FORMAT", saql: `DATE_FORMAT("1969", "Summer of '%yy")`, wantRebuild: `DATE_FORMAT("1969", "Summer of '%yy")`, wantValue: "Summer of '69"},
		// {name: "DATE_FORMAT", saql: `DATE_FORMAT("2016", "%%l = %l")`, wantRebuild: `DATE_FORMAT("2016", "%%l = %l")`, wantValue: "%l = 1"},
		// {name: "DATE_FORMAT", saql: `DATE_FORMAT("2016-03-01", "%xxx%")`, wantRebuild: `DATE_FORMAT("2016-03-01", "%xxx%")`, wantValue: "063, trailing % ignored"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), -1, "day")`, wantRebuild: `DATE_ADD(DATE_NOW(), -1, "day")`, wantValue: "yesterday; also see DATE_SUBTRACT()"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), 3, "months")`, wantRebuild: `DATE_ADD(DATE_NOW(), 3, "months")`, wantValue: "in three months"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_ADD("2015-04-01", 5, "years"), 1, "month")`, wantRebuild: `DATE_ADD(DATE_ADD("2015-04-01", 5, "years"), 1, "month")`, wantValue: "May 1st 2020"},
		// {name: "DATE_ADD", saql: `DATE_ADD("2015-04-01", 12*5 + 1, "months")`, wantRebuild: `DATE_ADD("2015-04-01", 12*5 + 1, "months")`, wantValue: "also May 1st 2020"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), -4, "years")`, wantRebuild: `DATE_ADD(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), -4, "years")`, wantValue: "Christmas four years ago"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_ADD("2016-02", "month", 1), -1, "day")`, wantRebuild: `DATE_ADD(DATE_ADD("2016-02", "month", 1), -1, "day")`, wantValue: "last day of February (29th, because 2016 is a leap year!)"},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P1Y")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P1Y")`},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P3M2W")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P3M2W")`},
		// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P5DT26H")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P5DT26H")`},
		// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "PT4H")`, wantRebuild: `DATE_ADD("2000-01-01", "PT4H")`},
		// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "PT30M44.4S"`, wantRebuild: `DATE_ADD("2000-01-01", "PT30M44.4S"`},
		// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "P1Y2M3W4DT5H6M7.89S"`, wantRebuild: `DATE_ADD("2000-01-01", "P1Y2M3W4DT5H6M7.89S"`},
		// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), 1, "day")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), 1, "day")`},
		// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), 4, "years")`, wantRebuild: `DATE_SUBTRACT(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), 4, "years")`},
		// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_ADD("2016-02", "month", 1), 1, "day")`, wantRebuild: `DATE_SUBTRACT(DATE_ADD("2016-02", "month", 1), 1, "day")`},
		// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), "P4D")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), "P4D")`},
		// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), "PT1H3M")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), "PT1H3M")`},
		// DATE_COMPARE("1985-04-04", DATE_NOW(), "months", "days")
		// DATE_COMPARE("1984-02-29", DATE_NOW(), "months", "days")
		// DATE_COMPARE("2001-01-01T15:30:45.678Z", "2001-01-01T08:08:08.008Z", "years", "days")
		// https://www.arangodb.com/docs/3.7/aql/functions-document.html
		{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"})`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"})`, wantValue: jsonParse(`["_custom", "_key", "foo"]`)},
		{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, true)`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, true)`, wantValue: jsonParse(`["foo"]`)},
		{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, false, true)`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, false, true)`, wantValue: jsonParse(`["_custom", "_key", "foo"]`)},
		{name: "HAS", saql: `HAS({name: "Jane"}, "name")`, wantRebuild: `HAS({name: "Jane"}, "name")`, wantValue: true},
		{name: "HAS", saql: `HAS({name: "Jane"}, "age")`, wantRebuild: `HAS({name: "Jane"}, "age")`, wantValue: false},
		{name: "HAS", saql: `HAS({name: null}, "name")`, wantRebuild: `HAS({name: null}, "name")`, wantValue: true},
		// KEEP(doc, "firstname", "name", "likes")
		// KEEP(doc, ["firstname", "name", "likes"])
		// MATCHES({name: "jane", age: 27, active: true}, {age: 27, active: true})
		// MATCHES({"test": 1}, [{"test": 1, "foo": "bar"}, {"foo": 1}, {"test": 1}], true)
		{name: "MERGE", saql: `MERGE({"user1": {"name": "Jane"}}, {"user2": {"name": "Tom"}})`, wantRebuild: `MERGE({"user1": {"name": "Jane"}}, {"user2": {"name": "Tom"}})`, wantValue: jsonParse(`{"user1": {"name": "Jane"}, "user2": {"name": "Tom"}}`)},
		{name: "MERGE", saql: `MERGE({"users": {"name": "Jane"}}, {"users": {"name": "Tom"}})`, wantRebuild: `MERGE({"users": {"name": "Jane"}}, {"users": {"name": "Tom"}})`, wantValue: jsonParse(`{"users": {"name": "Tom"}}`)},
		{name: "MERGE", saql: `MERGE([{foo: "bar"}, {quux: "quetzalcoatl", ruled: true}, {bar: "baz", foo: "done"}])`, wantRebuild: `MERGE([{foo: "bar"}, {quux: "quetzalcoatl", ruled: true}, {bar: "baz", foo: "done"}])`, wantValue: jsonParse(`{"foo": "done", "quux": "quetzalcoatl", "ruled": true, "bar": "baz"}`)},
		{name: "MERGE_RECURSIVE", saql: `MERGE_RECURSIVE({"user-1": {"name": "Jane", "livesIn": {"city": "LA"}}}, {"user-1": {"age": 42, "livesIn": {"state": "CA"}}})`, wantRebuild: `MERGE_RECURSIVE({"user-1": {"name": "Jane", "livesIn": {"city": "LA"}}}, {"user-1": {"age": 42, "livesIn": {"state": "CA"}}})`, wantValue: jsonParse(`{"user-1": {"name": "Jane", "livesIn": {"city": "LA", "state": "CA"}, "age": 42}}`)},
		// {name: "TRANSLATE", saql: `TRANSLATE("FR", {US: "United States", UK: "United Kingdom", FR: "France"})`, wantRebuild: `TRANSLATE("FR", {US: "United States", UK: "United Kingdom", FR: "France"})`, wantValue: "France"},
		// {name: "TRANSLATE", saql: `TRANSLATE(42, {foo: "bar", bar: "baz"})`, wantRebuild: `TRANSLATE(42, {foo: "bar", bar: "baz"})`, wantValue: 42},
		// {name: "TRANSLATE", saql: `TRANSLATE(42, {foo: "bar", bar: "baz"}, "not found!")`, wantRebuild: `TRANSLATE(42, {foo: "bar", bar: "baz"}, "not found!")`, wantValue: "not found!"},
		// UNSET(doc, "_id", "_key", "foo", "bar")
		// UNSET(doc, ["_id", "_key", "foo", "bar"])
		// UNSET_RECURSIVE(doc, "_id", "_key", "foo", "bar")
		// UNSET_RECURSIVE(doc, ["_id", "_key", "foo", "bar"])
		{name: "VALUES", saql: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35})`, wantRebuild: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35})`, wantValue: jsonParse(`[35, "Jane", "users/jane"]`)},
		{name: "VALUES", saql: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35}, true)`, wantRebuild: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35}, true)`, wantValue: jsonParse(`[35, "Jane"]`)},
		// {name: "ZIP", saql: `ZIP(["name", "active", "hobbies"], ["some user", true, ["swimming", "riding"]])`, wantRebuild: `ZIP(["name", "active", "hobbies"], ["some user", true, ["swimming", "riding"]])`, wantValue: jsonParse(`{"name": "some user", "active": true, "hobbies": ["swimming", "riding"]}`)},
		// https://www.arangodb.com/docs/3.7/aql/functions-numeric.html
		{name: "ABS", saql: `ABS(-5)`, wantRebuild: `ABS(-5)`, wantValue: 5},
		{name: "ABS", saql: `ABS(+5)`, wantRebuild: `ABS(5)`, wantValue: 5},
		{name: "ABS", saql: `ABS(3.5)`, wantRebuild: `ABS(3.5)`, wantValue: 3.5},
		{name: "ACOS", saql: `ACOS(-1)`, wantRebuild: `ACOS(-1)`, wantValue: 3.141592653589793},
		{name: "ACOS", saql: `ACOS(0)`, wantRebuild: `ACOS(0)`, wantValue: 1.5707963267948966},
		{name: "ACOS", saql: `ACOS(1)`, wantRebuild: `ACOS(1)`, wantValue: 0},
		{name: "ACOS", saql: `ACOS(2)`, wantRebuild: `ACOS(2)`, wantValue: nil},
		{name: "ASIN", saql: `ASIN(1)`, wantRebuild: `ASIN(1)`, wantValue: 1.5707963267948966},
		{name: "ASIN", saql: `ASIN(0)`, wantRebuild: `ASIN(0)`, wantValue: 0},
		{name: "ASIN", saql: `ASIN(-1)`, wantRebuild: `ASIN(-1)`, wantValue: -1.5707963267948966},
		{name: "ASIN", saql: `ASIN(2)`, wantRebuild: `ASIN(2)`, wantValue: nil},
		{name: "ATAN", saql: `ATAN(-1)`, wantRebuild: `ATAN(-1)`, wantValue: -0.7853981633974483},
		{name: "ATAN", saql: `ATAN(0)`, wantRebuild: `ATAN(0)`, wantValue: 0},
		{name: "ATAN", saql: `ATAN(10)`, wantRebuild: `ATAN(10)`, wantValue: 1.4711276743037347},
		{name: "AVERAGE", saql: `AVERAGE([5, 2, 9, 2])`, wantRebuild: `AVERAGE([5, 2, 9, 2])`, wantValue: 4.5},
		{name: "AVERAGE", saql: `AVERAGE([-3, -5, 2])`, wantRebuild: `AVERAGE([-3, -5, 2])`, wantValue: -2},
		{name: "AVERAGE", saql: `AVERAGE([999, 80, 4, 4, 4, 3, 3, 3])`, wantRebuild: `AVERAGE([999, 80, 4, 4, 4, 3, 3, 3])`, wantValue: 137.5},
		{name: "CEIL", saql: `CEIL(2.49)`, wantRebuild: `CEIL(2.49)`, wantValue: 3},
		{name: "CEIL", saql: `CEIL(2.50)`, wantRebuild: `CEIL(2.50)`, wantValue: 3},
		{name: "CEIL", saql: `CEIL(-2.50)`, wantRebuild: `CEIL(-2.50)`, wantValue: -2},
		{name: "CEIL", saql: `CEIL(-2.51)`, wantRebuild: `CEIL(-2.51)`, wantValue: -2},
		{name: "COS", saql: `COS(1)`, wantRebuild: `COS(1)`, wantValue: 0.5403023058681398},
		{name: "COS", saql: `COS(0)`, wantRebuild: `COS(0)`, wantValue: 1},
		{name: "COS", saql: `COS(-3.141592653589783)`, wantRebuild: `COS(-3.141592653589783)`, wantValue: -1},
		{name: "COS", saql: `COS(RADIANS(45))`, wantRebuild: `COS(RADIANS(45))`, wantValue: 0.7071067811865476},
		{name: "DEGREES", saql: `DEGREES(0.7853981633974483)`, wantRebuild: `DEGREES(0.7853981633974483)`, wantValue: 45},
		{name: "DEGREES", saql: `DEGREES(0)`, wantRebuild: `DEGREES(0)`, wantValue: 0},
		{name: "DEGREES", saql: `DEGREES(3.141592653589793)`, wantRebuild: `DEGREES(3.141592653589793)`, wantValue: 180},
		{name: "EXP", saql: `EXP(1)`, wantRebuild: `EXP(1)`, wantValue: 2.718281828459045},
		{name: "EXP", saql: `EXP(10)`, wantRebuild: `EXP(10)`, wantValue: 22026.46579480671},
		{name: "EXP", saql: `EXP(0)`, wantRebuild: `EXP(0)`, wantValue: 1},
		{name: "EXP2", saql: `EXP2(16)`, wantRebuild: `EXP2(16)`, wantValue: 65536},
		{name: "EXP2", saql: `EXP2(1)`, wantRebuild: `EXP2(1)`, wantValue: 2},
		{name: "EXP2", saql: `EXP2(0)`, wantRebuild: `EXP2(0)`, wantValue: 1},
		{name: "FLOOR", saql: `FLOOR(2.49)`, wantRebuild: `FLOOR(2.49)`, wantValue: 2},
		{name: "FLOOR", saql: `FLOOR(2.50)`, wantRebuild: `FLOOR(2.50)`, wantValue: 2},
		{name: "FLOOR", saql: `FLOOR(-2.50)`, wantRebuild: `FLOOR(-2.50)`, wantValue: -3},
		{name: "FLOOR", saql: `FLOOR(-2.51)`, wantRebuild: `FLOOR(-2.51)`, wantValue: -3},
		{name: "LOG", saql: `LOG(2.718281828459045)`, wantRebuild: `LOG(2.718281828459045)`, wantValue: 1},
		{name: "LOG", saql: `LOG(10)`, wantRebuild: `LOG(10)`, wantValue: 2.302585092994046},
		{name: "LOG", saql: `LOG(0)`, wantRebuild: `LOG(0)`, wantValue: nil},
		{name: "LOG2", saql: `LOG2(1024)`, wantRebuild: `LOG2(1024)`, wantValue: 10},
		{name: "LOG2", saql: `LOG2(8)`, wantRebuild: `LOG2(8)`, wantValue: 3},
		{name: "LOG2", saql: `LOG2(0)`, wantRebuild: `LOG2(0)`, wantValue: nil},
		{name: "LOG10", saql: `LOG10(10000)`, wantRebuild: `LOG10(10000)`, wantValue: 4},
		{name: "LOG10", saql: `LOG10(10)`, wantRebuild: `LOG10(10)`, wantValue: 1},
		{name: "LOG10", saql: `LOG10(0)`, wantRebuild: `LOG10(0)`, wantValue: nil},
		{name: "MAX", saql: `MAX([5, 9, -2, null, 1])`, wantRebuild: `MAX([5, 9, -2, null, 1])`, wantValue: 9},
		{name: "MAX", saql: `MAX([null, null])`, wantRebuild: `MAX([null, null])`, wantValue: nil},
		{name: "MEDIAN", saql: `MEDIAN([1, 2, 3])`, wantRebuild: `MEDIAN([1, 2, 3])`, wantValue: 2},
		{name: "MEDIAN", saql: `MEDIAN([1, 2, 3, 4])`, wantRebuild: `MEDIAN([1, 2, 3, 4])`, wantValue: 2.5},
		{name: "MEDIAN", saql: `MEDIAN([4, 2, 3, 1])`, wantRebuild: `MEDIAN([4, 2, 3, 1])`, wantValue: 2.5},
		{name: "MEDIAN", saql: `MEDIAN([999, 80, 4, 4, 4, 3, 3, 3])`, wantRebuild: `MEDIAN([999, 80, 4, 4, 4, 3, 3, 3])`, wantValue: 4},
		{name: "MIN", saql: `MIN([5, 9, -2, null, 1])`, wantRebuild: `MIN([5, 9, -2, null, 1])`, wantValue: -2},
		{name: "MIN", saql: `MIN([null, null])`, wantRebuild: `MIN([null, null])`, wantValue: nil},
		// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50)`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50)`, wantValue: 2},
		// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50, "rank")`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50, "rank")`, wantValue: 2},
		// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50, "interpolation")`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50, "interpolation")`, wantValue: 2.5},
		{name: "PI", saql: `PI()`, wantRebuild: `PI()`, wantValue: 3.141592653589793},
		{name: "POW", saql: `POW(2, 4)`, wantRebuild: `POW(2, 4)`, wantValue: 16},
		{name: "POW", saql: `POW(5, -1)`, wantRebuild: `POW(5, -1)`, wantValue: 0.2},
		{name: "POW", saql: `POW(5, 0)`, wantRebuild: `POW(5, 0)`, wantValue: 1},
		{name: "PRODUCT", saql: `PRODUCT([1, 2, 3, 4])`, wantRebuild: `PRODUCT([1, 2, 3, 4])`, wantValue: 24},
		{name: "PRODUCT", saql: `PRODUCT([null, -5, 6])`, wantRebuild: `PRODUCT([null, -5, 6])`, wantValue: -30},
		{name: "PRODUCT", saql: `PRODUCT([])`, wantRebuild: `PRODUCT([])`, wantValue: 1},
		{name: "RADIANS", saql: `RADIANS(180)`, wantRebuild: `RADIANS(180)`, wantValue: 3.141592653589793},
		{name: "RADIANS", saql: `RADIANS(90)`, wantRebuild: `RADIANS(90)`, wantValue: 1.5707963267948966},
		{name: "RADIANS", saql: `RADIANS(0)`, wantRebuild: `RADIANS(0)`, wantValue: 0},
		// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.3503170117504508},
		// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.6138226173882478},
		{name: "RANGE", saql: `RANGE(1, 4)`, wantRebuild: `RANGE(1, 4)`, wantValue: []any{float64(1), float64(2), float64(3), float64(4)}},
		{name: "RANGE", saql: `RANGE(1, 4, 2)`, wantRebuild: `RANGE(1, 4, 2)`, wantValue: []any{float64(1), float64(3)}},
		{name: "RANGE", saql: `RANGE(1, 4, 3)`, wantRebuild: `RANGE(1, 4, 3)`, wantValue: []any{float64(1), float64(4)}},
		{name: "RANGE", saql: `RANGE(1.5, 2.5)`, wantRebuild: `RANGE(1.5, 2.5)`, wantValue: []any{float64(1), float64(2)}},
		{name: "RANGE", saql: `RANGE(1.5, 2.5, 1)`, wantRebuild: `RANGE(1.5, 2.5, 1)`, wantValue: []any{1.5, 2.5}},
		{name: "RANGE", saql: `RANGE(1.5, 2.5, 0.5)`, wantRebuild: `RANGE(1.5, 2.5, 0.5)`, wantValue: []any{1.5, 2.0, 2.5}},
		{name: "RANGE", saql: `RANGE(-0.75, 1.1, 0.5)`, wantRebuild: `RANGE(-0.75, 1.1, 0.5)`, wantValue: []any{-0.75, -0.25, 0.25, 0.75}},
		{name: "ROUND", saql: `ROUND(2.49)`, wantRebuild: `ROUND(2.49)`, wantValue: 2},
		{name: "ROUND", saql: `ROUND(2.50)`, wantRebuild: `ROUND(2.50)`, wantValue: 3},
		{name: "ROUND", saql: `ROUND(-2.50)`, wantRebuild: `ROUND(-2.50)`, wantValue: -2},
		{name: "ROUND", saql: `ROUND(-2.51)`, wantRebuild: `ROUND(-2.51)`, wantValue: -3},
		{name: "SQRT", saql: `SQRT(9)`, wantRebuild: `SQRT(9)`, wantValue: 3},
		{name: "SQRT", saql: `SQRT(2)`, wantRebuild: `SQRT(2)`, wantValue: 1.4142135623730951},
		{name: "POW", saql: `POW(4096, 1/4)`, wantRebuild: `POW(4096, 1 / 4)`, wantValue: 8},
		{name: "POW", saql: `POW(27, 1/3)`, wantRebuild: `POW(27, 1 / 3)`, wantValue: 3},
		{name: "POW", saql: `POW(9, 1/2)`, wantRebuild: `POW(9, 1 / 2)`, wantValue: 3},
		// {name: "STDDEV_POPULATION", saql: `STDDEV_POPULATION([1, 3, 6, 5, 2])`, wantRebuild: `STDDEV_POPULATION([1, 3, 6, 5, 2])`, wantValue: 1.854723699099141},
		// {name: "STDDEV_SAMPLE", saql: `STDDEV_SAMPLE([1, 3, 6, 5, 2])`, wantRebuild: `STDDEV_SAMPLE([1, 3, 6, 5, 2])`, wantValue: 2.0736441353327724},
		{name: "SUM", saql: `SUM([1, 2, 3, 4])`, wantRebuild: `SUM([1, 2, 3, 4])`, wantValue: 10},
		{name: "SUM", saql: `SUM([null, -5, 6])`, wantRebuild: `SUM([null, -5, 6])`, wantValue: 1},
		{name: "SUM", saql: `SUM([])`, wantRebuild: `SUM([])`, wantValue: 0},
		{name: "TAN", saql: `TAN(10)`, wantRebuild: `TAN(10)`, wantValue: 0.6483608274590866},
		{name: "TAN", saql: `TAN(5)`, wantRebuild: `TAN(5)`, wantValue: -3.380515006246586},
		{name: "TAN", saql: `TAN(0)`, wantRebuild: `TAN(0)`, wantValue: 0},
		// {name: "VARIANCE_POPULATION", saql: `VARIANCE_POPULATION([1, 3, 6, 5, 2])`, wantRebuild: `VARIANCE_POPULATION([1, 3, 6, 5, 2])`, wantValue: 3.4400000000000004},
		// {name: "VARIANCE_SAMPLE", saql: `VARIANCE_SAMPLE([1, 3, 6, 5, 2])`, wantRebuild: `VARIANCE_SAMPLE([1, 3, 6, 5, 2])`, wantValue: 4.300000000000001},
		// Errors
		{name: "Function Error 1", saql: "UNKNOWN(value)", wantRebuild: "UNKNOWN(value)", wantRebuildErr: true, wantEvalErr: true, values: `{"value": true}`},
		{name: "Function Error 2", saql: "ABS(value, value2)", wantRebuild: "ABS(value, value2)", wantEvalErr: true, values: `{"value": true, "value2": false}`},
		{name: "Function Error 3", saql: `ABS("abs")`, wantRebuild: `ABS("abs")`, wantEvalErr: true},
	}

	for _, tt := range tests {
		tt := tt // capture the range variable for the parallel subtest

		parser := &caql.Parser{}

		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			// Stage 1: parse.
			expr, err := parser.Parse(tt.saql)
			if (err != nil) != tt.wantParseErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
				if expr != nil {
					t.Error(expr.String())
				}

				return
			}
			if err != nil {
				return
			}

			// Stage 2: rebuild the canonical string form.
			got, err := expr.String()
			if (err != nil) != tt.wantRebuildErr {
				t.Error(expr.String())
				// Report the rebuild expectation here (the original message
				// mistakenly printed tt.wantParseErr).
				t.Errorf("String() error = %v, wantErr %v", err, tt.wantRebuildErr)

				return
			}
			if err != nil {
				return
			}

			if got != tt.wantRebuild {
				t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
			}

			// Decode the optional evaluation context.
			var myJSON map[string]any
			if tt.values != "" {
				if err := json.Unmarshal([]byte(tt.values), &myJSON); err != nil {
					t.Fatal(err)
				}
			}

			// Stage 3: evaluate.
			value, err := expr.Eval(myJSON)
			if (err != nil) != tt.wantEvalErr {
				t.Error(expr.String())
				// Report the eval expectation here (the original message
				// said "Parse() error" and printed tt.wantParseErr).
				t.Errorf("Eval() error = %v, wantErr %v", err, tt.wantEvalErr)

				return
			}
			if err != nil {
				return
			}

			// Integer expectations are normalised to float64 because JSON
			// numbers (and jsonParse) decode to float64.
			wantValue := tt.wantValue
			if i, ok := wantValue.(int); ok {
				wantValue = float64(i)
			}

			valueFloat, ok := value.(float64)
			wantValueFloat, ok2 := wantValue.(float64)

			if ok && ok2 {
				// Compare floats with a small tolerance to absorb rounding noise.
				if math.Abs(valueFloat-wantValueFloat) > 0.0001 {
					t.Error(expr.String())
					t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
				}
			} else if !reflect.DeepEqual(value, wantValue) {
				t.Error(expr.String())
				t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
			}
		})
	}
}
func jsonParse(s string) any {
if s == "" {
return nil
}
var j any
err := json.Unmarshal([]byte(s), &j)
if err != nil {
panic(s + err.Error())
}
return j
}

View File

@@ -1,351 +0,0 @@
package caql
import (
"fmt"
"strconv"
"strings"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// aqlInterpreter walks a parsed CAQL tree (as an ANTLR listener) and
// evaluates it against a set of named values. Exit-listeners push their
// results onto a value stack; the final result ends up as the only entry.
type aqlInterpreter struct {
	*parser.BaseCAQLParserListener

	values map[string]any // variable bindings, looked up by getVar
	stack  []any          // evaluation stack of intermediate results
	errs   []error        // errors collected during the walk; the first one wins
}
// push places v on top of the interpreter's evaluation stack.
func (s *aqlInterpreter) push(v any) {
	s.stack = append(s.stack, v)
}
// pop removes and returns the top value from the listener stack.
// Popping an empty stack records ErrStack and returns nil instead of
// panicking.
func (s *aqlInterpreter) pop() (n any) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		s.appendErrors(ErrStack)
		return
	}
	// Pop the last value from the Stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]
	return
}
// binaryPop removes the two topmost stack values and returns them in
// (left, right) order. The right operand was pushed last, so it is popped
// first.
func (s *aqlInterpreter) binaryPop() (any, any) {
	r := s.pop()
	l := s.pop()
	return l, r
}
// ExitExpression is called when production expression is exited. It pops the
// operand value(s) that child listeners already pushed, applies the operator
// carried by the context, and pushes the result. Literal, reference and
// unary-operator children have already pushed their own value, so those
// branches pass through.
func (s *aqlInterpreter) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// pass
	case ctx.Reference() != nil:
		// pass
	case ctx.Operator_unary() != nil:
		// pass
	case ctx.T_PLUS() != nil:
		s.push(plus(s.binaryPop()))
	case ctx.T_MINUS() != nil:
		s.push(minus(s.binaryPop()))
	case ctx.T_TIMES() != nil:
		s.push(times(s.binaryPop()))
	case ctx.T_DIV() != nil:
		s.push(div(s.binaryPop()))
	case ctx.T_MOD() != nil:
		s.push(mod(s.binaryPop()))
	case ctx.T_RANGE() != nil:
		s.push(aqlrange(s.binaryPop()))
	// For plain comparisons eq_op must be nil; a non-nil eq_op means the
	// operator belongs to an ALL/ANY/NONE quantified comparison below.
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		s.push(lt(s.binaryPop()))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		s.push(gt(s.binaryPop()))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		s.push(le(s.binaryPop()))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		s.push(ge(s.binaryPop()))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		// maybeNot handles the `NOT IN` form.
		s.push(maybeNot(ctx, in(s.binaryPop())))
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		s.push(eq(s.binaryPop()))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		s.push(ne(s.binaryPop()))
	// Quantified comparisons, e.g. `arr ALL == x`: the left operand must be
	// an array; the comparison predicate is selected from the eq_op token.
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(all(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(anyElement(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(none(left.([]any), getOp(ctx.GetEq_op().GetTokenType()), right))
	// Quantified membership, e.g. `arr ALL NOT IN other`.
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(all(left.([]any), in, right))
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(anyElement(left.([]any), in, right))
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(none(left.([]any), in, right))
	case ctx.T_LIKE() != nil:
		m, err := like(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_REGEX_MATCH() != nil:
		m, err := regexMatch(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_REGEX_NON_MATCH() != nil:
		m, err := regexNonMatch(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_AND() != nil:
		s.push(and(s.binaryPop()))
	case ctx.T_OR() != nil:
		s.push(or(s.binaryPop()))
	// Full ternary `a ? b : c`: three sub-expressions on the stack.
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		right, middle, left := s.pop(), s.pop(), s.pop()
		s.push(ternary(left, middle, right))
	// Shorthand ternary `a ? : c`: the middle operand defaults to nil.
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		right, left := s.pop(), s.pop()
		s.push(ternary(left, nil, right))
	default:
		panic("unknown expression")
	}
}
// appendErrors records err in the interpreter's error list. Nil errors are
// ignored, so callers can pass results through unconditionally.
func (s *aqlInterpreter) appendErrors(err error) {
	if err == nil {
		return
	}
	s.errs = append(s.errs, err)
}
// ExitOperator_unary is called when production operator_unary is exited.
// It replaces the top of the stack with the result of the unary operator.
// Unary +/- assert a float64 operand, so applying them to a non-number
// panics (the panic is recovered into an error by Tree.Eval).
func (s *aqlInterpreter) ExitOperator_unary(ctx *parser.Operator_unaryContext) {
	value := s.pop()
	switch {
	case ctx.T_PLUS() != nil:
		s.push(value.(float64))
	case ctx.T_MINUS() != nil:
		s.push(-value.(float64))
	case ctx.T_NOT() != nil:
		s.push(!toBool(value))
	default:
		panic(fmt.Sprintf("unexpected operation: %s", ctx.GetText()))
	}
}
// ExitReference is called when production reference is exited. It resolves
// bare identifiers, dotted member access and bracketed indexing against the
// values already on the stack. Type assertions panic on mismatched shapes;
// Tree.Eval recovers such panics into errors.
func (s *aqlInterpreter) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		// Member access `obj.field`: the object was pushed by the child
		// reference listener.
		reference := s.pop()
		s.push(reference.(map[string]any)[ctx.T_STRING().GetText()])
	case ctx.T_STRING() != nil:
		// Bare identifier: look it up in the interpreter's bindings.
		s.push(s.getVar(ctx.T_STRING().GetText()))
	case ctx.Compound_value() != nil:
		// pass
	case ctx.Function_call() != nil:
		// pass
	case ctx.T_OPEN() != nil:
		// Parenthesised expression: its value is already on the stack.
		// pass
	case ctx.T_ARRAY_OPEN() != nil:
		// Indexing `ref[key]`: numeric keys index arrays, with negative
		// indices counting from the end; string keys index objects.
		key := s.pop()
		reference := s.pop()
		if f, ok := key.(float64); ok {
			index := int(f)
			if index < 0 {
				index = len(reference.([]any)) + index
			}
			s.push(reference.([]any)[index])
			return
		}
		s.push(reference.(map[string]any)[key.(string)])
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitCompound_value is called when production compound_value is exited.
// Nothing to do: the array or object listener has already pushed the value.
func (s *aqlInterpreter) ExitCompound_value(_ *parser.Compound_valueContext) {
	// pass
}
// ExitFunction_call is called when production function_call is exited.
// Evaluation is delegated to the interpreter's function dispatcher, which
// pops the arguments and pushes the call result.
func (s *aqlInterpreter) ExitFunction_call(ctx *parser.Function_callContext) {
	s.function(ctx)
}
// ExitValue_literal is called when production value_literal is exited.
// It pushes the literal's Go value: quoted strings are unquoted, integer
// literals (decimal, 0b binary, 0x hexadecimal) and floats become float64
// (matching JSON decoding), and null/true/false map to nil/true/false.
// Parse errors are recorded via appendErrors rather than aborting the walk.
func (s *aqlInterpreter) ExitValue_literal(ctx *parser.Value_literalContext) {
	switch {
	case ctx.T_QUOTED_STRING() != nil:
		st, err := unquote(ctx.GetText())
		s.appendErrors(err)
		s.push(st)
	case ctx.T_INT() != nil:
		t := ctx.GetText()
		switch {
		case strings.HasPrefix(strings.ToLower(t), "0b"):
			// Binary literal: parse the digits after the 0b prefix.
			i64, err := strconv.ParseInt(t[2:], 2, 64)
			s.appendErrors(err)
			s.push(float64(i64))
		case strings.HasPrefix(strings.ToLower(t), "0x"):
			// Hexadecimal literal: parse the digits after the 0x prefix.
			i64, err := strconv.ParseInt(t[2:], 16, 64)
			s.appendErrors(err)
			s.push(float64(i64))
		default:
			i, err := strconv.Atoi(t)
			s.appendErrors(err)
			s.push(float64(i))
		}
	case ctx.T_FLOAT() != nil:
		i, err := strconv.ParseFloat(ctx.GetText(), 64)
		s.appendErrors(err)
		s.push(i)
	case ctx.T_NULL() != nil:
		s.push(nil)
	case ctx.T_TRUE() != nil:
		s.push(true)
	case ctx.T_FALSE() != nil:
		s.push(false)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitArray is called when production array is exited. The elements were
// pushed left-to-right, so they come off the stack in reverse order;
// prepending each popped element restores source order.
func (s *aqlInterpreter) ExitArray(ctx *parser.ArrayContext) {
	array := []any{}
	for range ctx.AllExpression() {
		// prepend element
		array = append([]any{s.pop()}, array...)
	}
	s.push(array)
}
// ExitObject is called when production object is exited. For each element it
// pops the key first (ExitObject_element leaves the key on top of the stack)
// and then the value. Element order is irrelevant since Go maps are
// unordered; a non-string key panics (recovered by Tree.Eval).
func (s *aqlInterpreter) ExitObject(ctx *parser.ObjectContext) {
	object := map[string]any{}
	for range ctx.AllObject_element() {
		key, value := s.pop(), s.pop()
		object[key.(string)] = value
	}
	s.push(object)
}
// ExitObject_element is called when production object_element is exited.
// Its contract with ExitObject is that each element leaves [value, key] on
// the stack — key on top — because ExitObject pops the key first.
func (s *aqlInterpreter) ExitObject_element(ctx *parser.Object_elementContext) {
	switch {
	case ctx.T_STRING() != nil:
		// Shorthand element {foo}, equivalent to {foo: foo}. Push the value
		// first and the key on top, matching what ExitObject pops.
		// (Previously the key was pushed first, which inverted key and value
		// for this branch and diverged from the branch below, which swaps
		// the top two entries precisely to bring the key to the top.)
		s.push(s.getVar(ctx.GetText()))
		s.push(ctx.GetText())
	case ctx.Object_element_name() != nil, ctx.T_ARRAY_OPEN() != nil:
		// The element name was pushed before the value, leaving the value on
		// top; swap the top two entries so the key ends up on top.
		value, key := s.pop(), s.pop()
		s.push(value)
		s.push(key)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitObject_element_name is called when production object_element_name is
// exited. It pushes the element's key: a bare identifier is used verbatim,
// while a quoted string is unquoted first (any unquoting error is recorded
// together with the offending text).
func (s *aqlInterpreter) ExitObject_element_name(ctx *parser.Object_element_nameContext) {
	switch {
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.T_QUOTED_STRING() != nil:
		st, err := unquote(ctx.T_QUOTED_STRING().GetText())
		if err != nil {
			s.appendErrors(fmt.Errorf("%w: %s", err, ctx.GetText()))
		}
		s.push(st)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// getVar resolves identifier against the interpreter's value bindings.
// An unknown identifier records ErrUndefined and resolves to nil.
func (s *aqlInterpreter) getVar(identifier string) any {
	v, ok := s.values[identifier]
	if !ok {
		s.appendErrors(ErrUndefined)
	}
	return v
}
// maybeNot inverts m when the expression carries a NOT token (as in
// `NOT IN`, `NOT LIKE`); otherwise it returns m unchanged.
func maybeNot(ctx *parser.ExpressionContext, m bool) bool {
	negated := ctx.T_NOT() != nil
	// XOR: flips m exactly when negated is true.
	return m != negated
}
// getOp maps a comparison-operator token type to the corresponding binary
// predicate. It is used to select the element comparison for the
// ALL/ANY/NONE quantifiers; an unknown token type panics.
func getOp(tokenType int) func(left, right any) bool {
	switch tokenType {
	case parser.CAQLLexerT_EQ:
		return eq
	case parser.CAQLLexerT_NE:
		return ne
	case parser.CAQLLexerT_LT:
		return lt
	case parser.CAQLLexerT_GT:
		return gt
	case parser.CAQLLexerT_LE:
		return le
	case parser.CAQLLexerT_GE:
		return ge
	case parser.CAQLLexerT_IN:
		return in
	default:
		panic("unknown token type")
	}
}
// all reports whether op(e, expr) holds for every element e of slice.
// It is vacuously true for an empty slice.
func all(slice []any, op func(any, any) bool, expr any) bool {
	for i := range slice {
		if op(slice[i], expr) {
			continue
		}
		return false
	}
	return true
}
// anyElement reports whether op(e, expr) holds for at least one element e of
// slice; it is false for an empty slice.
func anyElement(slice []any, op func(any, any) bool, expr any) bool {
	for i := range slice {
		if op(slice[i], expr) {
			return true
		}
	}
	return false
}
// none reports whether op(e, expr) holds for no element e of slice; it is
// vacuously true for an empty slice.
func none(slice []any, op func(any, any) bool, expr any) bool {
	for i := range slice {
		if op(slice[i], expr) {
			return false
		}
	}
	return true
}

View File

@@ -1,520 +0,0 @@
package caql
import (
"math"
"regexp"
"sort"
"strconv"
"strings"
)
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators

// or implements AQL OR: it returns left when left is truthy, otherwise
// right. As in AQL, the operand itself is returned, not a boolean.
func or(left, right any) any {
	if !toBool(left) {
		return right
	}
	return left
}
// and implements AQL AND: it returns left when left is falsy, otherwise
// right. As in AQL, the operand itself is returned, not a boolean.
func and(left, right any) any {
	if toBool(left) {
		return right
	}
	return left
}
// toBool converts an AQL value to its boolean equivalent: null and the zero
// values of bool, number and string are false; arrays and objects are always
// true regardless of content. Any other dynamic type panics.
func toBool(value any) bool {
	switch v := value.(type) {
	case bool:
		return v
	case int:
		return v != 0
	case float64:
		return v != 0
	case string:
		return len(v) > 0
	case []any, map[string]any:
		return true
	case nil:
		return false
	}
	panic("bool conversion failed")
}
// Arithmetic operators https://www.arangodb.com/docs/3.7/aql/operators.html#arithmetic-operators

// plus implements AQL +: both operands are coerced to numbers first.
func plus(left, right any) float64 {
	l, r := toNumber(left), toNumber(right)
	return l + r
}
// minus implements AQL -: both operands are coerced to numbers first.
func minus(left, right any) float64 {
	l, r := toNumber(left), toNumber(right)
	return l - r
}
func times(left, right any) float64 {
return round(toNumber(left) * toNumber(right))
}
// round rounds r to five decimal places, masking binary floating-point noise
// from chained arithmetic.
func round(r float64) float64 {
	const scale = 100000
	return math.Round(r*scale) / scale
}
// div implements AQL /: operands are coerced to numbers and the quotient is
// rounded to five decimals. Division by zero yields 0 rather than ±Inf.
// NOTE(review): ArangoDB reports a warning and returns null on division by
// zero — confirm that returning 0 is the intended local semantics.
func div(left, right any) float64 {
	b := toNumber(right)
	if b == 0 {
		return 0
	}
	return round(toNumber(left) / b)
}
// mod implements AQL %: both operands are coerced to numbers; per math.Mod,
// the result keeps the sign of the dividend.
func mod(left, right any) float64 {
	dividend, divisor := toNumber(left), toNumber(right)
	return math.Mod(dividend, divisor)
}
func toNumber(i any) float64 {
switch v := i.(type) {
case nil:
return 0
case bool:
if v {
return 1
}
return 0
case float64:
switch {
case math.IsNaN(v):
return 0
case math.IsInf(v, 0):
return 0
}
return v
case string:
f, err := strconv.ParseFloat(strings.TrimSpace(v), 64)
if err != nil {
return 0
}
return f
case []any:
if len(v) == 0 {
return 0
}
if len(v) == 1 {
return toNumber(v[0])
}
return 0
case map[string]any:
return 0
default:
panic("number conversion error")
}
}
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
// Order https://www.arangodb.com/docs/3.7/aql/fundamentals-type-value-order.html

// eq reports the AQL == relation. Values of different type ranks are never
// equal. Arrays compare element-wise over the longer length, with absent
// elements treated as nil; objects compare over the union of their keys,
// with absent values treated as nil.
// NOTE(review): unlike lt/gt/le/ge there is no `case int` here, so an int
// operand falls through to the panic. JSON-decoded values are always
// float64, which appears to avoid this in practice — confirm.
func eq(left, right any) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return false
	}
	switch l := left.(type) {
	case nil:
		return true
	case bool, float64, string:
		return left == right
	case []any:
		ra := right.([]any)
		// Compare up to the longer length; absent elements act as nil.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li any
			var rai any
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				return false
			}
		}
		return true
	case map[string]any:
		ro := right.(map[string]any)
		// Walk the sorted union of keys; absent values act as nil.
		for _, key := range keys(l, ro) {
			var li any
			var rai any
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				return false
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
// ne reports the AQL != relation, defined as the negation of eq.
func ne(left, right any) bool {
	return !eq(left, right)
}
// lt reports the AQL < relation following the AQL type/value order: values
// of different types compare by type rank (null < bool < number < string <
// array < object). Arrays and objects are decided on their first non-equal
// element/key, with absent entries treated as nil.
// NOTE(review): int and float64 share rank 2, so a mixed int/float pair
// would panic on the type assertion below — JSON decoding yields only
// float64, which appears to avoid this; confirm.
func lt(left, right any) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return leftV < rightV
	}
	switch l := left.(type) {
	case nil:
		// nil is never less than nil.
		return false
	case bool:
		// false (0) < true (1).
		return toNumber(l) < toNumber(right)
	case int:
		return l < right.(int)
	case float64:
		return l < right.(float64)
	case string:
		return l < right.(string)
	case []any:
		ra := right.([]any)
		// Compare up to the longer length; absent elements act as nil.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li any
			var rai any
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// Decide on the first differing element.
				return lt(li, rai)
			}
		}
		return false
	case map[string]any:
		ro := right.(map[string]any)
		// Walk the sorted union of keys; absent values act as nil.
		for _, key := range keys(l, ro) {
			var li any
			var rai any
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				return lt(li, rai)
			}
		}
		return false
	default:
		panic("unknown type")
	}
}
func keys(l map[string]any, ro map[string]any) []string {
var keys []string
seen := map[string]bool{}
for _, a := range []map[string]any{l, ro} {
for k := range a {
if _, ok := seen[k]; !ok {
seen[k] = true
keys = append(keys, k)
}
}
}
sort.Strings(keys)
return keys
}
// gt reports the AQL > relation; the mirror of lt. Values of different
// types compare by type rank; arrays and objects are decided on their first
// non-equal element/key, with absent entries treated as nil.
func gt(left, right any) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return leftV > rightV
	}
	switch l := left.(type) {
	case nil:
		return false
	case bool:
		return toNumber(l) > toNumber(right)
	case int:
		return l > right.(int)
	case float64:
		return l > right.(float64)
	case string:
		return l > right.(string)
	case []any:
		ra := right.([]any)
		// Compare up to the longer length; absent elements act as nil.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li any
			var rai any
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// Decide on the first differing element.
				return gt(li, rai)
			}
		}
		return false
	case map[string]any:
		ro := right.(map[string]any)
		// Walk the sorted union of keys; absent values act as nil.
		for _, key := range keys(l, ro) {
			var li any
			var rai any
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				return gt(li, rai)
			}
		}
		return false
	default:
		panic("unknown type")
	}
}
// le reports the AQL <= relation. Values of different types compare by type
// rank; fully equal arrays/objects return true; otherwise the first
// non-equal element/key decides, with absent entries treated as nil.
func le(left, right any) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return leftV <= rightV
	}
	switch l := left.(type) {
	case nil:
		// NOTE(review): nil <= nil returns false here, while the array and
		// object branches are reflexive — confirm this asymmetry is wanted.
		return false
	case bool:
		return toNumber(l) <= toNumber(right)
	case int:
		return l <= right.(int)
	case float64:
		return l <= right.(float64)
	case string:
		return l <= right.(string)
	case []any:
		ra := right.([]any)
		// Compare up to the longer length; absent elements act as nil.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li any
			var rai any
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// Decide on the first differing element.
				return le(li, rai)
			}
		}
		// All elements equal: <= holds.
		return true
	case map[string]any:
		ro := right.(map[string]any)
		for _, key := range keys(l, ro) {
			var li any
			var rai any
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// lt is equivalent to le here because eq(li, rai) is false
				// for this pair.
				return lt(li, rai)
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
// ge reports the AQL >= relation; the mirror of le. Values of different
// types compare by type rank; fully equal arrays/objects return true;
// otherwise the first non-equal element/key decides, with absent entries
// treated as nil.
func ge(left, right any) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return leftV >= rightV
	}
	switch l := left.(type) {
	case nil:
		// NOTE(review): nil >= nil returns false here, while the array and
		// object branches are reflexive — confirm this asymmetry is wanted.
		return false
	case bool:
		return toNumber(l) >= toNumber(right)
	case int:
		return l >= right.(int)
	case float64:
		return l >= right.(float64)
	case string:
		return l >= right.(string)
	case []any:
		ra := right.([]any)
		// Compare up to the longer length; absent elements act as nil.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li any
			var rai any
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// Decide on the first differing element.
				return ge(li, rai)
			}
		}
		// All elements equal: >= holds.
		return true
	case map[string]any:
		ro := right.(map[string]any)
		for _, key := range keys(l, ro) {
			var li any
			var rai any
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// gt is equivalent to ge here because eq(li, rai) is false
				// for this pair.
				return gt(li, rai)
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
// in reports whether left occurs in right, which must be an array; any other
// right operand yields false. Membership uses Go interface equality, so an
// element of a non-comparable dynamic type (array/object) panics at runtime.
func in(left, right any) bool {
	arr, ok := right.([]any)
	if !ok {
		return false
	}
	for _, elem := range arr {
		if elem == left {
			return true
		}
	}
	return false
}
// like implements AQL LIKE: the right operand is the wildcard pattern and
// the left operand the subject; both are asserted to strings. Matching is
// delegated to the package's match helper.
func like(left, right any) (bool, error) {
	return match(right.(string), left.(string))
}
// regexMatch implements AQL =~: it reports whether left matches the regular
// expression in right. The pattern is recompiled on every call.
func regexMatch(left, right any) (bool, error) {
	pattern := right.(string)
	subject := left.(string)
	return regexp.MatchString(pattern, subject)
}
// regexNonMatch implements AQL !~: it reports whether left does NOT match
// the regular expression in right. On an invalid pattern it returns true
// together with the compile error (the caller records the error).
func regexNonMatch(left, right any) (bool, error) {
	matched, err := regexp.MatchString(right.(string), left.(string))
	return !matched, err
}
// typeValue maps an AQL value to its rank in the AQL type order
// (null < bool < number < string < array < object), which is used to compare
// values of different types. Unknown dynamic types panic.
func typeValue(v any) int {
	switch v.(type) {
	case nil:
		return 0
	case bool:
		return 1
	case float64, int:
		return 2
	case string:
		return 3
	case []any:
		return 4
	case map[string]any:
		return 5
	}
	panic("unknown type")
}
// Ternary operator https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator

// ternary implements `left ? middle : right` and the shorthand
// `left ? : right` (middle == nil): a truthy left yields middle, or left
// itself in the shorthand form; a falsy left yields right.
func ternary(left, middle, right any) any {
	if !toBool(left) {
		return right
	}
	if middle == nil {
		return left
	}
	return middle
}
// Range operators https://www.arangodb.com/docs/3.7/aql/operators.html#range-operator
func aqlrange(left, right any) []float64 {
var v []float64
for i := int(left.(float64)); i <= int(right.(float64)); i++ {
v = append(v, float64(i))
}
return v
}

View File

@@ -1,121 +0,0 @@
package caql
import (
"errors"
"fmt"
"strconv"
"github.com/antlr/antlr4/runtime/Go/antlr"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// Parser builds CAQL parse trees from query strings. Searcher and Prefix are
// carried into each resulting Tree for use when rebuilding query text.
type Parser struct {
	Searcher Searcher
	Prefix   string
}
// Parse parses aql into a Tree. The bail error strategy makes the ANTLR
// parser panic on syntax errors; the deferred recover converts such panics
// into the returned error.
func (p *Parser) Parse(aql string) (t *Tree, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()
	// Set up the input
	inputStream := antlr.NewInputStream(aql)
	errorListener := &errorListener{}
	// Create the Lexer
	lexer := parser.NewCAQLLexer(inputStream)
	lexer.RemoveErrorListeners()
	lexer.AddErrorListener(errorListener)
	stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	// Create the Parser
	aqlParser := parser.NewCAQLParser(stream)
	aqlParser.RemoveErrorListeners()
	aqlParser.AddErrorListener(errorListener)
	aqlParser.SetErrorHandler(antlr.NewBailErrorStrategy())
	// NOTE(review): tokens are produced lazily, so this check runs before
	// aqlParser.Parse() in the return statement below and may never observe
	// a listener error — verify whether it should run after parsing.
	if errorListener.errs != nil {
		err = errorListener.errs[0]
	}
	return &Tree{aqlParser: aqlParser, parseContext: aqlParser.Parse(), searcher: p.Searcher, prefix: p.Prefix}, err
}
// Tree is a parsed CAQL expression: the ANTLR parse context plus the
// searcher and prefix used when rendering the query back to a string.
type Tree struct {
	parseContext parser.IParseContext
	aqlParser    *parser.CAQLParser
	searcher     Searcher
	prefix       string
}
// Eval evaluates the parsed expression against the given variable bindings
// and returns the resulting value. Interpreter panics (e.g. failed type
// assertions) are converted to errors by the deferred recover; if the walk
// collected errors, the first one is returned instead of a value.
func (t *Tree) Eval(values map[string]any) (i any, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()
	interpreter := aqlInterpreter{values: values}
	antlr.ParseTreeWalkerDefault.Walk(&interpreter, t.parseContext)
	if interpreter.errs != nil {
		return nil, interpreter.errs[0]
	}
	// The walk leaves the final result as the only stack entry.
	return interpreter.stack[0], nil
}
// String rebuilds the canonical CAQL text of the expression by walking the
// parse tree with the AQL builder. Builder panics are converted to an error
// by the deferred recover.
func (t *Tree) String() (s string, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()
	builder := aqlBuilder{searcher: t.searcher, prefix: t.prefix}
	antlr.ParseTreeWalkerDefault.Walk(&builder, t.parseContext)
	return builder.stack[0], err
}
// BleveString renders the expression as a Bleve query string by walking the
// parse tree with the Bleve builder. Builder panics become errors via the
// deferred recover; a builder-reported error takes precedence over a result.
func (t *Tree) BleveString() (s string, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()
	builder := bleveBuilder{}
	antlr.ParseTreeWalkerDefault.Walk(&builder, t.parseContext)
	if builder.err != nil {
		return "", builder.err
	}
	return builder.stack[0], err
}
// errorListener collects ANTLR lexer/parser diagnostics as plain errors so
// that callers can surface the first one.
type errorListener struct {
	*antlr.DefaultErrorListener

	errs []error // in the order reported by ANTLR
}
// SyntaxError records a lexer/parser syntax error as "line L:C msg".
// errors.New is used instead of fmt.Errorf so that '%' characters inside the
// grammar-derived msg are never misinterpreted as formatting verbs (the
// previous version passed the concatenated string as a format string).
func (el *errorListener) SyntaxError(_ antlr.Recognizer, _ any, line, column int, msg string, _ antlr.RecognitionException) {
	el.errs = append(el.errs, errors.New("line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg))
}
// ReportAmbiguity records an ANTLR ambiguity report as a generic error.
func (el *errorListener) ReportAmbiguity(_ antlr.Parser, _ *antlr.DFA, _, _ int, _ bool, _ *antlr.BitSet, _ antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportAmbiguity"))
}
// ReportAttemptingFullContext records an ANTLR full-context fallback report
// as a generic error.
func (el *errorListener) ReportAttemptingFullContext(_ antlr.Parser, _ *antlr.DFA, _, _ int, _ *antlr.BitSet, _ antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportAttemptingFullContext"))
}
// ReportContextSensitivity records an ANTLR context-sensitivity report as a
// generic error.
func (el *errorListener) ReportContextSensitivity(_ antlr.Parser, _ *antlr.DFA, _, _, _ int, _ antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportContextSensitivity"))
}

View File

@@ -1,362 +0,0 @@
package caql_test
import (
"encoding/json"
"reflect"
"testing"
"github.com/SecurityBrewery/catalyst/caql"
)
// MockSearcher is a test double for the caql Searcher interface that always
// reports the fixed hit IDs "1", "2" and "3" for any query.
type MockSearcher struct{}

// Search ignores its query and returns the canned ID list with no error.
func (m MockSearcher) Search(_ string) (ids []string, err error) {
	return []string{"1", "2", "3"}, nil
}
func TestParseSAQLEval(t *testing.T) {
t.Parallel()
tests := []struct {
name string
saql string
wantRebuild string
wantValue any
wantParseErr bool
wantRebuildErr bool
wantEvalErr bool
values string
}{
// Custom
{name: "Compare 1", saql: "1 <= 2", wantRebuild: "1 <= 2", wantValue: true},
{name: "Compare 2", saql: "1 >= 2", wantRebuild: "1 >= 2", wantValue: false},
{name: "Compare 3", saql: "1 == 2", wantRebuild: "1 == 2", wantValue: false},
{name: "Compare 4", saql: "1 > 2", wantRebuild: "1 > 2", wantValue: false},
{name: "Compare 5", saql: "1 < 2", wantRebuild: "1 < 2", wantValue: true},
{name: "Compare 6", saql: "1 != 2", wantRebuild: "1 != 2", wantValue: true},
{name: "SymbolRef 1", saql: "name", wantRebuild: "name", wantValue: false, values: `{"name": false}`},
{name: "SymbolRef 2", saql: "d.name", wantRebuild: "d.name", wantValue: false, values: `{"d": {"name": false}}`},
{name: "SymbolRef 3", saql: "name == false", wantRebuild: "name == false", wantValue: true, values: `{"name": false}`},
{name: "SymbolRef Error 1", saql: "name, title", wantParseErr: true},
{name: "SymbolRef Error 2", saql: "unknown", wantRebuild: "unknown", wantValue: false, wantEvalErr: true, values: `{}`},
{name: "Misc 1", saql: `active == true && age < 39`, wantRebuild: `active == true AND age < 39`, wantValue: true, values: `{"active": true, "age": 2}`},
{name: "Misc 2", saql: `(attr == 10) AND foo == 'bar' OR NOT baz`, wantRebuild: `(attr == 10) AND foo == "bar" OR NOT baz`, wantValue: false, values: `{"attr": 2, "foo": "bar", "baz": true}`},
{name: "Misc 3", saql: `attr == 10 AND (foo == 'bar' OR foo == 'baz')`, wantRebuild: `attr == 10 AND (foo == "bar" OR foo == "baz")`, wantValue: false, values: `{"attr": 2, "foo": "bar", "baz": true}`},
{name: "Misc 4", saql: `5 > 1 AND "a" != "b"`, wantRebuild: `5 > 1 AND "a" != "b"`, wantValue: true},
{name: "LIKE 1", saql: `"foo" LIKE "%f%"`, wantRebuild: `"foo" LIKE "%f%"`, wantValue: true},
{name: "LIKE 2", saql: `"foo" NOT LIKE "%f%"`, wantRebuild: `"foo" NOT LIKE "%f%"`, wantValue: false},
{name: "LIKE 3", saql: `NOT "foo" LIKE "%f%"`, wantRebuild: `NOT "foo" LIKE "%f%"`, wantValue: false},
{name: "Summand 1", saql: "1 + 2", wantRebuild: "1 + 2", wantValue: 3},
{name: "Summand 2", saql: "1 - 2", wantRebuild: "1 - 2", wantValue: -1},
{name: "Factor 1", saql: "1 * 2", wantRebuild: "1 * 2", wantValue: 2},
{name: "Factor 2", saql: "1 / 2", wantRebuild: "1 / 2", wantValue: 0.5},
{name: "Factor 3", saql: "1.0 / 2.0", wantRebuild: "1.0 / 2.0", wantValue: 0.5},
{name: "Factor 4", saql: "1 % 2", wantRebuild: "1 % 2", wantValue: 1},
{name: "Term 1", saql: "(1 + 2) * 2", wantRebuild: "(1 + 2) * 2", wantValue: 6},
{name: "Term 2", saql: "2 * (1 + 2)", wantRebuild: "2 * (1 + 2)", wantValue: 6},
// https://www.arangodb.com/docs/3.7/aql/fundamentals-data-types.html
{name: "Null 1", saql: `null`, wantRebuild: "null"},
{name: "Bool 1", saql: `true`, wantRebuild: "true", wantValue: true},
{name: "Bool 2", saql: `false`, wantRebuild: "false", wantValue: false},
{name: "Numeric 1", saql: "1", wantRebuild: "1", wantValue: 1},
{name: "Numeric 2", saql: "+1", wantRebuild: "1", wantValue: 1},
{name: "Numeric 3", saql: "42", wantRebuild: "42", wantValue: 42},
{name: "Numeric 4", saql: "-1", wantRebuild: "-1", wantValue: -1},
{name: "Numeric 5", saql: "-42", wantRebuild: "-42", wantValue: -42},
{name: "Numeric 6", saql: "1.23", wantRebuild: "1.23", wantValue: 1.23},
{name: "Numeric 7", saql: "-99.99", wantRebuild: "-99.99", wantValue: -99.99},
{name: "Numeric 8", saql: "0.5", wantRebuild: "0.5", wantValue: 0.5},
{name: "Numeric 9", saql: ".5", wantRebuild: ".5", wantValue: 0.5},
{name: "Numeric 10", saql: "-4.87e103", wantRebuild: "-4.87e103", wantValue: -4.87e+103},
{name: "Numeric 11", saql: "0b10", wantRebuild: "0b10", wantValue: 2},
{name: "Numeric 12", saql: "0x10", wantRebuild: "0x10", wantValue: 16},
{name: "Numeric Error 1", saql: "1.", wantParseErr: true},
{name: "Numeric Error 2", saql: "01.23", wantParseErr: true},
{name: "Numeric Error 3", saql: "00.23", wantParseErr: true},
{name: "Numeric Error 4", saql: "00", wantParseErr: true},
// {name: "String 1", saql: `"yikes!"`, wantRebuild: `"yikes!"`, wantValue: "yikes!"},
// {name: "String 2", saql: `"don't know"`, wantRebuild: `"don't know"`, wantValue: "don't know"},
// {name: "String 3", saql: `"this is a \"quoted\" word"`, wantRebuild: `"this is a \"quoted\" word"`, wantValue: "this is a \"quoted\" word"},
// {name: "String 4", saql: `"this is a longer string."`, wantRebuild: `"this is a longer string."`, wantValue: "this is a longer string."},
// {name: "String 5", saql: `"the path separator on Windows is \\"`, wantRebuild: `"the path separator on Windows is \\"`, wantValue: "the path separator on Windows is \\"},
// {name: "String 6", saql: `'yikes!'`, wantRebuild: `"yikes!"`, wantValue: "yikes!"},
// {name: "String 7", saql: `'don\'t know'`, wantRebuild: `"don't know"`, wantValue: "don't know"},
// {name: "String 8", saql: `'this is a "quoted" word'`, wantRebuild: `"this is a \"quoted\" word"`, wantValue: "this is a \"quoted\" word"},
// {name: "String 9", saql: `'this is a longer string.'`, wantRebuild: `"this is a longer string."`, wantValue: "this is a longer string."},
// {name: "String 10", saql: `'the path separator on Windows is \\'`, wantRebuild: `"the path separator on Windows is \\"`, wantValue: `the path separator on Windows is \`},
{name: "Array 1", saql: "[]", wantRebuild: "[]", wantValue: []any{}},
{name: "Array 2", saql: `[true]`, wantRebuild: `[true]`, wantValue: []any{true}},
{name: "Array 3", saql: `[1, 2, 3]`, wantRebuild: `[1, 2, 3]`, wantValue: []any{float64(1), float64(2), float64(3)}},
{
name: "Array 4", saql: `[-99, "yikes!", [false, ["no"], []], 1]`, wantRebuild: `[-99, "yikes!", [false, ["no"], []], 1]`,
wantValue: []any{-99.0, "yikes!", []any{false, []any{"no"}, []any{}}, float64(1)},
},
{name: "Array 5", saql: `[["fox", "marshal"]]`, wantRebuild: `[["fox", "marshal"]]`, wantValue: []any{[]any{"fox", "marshal"}}},
{name: "Array 6", saql: `[1, 2, 3,]`, wantRebuild: `[1, 2, 3]`, wantValue: []any{float64(1), float64(2), float64(3)}},
{name: "Array Error 1", saql: "(1,2,3)", wantParseErr: true},
{name: "Array Access 1", saql: "u.friends[0]", wantRebuild: "u.friends[0]", wantValue: 7, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 2", saql: "u.friends[2]", wantRebuild: "u.friends[2]", wantValue: 9, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 3", saql: "u.friends[-1]", wantRebuild: "u.friends[-1]", wantValue: 9, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 4", saql: "u.friends[-2]", wantRebuild: "u.friends[-2]", wantValue: 8, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Object 1", saql: "{}", wantRebuild: "{}", wantValue: map[string]any{}},
{name: "Object 2", saql: `{a: 1}`, wantRebuild: "{a: 1}", wantValue: map[string]any{"a": float64(1)}},
{name: "Object 3", saql: `{'a': 1}`, wantRebuild: `{'a': 1}`, wantValue: map[string]any{"a": float64(1)}},
{name: "Object 4", saql: `{"a": 1}`, wantRebuild: `{"a": 1}`, wantValue: map[string]any{"a": float64(1)}},
{name: "Object 5", saql: `{'return': 1}`, wantRebuild: `{'return': 1}`, wantValue: map[string]any{"return": float64(1)}},
{name: "Object 6", saql: `{"return": 1}`, wantRebuild: `{"return": 1}`, wantValue: map[string]any{"return": float64(1)}},
{name: "Object 9", saql: `{a: 1,}`, wantRebuild: "{a: 1}", wantValue: map[string]any{"a": float64(1)}},
{name: "Object 10", saql: `{"a": 1,}`, wantRebuild: `{"a": 1}`, wantValue: map[string]any{"a": float64(1)}},
// {"Object 8", "{`return`: 1}", `{"return": 1}`, true},
// {"Object 7", "{´return´: 1}", `{"return": 1}`, true},
{name: "Object Error 1: return is a keyword", saql: `{like: 1}`, wantParseErr: true},
{name: "Object Access 1", saql: "u.address.city.name", wantRebuild: "u.address.city.name", wantValue: "Munich", values: `{"u": {"address": {"city": {"name": "Munich"}}}}`},
{name: "Object Access 2", saql: "u.friends[0].name.first", wantRebuild: "u.friends[0].name.first", wantValue: "Kevin", values: `{"u": {"friends": [{"name": {"first": "Kevin"}}]}}`},
{name: "Object Access 3", saql: `u["address"]["city"]["name"]`, wantRebuild: `u["address"]["city"]["name"]`, wantValue: "Munich", values: `{"u": {"address": {"city": {"name": "Munich"}}}}`},
{name: "Object Access 4", saql: `u["friends"][0]["name"]["first"]`, wantRebuild: `u["friends"][0]["name"]["first"]`, wantValue: "Kevin", values: `{"u": {"friends": [{"name": {"first": "Kevin"}}]}}`},
{name: "Object Access 5", saql: "u._key", wantRebuild: "u._key", wantValue: false, values: `{"u": {"_key": false}}`},
// This query language does not support binds
// https://www.arangodb.com/docs/3.7/aql/fundamentals-bind-parameters.html
// {name: "Bind 1", saql: "u.id == @id && u.name == @name", wantRebuild: `u.id == @id AND u.name == @name`, wantValue: true},
// {name: "Bind 2", saql: "u.id == CONCAT('prefix', @id, 'suffix') && u.name == @name", wantRebuild: `u.id == CONCAT('prefix', @id, 'suffix') AND u.name == @name`, wantValue: false},
// {name: "Bind 3", saql: "doc.@attr.@subattr", wantRebuild: `doc.@attr.@subattr`, wantValue: true, values: `{"doc": {"@attr": {"@subattr": true}}}`},
// {name: "Bind 4", saql: "doc[@attr][@subattr]", wantRebuild: `doc[@attr][@subattr]`, wantValue: true, values: `{"doc": {"@attr": {"@subattr": true}}}`},
// https://www.arangodb.com/docs/3.7/aql/fundamentals-type-value-order.html
{name: "Compare 7", saql: `null < false`, wantRebuild: `null < false`, wantValue: true},
{name: "Compare 8", saql: `null < true`, wantRebuild: `null < true`, wantValue: true},
{name: "Compare 9", saql: `null < 1`, wantRebuild: `null < 1`, wantValue: true},
{name: "Compare 10", saql: `null < ''`, wantRebuild: `null < ""`, wantValue: true},
{name: "Compare 11", saql: `null < ' '`, wantRebuild: `null < " "`, wantValue: true},
{name: "Compare 12", saql: `null < '3'`, wantRebuild: `null < "3"`, wantValue: true},
{name: "Compare 13", saql: `null < 'abc'`, wantRebuild: `null < "abc"`, wantValue: true},
{name: "Compare 14", saql: `null < []`, wantRebuild: `null < []`, wantValue: true},
{name: "Compare 15", saql: `null < {}`, wantRebuild: `null < {}`, wantValue: true},
{name: "Compare 16", saql: `false < true`, wantRebuild: `false < true`, wantValue: true},
{name: "Compare 17", saql: `false < 5`, wantRebuild: `false < 5`, wantValue: true},
{name: "Compare 18", saql: `false < ''`, wantRebuild: `false < ""`, wantValue: true},
{name: "Compare 19", saql: `false < ' '`, wantRebuild: `false < " "`, wantValue: true},
{name: "Compare 20", saql: `false < '7'`, wantRebuild: `false < "7"`, wantValue: true},
{name: "Compare 21", saql: `false < 'abc'`, wantRebuild: `false < "abc"`, wantValue: true},
{name: "Compare 22", saql: `false < []`, wantRebuild: `false < []`, wantValue: true},
{name: "Compare 23", saql: `false < {}`, wantRebuild: `false < {}`, wantValue: true},
{name: "Compare 24", saql: `true < 9`, wantRebuild: `true < 9`, wantValue: true},
{name: "Compare 25", saql: `true < ''`, wantRebuild: `true < ""`, wantValue: true},
{name: "Compare 26", saql: `true < ' '`, wantRebuild: `true < " "`, wantValue: true},
{name: "Compare 27", saql: `true < '11'`, wantRebuild: `true < "11"`, wantValue: true},
{name: "Compare 28", saql: `true < 'abc'`, wantRebuild: `true < "abc"`, wantValue: true},
{name: "Compare 29", saql: `true < []`, wantRebuild: `true < []`, wantValue: true},
{name: "Compare 30", saql: `true < {}`, wantRebuild: `true < {}`, wantValue: true},
{name: "Compare 31", saql: `13 < ''`, wantRebuild: `13 < ""`, wantValue: true},
{name: "Compare 32", saql: `15 < ' '`, wantRebuild: `15 < " "`, wantValue: true},
{name: "Compare 33", saql: `17 < '18'`, wantRebuild: `17 < "18"`, wantValue: true},
{name: "Compare 34", saql: `21 < 'abc'`, wantRebuild: `21 < "abc"`, wantValue: true},
{name: "Compare 35", saql: `23 < []`, wantRebuild: `23 < []`, wantValue: true},
{name: "Compare 36", saql: `25 < {}`, wantRebuild: `25 < {}`, wantValue: true},
{name: "Compare 37", saql: `'' < ' '`, wantRebuild: `"" < " "`, wantValue: true},
{name: "Compare 38", saql: `'' < '27'`, wantRebuild: `"" < "27"`, wantValue: true},
{name: "Compare 39", saql: `'' < 'abc'`, wantRebuild: `"" < "abc"`, wantValue: true},
{name: "Compare 40", saql: `'' < []`, wantRebuild: `"" < []`, wantValue: true},
{name: "Compare 41", saql: `'' < {}`, wantRebuild: `"" < {}`, wantValue: true},
{name: "Compare 42", saql: `[] < {}`, wantRebuild: `[] < {}`, wantValue: true},
{name: "Compare 43", saql: `[] < [29]`, wantRebuild: `[] < [29]`, wantValue: true},
{name: "Compare 44", saql: `[1] < [2]`, wantRebuild: `[1] < [2]`, wantValue: true},
{name: "Compare 45", saql: `[1, 2] < [2]`, wantRebuild: `[1, 2] < [2]`, wantValue: true},
{name: "Compare 46", saql: `[99, 99] < [100]`, wantRebuild: `[99, 99] < [100]`, wantValue: true},
{name: "Compare 47", saql: `[false] < [true]`, wantRebuild: `[false] < [true]`, wantValue: true},
{name: "Compare 48", saql: `[false, 1] < [false, '']`, wantRebuild: `[false, 1] < [false, ""]`, wantValue: true},
{name: "Compare 49", saql: `{} < {"a": 1}`, wantRebuild: `{} < {"a": 1}`, wantValue: true},
{name: "Compare 50", saql: `{} == {"a": null}`, wantRebuild: `{} == {"a": null}`, wantValue: true},
{name: "Compare 51", saql: `{"a": 1} < {"a": 2}`, wantRebuild: `{"a": 1} < {"a": 2}`, wantValue: true},
{name: "Compare 52", saql: `{"b": 1} < {"a": 0}`, wantRebuild: `{"b": 1} < {"a": 0}`, wantValue: true},
{name: "Compare 53", saql: `{"a": {"c": true}} < {"a": {"c": 0}}`, wantRebuild: `{"a": {"c": true}} < {"a": {"c": 0}}`, wantValue: true},
{name: "Compare 54", saql: `{"a": {"c": true, "a": 0}} < {"a": {"c": false, "a": 1}}`, wantRebuild: `{"a": {"c": true, "a": 0}} < {"a": {"c": false, "a": 1}}`, wantValue: true},
{name: "Compare 55", saql: `{"a": 1, "b": 2} == {"b": 2, "a": 1}`, wantRebuild: `{"a": 1, "b": 2} == {"b": 2, "a": 1}`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html
{name: "Compare 56", saql: `0 == null`, wantRebuild: `0 == null`, wantValue: false},
{name: "Compare 57", saql: `1 > 0`, wantRebuild: `1 > 0`, wantValue: true},
{name: "Compare 58", saql: `true != null`, wantRebuild: `true != null`, wantValue: true},
{name: "Compare 59", saql: `45 <= "yikes!"`, wantRebuild: `45 <= "yikes!"`, wantValue: true},
{name: "Compare 60", saql: `65 != "65"`, wantRebuild: `65 != "65"`, wantValue: true},
{name: "Compare 61", saql: `65 == 65`, wantRebuild: `65 == 65`, wantValue: true},
{name: "Compare 62", saql: `1.23 > 1.32`, wantRebuild: `1.23 > 1.32`, wantValue: false},
{name: "Compare 63", saql: `1.5 IN [2, 3, 1.5]`, wantRebuild: `1.5 IN [2, 3, 1.5]`, wantValue: true},
{name: "Compare 64", saql: `"foo" IN null`, wantRebuild: `"foo" IN null`, wantValue: false},
{name: "Compare 65", saql: `42 NOT IN [17, 40, 50]`, wantRebuild: `42 NOT IN [17, 40, 50]`, wantValue: true},
{name: "Compare 66", saql: `"abc" == "abc"`, wantRebuild: `"abc" == "abc"`, wantValue: true},
{name: "Compare 67", saql: `"abc" == "ABC"`, wantRebuild: `"abc" == "ABC"`, wantValue: false},
{name: "Compare 68", saql: `"foo" LIKE "f%"`, wantRebuild: `"foo" LIKE "f%"`, wantValue: true},
{name: "Compare 69", saql: `"foo" NOT LIKE "f%"`, wantRebuild: `"foo" NOT LIKE "f%"`, wantValue: false},
{name: "Compare 70", saql: `"foo" =~ "^f[o].$"`, wantRebuild: `"foo" =~ "^f[o].$"`, wantValue: true},
{name: "Compare 71", saql: `"foo" !~ "[a-z]+bar$"`, wantRebuild: `"foo" !~ "[a-z]+bar$"`, wantValue: true},
{name: "Compare 72", saql: `"abc" LIKE "a%"`, wantRebuild: `"abc" LIKE "a%"`, wantValue: true},
{name: "Compare 73", saql: `"abc" LIKE "_bc"`, wantRebuild: `"abc" LIKE "_bc"`, wantValue: true},
{name: "Compare 74", saql: `"a_b_foo" LIKE "a\\_b\\_foo"`, wantRebuild: `"a_b_foo" LIKE "a\\_b\\_foo"`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#array-comparison-operators
{name: "Compare Array 1", saql: `[1, 2, 3] ALL IN [2, 3, 4]`, wantRebuild: `[1, 2, 3] ALL IN [2, 3, 4]`, wantValue: false},
{name: "Compare Array 2", saql: `[1, 2, 3] ALL IN [1, 2, 3]`, wantRebuild: `[1, 2, 3] ALL IN [1, 2, 3]`, wantValue: true},
{name: "Compare Array 3", saql: `[1, 2, 3] NONE IN [3]`, wantRebuild: `[1, 2, 3] NONE IN [3]`, wantValue: false},
{name: "Compare Array 4", saql: `[1, 2, 3] NONE IN [23, 42]`, wantRebuild: `[1, 2, 3] NONE IN [23, 42]`, wantValue: true},
{name: "Compare Array 5", saql: `[1, 2, 3] ANY IN [4, 5, 6]`, wantRebuild: `[1, 2, 3] ANY IN [4, 5, 6]`, wantValue: false},
{name: "Compare Array 6", saql: `[1, 2, 3] ANY IN [1, 42]`, wantRebuild: `[1, 2, 3] ANY IN [1, 42]`, wantValue: true},
{name: "Compare Array 7", saql: `[1, 2, 3] ANY == 2`, wantRebuild: `[1, 2, 3] ANY == 2`, wantValue: true},
{name: "Compare Array 8", saql: `[1, 2, 3] ANY == 4`, wantRebuild: `[1, 2, 3] ANY == 4`, wantValue: false},
{name: "Compare Array 9", saql: `[1, 2, 3] ANY > 0`, wantRebuild: `[1, 2, 3] ANY > 0`, wantValue: true},
{name: "Compare Array 10", saql: `[1, 2, 3] ANY <= 1`, wantRebuild: `[1, 2, 3] ANY <= 1`, wantValue: true},
{name: "Compare Array 11", saql: `[1, 2, 3] NONE < 99`, wantRebuild: `[1, 2, 3] NONE < 99`, wantValue: false},
{name: "Compare Array 12", saql: `[1, 2, 3] NONE > 10`, wantRebuild: `[1, 2, 3] NONE > 10`, wantValue: true},
{name: "Compare Array 13", saql: `[1, 2, 3] ALL > 2`, wantRebuild: `[1, 2, 3] ALL > 2`, wantValue: false},
{name: "Compare Array 14", saql: `[1, 2, 3] ALL > 0`, wantRebuild: `[1, 2, 3] ALL > 0`, wantValue: true},
{name: "Compare Array 15", saql: `[1, 2, 3] ALL >= 3`, wantRebuild: `[1, 2, 3] ALL >= 3`, wantValue: false},
{name: "Compare Array 16", saql: `["foo", "bar"] ALL != "moo"`, wantRebuild: `["foo", "bar"] ALL != "moo"`, wantValue: true},
{name: "Compare Array 17", saql: `["foo", "bar"] NONE == "bar"`, wantRebuild: `["foo", "bar"] NONE == "bar"`, wantValue: false},
{name: "Compare Array 18", saql: `["foo", "bar"] ANY == "foo"`, wantRebuild: `["foo", "bar"] ANY == "foo"`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
{name: "Logical 1", saql: "active == true OR age < 39", wantRebuild: "active == true OR age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 2", saql: "active == true || age < 39", wantRebuild: "active == true OR age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 3", saql: "active == true AND age < 39", wantRebuild: "active == true AND age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 4", saql: "active == true && age < 39", wantRebuild: "active == true AND age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 5", saql: "!active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 6", saql: "NOT active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 7", saql: "not active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 8", saql: "NOT NOT active", wantRebuild: "NOT NOT active", wantValue: true, values: `{"active": true}`},
{name: "Logical 9", saql: `u.age > 15 && u.address.city != ""`, wantRebuild: `u.age > 15 AND u.address.city != ""`, wantValue: false, values: `{"u": {"age": 2, "address": {"city": "Munich"}}}`},
{name: "Logical 10", saql: `true || false`, wantRebuild: `true OR false`, wantValue: true},
{name: "Logical 11", saql: `NOT u.isInvalid`, wantRebuild: `NOT u.isInvalid`, wantValue: false, values: `{"u": {"isInvalid": true}}`},
{name: "Logical 12", saql: `1 || ! 0`, wantRebuild: `1 OR NOT 0`, wantValue: 1},
{name: "Logical 13", saql: `25 > 1 && 42 != 7`, wantRebuild: `25 > 1 AND 42 != 7`, wantValue: true},
{name: "Logical 14", saql: `22 IN [23, 42] || 23 NOT IN [22, 7]`, wantRebuild: `22 IN [23, 42] OR 23 NOT IN [22, 7]`, wantValue: true},
{name: "Logical 15", saql: `25 != 25`, wantRebuild: `25 != 25`, wantValue: false},
{name: "Logical 16", saql: `1 || 7`, wantRebuild: `1 OR 7`, wantValue: 1},
// {name: "Logical 17", saql: `null || "foo"`, wantRebuild: `null OR "foo"`, wantValue: "foo"},
{name: "Logical 17", saql: `null || "foo"`, wantRebuild: `null OR d._key IN ["1","2","3"]`, wantValue: "foo", values: `{"d": {"_key": "1"}}`}, // eval != rebuild
{name: "Logical 18", saql: `null && true`, wantRebuild: `null AND true`, wantValue: nil},
{name: "Logical 19", saql: `true && 23`, wantRebuild: `true AND 23`, wantValue: 23},
{name: "Logical 20", saql: "true == (6 < 8)", wantRebuild: "true == (6 < 8)", wantValue: true},
{name: "Logical 21", saql: "true == 6 < 8", wantRebuild: "true == 6 < 8", wantValue: true}, // does not work in go
// https://www.arangodb.com/docs/3.7/aql/operators.html#arithmetic-operators
{name: "Arithmetic 1", saql: `1 + 1`, wantRebuild: `1 + 1`, wantValue: 2},
{name: "Arithmetic 2", saql: `33 - 99`, wantRebuild: `33 - 99`, wantValue: -66},
{name: "Arithmetic 3", saql: `12.4 * 4.5`, wantRebuild: `12.4 * 4.5`, wantValue: 55.8},
{name: "Arithmetic 4", saql: `13.0 / 0.1`, wantRebuild: `13.0 / 0.1`, wantValue: 130.0},
{name: "Arithmetic 5", saql: `23 % 7`, wantRebuild: `23 % 7`, wantValue: 2},
{name: "Arithmetic 6", saql: `-15`, wantRebuild: `-15`, wantValue: -15},
{name: "Arithmetic 7", saql: `+9.99`, wantRebuild: `9.99`, wantValue: 9.99},
{name: "Arithmetic 8", saql: `1 + "a"`, wantRebuild: `1 + "a"`, wantValue: 1},
{name: "Arithmetic 9", saql: `1 + "99"`, wantRebuild: `1 + "99"`, wantValue: 100},
{name: "Arithmetic 10", saql: `1 + null`, wantRebuild: `1 + null`, wantValue: 1},
{name: "Arithmetic 11", saql: `null + 1`, wantRebuild: `null + 1`, wantValue: 1},
{name: "Arithmetic 12", saql: `3 + []`, wantRebuild: `3 + []`, wantValue: 3},
{name: "Arithmetic 13", saql: `24 + [2]`, wantRebuild: `24 + [2]`, wantValue: 26},
{name: "Arithmetic 14", saql: `24 + [2, 4]`, wantRebuild: `24 + [2, 4]`, wantValue: 24},
{name: "Arithmetic 15", saql: `25 - null`, wantRebuild: `25 - null`, wantValue: 25},
{name: "Arithmetic 16", saql: `17 - true`, wantRebuild: `17 - true`, wantValue: 16},
{name: "Arithmetic 17", saql: `23 * {}`, wantRebuild: `23 * {}`, wantValue: 0},
{name: "Arithmetic 18", saql: `5 * [7]`, wantRebuild: `5 * [7]`, wantValue: 35},
{name: "Arithmetic 19", saql: `24 / "12"`, wantRebuild: `24 / "12"`, wantValue: 2},
{name: "Arithmetic Error 1: Division by zero", saql: `1 / 0`, wantRebuild: `1 / 0`, wantValue: 0},
// https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator
{name: "Ternary 1", saql: `u.age > 15 || u.active == true ? u.userId : null`, wantRebuild: `u.age > 15 OR u.active == true ? u.userId : null`, wantValue: 45, values: `{"u": {"active": true, "age": 2, "userId": 45}}`},
{name: "Ternary 2", saql: `u.value ? : 'value is null, 0 or not present'`, wantRebuild: `u.value ? : "value is null, 0 or not present"`, wantValue: "value is null, 0 or not present", values: `{"u": {"value": 0}}`},
// https://www.arangodb.com/docs/3.7/aql/operators.html#range-operator
{name: "Range 1", saql: `2010..2013`, wantRebuild: `2010..2013`, wantValue: []float64{2010, 2011, 2012, 2013}},
// {"Array operators 1", `u.friends[*].name`, `u.friends[*].name`, false},
// Security
{name: "Security 1", saql: `doc.value == 1 || true REMOVE doc IN collection //`, wantParseErr: true},
{name: "Security 2", saql: `doc.value == 1 || true INSERT {foo: "bar"} IN collection //`, wantParseErr: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#operator-precedence
{name: "Precedence", saql: `2 > 15 && "a" != ""`, wantRebuild: `2 > 15 AND "a" != ""`, wantValue: false},
}
for _, tt := range tests {
tt := tt
parser := &caql.Parser{
Searcher: &MockSearcher{},
}
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
return
}
got, err := expr.String()
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
if got != tt.wantRebuild {
t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
}
var myJSON map[string]any
if tt.values != "" {
err = json.Unmarshal([]byte(tt.values), &myJSON)
if err != nil {
t.Fatal(err)
}
}
value, err := expr.Eval(myJSON)
if (err != nil) != tt.wantEvalErr {
t.Error(expr.String())
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
wantValue := tt.wantValue
if i, ok := wantValue.(int); ok {
wantValue = float64(i)
}
if !reflect.DeepEqual(value, wantValue) {
t.Error(expr.String())
t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
}
})
}
}

View File

@@ -1,139 +0,0 @@
// Adapted from https://github.com/badgerodon/collections under the MIT License
// Original License:
//
// Copyright (c) 2012 Caleb Doxsey
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package caql
import (
"sort"
)
// Set is an unordered collection of unique values backed by a map.
// Construct instances with NewSet; the zero value has a nil map and
// will panic on Insert.
type Set struct {
	hash map[any]nothing
}

// nothing is a zero-byte placeholder used as the map value type.
type nothing struct{}

// NewSet returns a new Set containing the given initial elements.
// Duplicates collapse into a single element.
func NewSet(initial ...any) *Set {
	// Capacity hint avoids rehashing while seeding.
	s := &Set{make(map[any]nothing, len(initial))}
	for _, v := range initial {
		s.Insert(v)
	}

	return s
}

// Difference returns a new Set holding the elements of s that are not in set.
// Neither receiver nor argument is modified.
func (s *Set) Difference(set *Set) *Set {
	n := make(map[any]nothing)
	for k := range s.hash {
		if _, exists := set.hash[k]; !exists {
			n[k] = nothing{}
		}
	}

	return &Set{n}
}

// Has reports whether element is a member of the set.
func (s *Set) Has(element any) bool {
	_, exists := s.hash[element]

	return exists
}

// Insert adds element to the set; inserting an existing element is a no-op.
func (s *Set) Insert(element any) {
	s.hash[element] = nothing{}
}

// Intersection returns a new Set holding the elements present in both s and set.
func (s *Set) Intersection(set *Set) *Set {
	n := make(map[any]nothing)
	for k := range s.hash {
		if _, exists := set.hash[k]; exists {
			n[k] = nothing{}
		}
	}

	return &Set{n}
}

// Len returns the number of elements in the set.
func (s *Set) Len() int {
	return len(s.hash)
}

// ProperSubsetOf reports whether s is a subset of set and strictly smaller.
func (s *Set) ProperSubsetOf(set *Set) bool {
	return s.SubsetOf(set) && s.Len() < set.Len()
}

// Remove deletes element from the set; removing a missing element is a no-op.
func (s *Set) Remove(element any) {
	delete(s.hash, element)
}

// Minus returns a new Set holding the elements of s that are not in set.
// It is semantically identical to Difference and delegates to it: the
// previous implementation copied all of s and then sorted set's values via
// Values() only to delete them again, an unnecessary O(n log n) pass.
func (s *Set) Minus(set *Set) *Set {
	return s.Difference(set)
}

// SubsetOf reports whether every element of s is also in set.
func (s *Set) SubsetOf(set *Set) bool {
	// A larger set can never be a subset of a smaller one; skip the scan.
	if s.Len() > set.Len() {
		return false
	}
	for k := range s.hash {
		if _, exists := set.hash[k]; !exists {
			return false
		}
	}

	return true
}

// Union returns a new Set holding the elements of both s and set.
func (s *Set) Union(set *Set) *Set {
	n := make(map[any]nothing)
	for k := range s.hash {
		n[k] = nothing{}
	}
	for k := range set.hash {
		n[k] = nothing{}
	}

	return &Set{n}
}
// Values returns the set's elements as a slice, sorted ascending according
// to the package-level lt comparison. The result is never nil, even for an
// empty set.
func (s *Set) Values() []any {
	out := []any{}
	for element := range s.hash {
		out = append(out, element)
	}
	sort.Slice(out, func(a, b int) bool { return lt(out[a], out[b]) })

	return out
}

View File

@@ -1,97 +0,0 @@
// Adapted from https://github.com/badgerodon/collections under the MIT License
// Original License:
//
// Copyright (c) 2012 Caleb Doxsey
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package caql
import (
"testing"
)
// Test exercises the basic Set operations: insert/has/remove round-trip,
// Difference, Intersection, Union, and the (proper) subset predicates.
func Test(t *testing.T) {
t.Parallel()
// Insert / Has / Remove round-trip on a fresh, empty set.
s := NewSet()
s.Insert(5)
if s.Len() != 1 {
t.Errorf("Length should be 1")
}
if !s.Has(5) {
t.Errorf("Membership test failed")
}
s.Remove(5)
if s.Len() != 0 {
t.Errorf("Length should be 0")
}
if s.Has(5) {
t.Errorf("The set should be empty")
}
// Difference
// {1..6} \ {4,5,6} must leave exactly {1,2,3}.
s1 := NewSet(1, 2, 3, 4, 5, 6)
s2 := NewSet(4, 5, 6)
s3 := s1.Difference(s2)
if s3.Len() != 3 {
t.Errorf("Length should be 3")
}
if !(s3.Has(1) && s3.Has(2) && s3.Has(3)) {
t.Errorf("Set should only contain 1, 2, 3")
}
// Intersection
// {1..6} ∩ {4,5,6} must be exactly {4,5,6}; s3 is reused from above.
s3 = s1.Intersection(s2)
if s3.Len() != 3 {
t.Errorf("Length should be 3 after intersection")
}
if !(s3.Has(4) && s3.Has(5) && s3.Has(6)) {
t.Errorf("Set should contain 4, 5, 6")
}
// Union
// {4,5,6} ∪ {7,8,9} must have 6 elements; only 7 is spot-checked.
s4 := NewSet(7, 8, 9)
s3 = s2.Union(s4)
if s3.Len() != 6 {
t.Errorf("Length should be 6 after union")
}
if !(s3.Has(7)) {
t.Errorf("Set should contain 4, 5, 6, 7, 8, 9")
}
// Subset
// Every set is a (non-proper) subset of itself.
if !s1.SubsetOf(s1) {
t.Errorf("set should be a subset of itself")
}
// Proper Subset
// ...but never a proper subset of itself (strictly smaller required).
if s1.ProperSubsetOf(s1) {
t.Errorf("set should not be a subset of itself")
}
}

View File

@@ -1,82 +0,0 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"strconv"
"strings"
"unicode/utf8"
)
// unquote interprets s as a single-quoted, double-quoted,
// or backquoted string literal, returning the string value
// that s quotes.
func unquote(s string) (string, error) {
// A quoted literal needs at least the two delimiter characters.
n := len(s)
if n < 2 {
return "", strconv.ErrSyntax
}
// Opening and closing delimiters must be the same character.
quote := s[0]
if quote != s[n-1] {
return "", strconv.ErrSyntax
}
// Strip the surrounding quotes; everything below works on the body.
s = s[1 : n-1]
// Backquoted (raw) strings: no escapes; content is returned verbatim,
// except that carriage returns are dropped and embedded backquotes are
// impossible (they would have terminated the literal).
if quote == '`' {
if strings.ContainsRune(s, '`') {
return "", strconv.ErrSyntax
}
if strings.ContainsRune(s, '\r') {
// -1 because we know there is at least one \r to remove.
buf := make([]byte, 0, len(s)-1)
for i := 0; i < len(s); i++ {
if s[i] != '\r' {
buf = append(buf, s[i])
}
}
return string(buf), nil
}
return s, nil
}
// Only single and double quotes remain as valid delimiters.
if quote != '"' && quote != '\'' {
return "", strconv.ErrSyntax
}
// Quoted strings may not span lines.
if strings.ContainsRune(s, '\n') {
return "", strconv.ErrSyntax
}
// Is it trivial? Avoid allocation.
// No backslash and no embedded quote char means no escape processing is
// needed; valid UTF-8 bodies can be returned as-is.
if !strings.ContainsRune(s, '\\') && !strings.ContainsRune(s, rune(quote)) {
switch quote {
case '"', '\'':
if utf8.ValidString(s) {
return s, nil
}
}
}
// Slow path: decode one escape sequence (or plain char) at a time via
// strconv.UnquoteChar, which also rejects unescaped quote characters.
var runeTmp [utf8.UTFMax]byte
buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations.
for len(s) > 0 {
c, multibyte, ss, err := strconv.UnquoteChar(s, quote)
if err != nil {
return "", err
}
s = ss
// Single-byte results (including \xFF-style escapes) are appended raw;
// multibyte runes are UTF-8 encoded through the scratch buffer.
if c < utf8.RuneSelf || !multibyte {
buf = append(buf, byte(c))
} else {
n := utf8.EncodeRune(runeTmp[:], c)
buf = append(buf, runeTmp[:n]...)
}
}
return string(buf), nil
}

View File

@@ -1,126 +0,0 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"errors"
"strconv"
"testing"
)
// quoteTest pairs a raw Go string with its double-quoted source
// representation; TestUnquote runs these "backward", feeding out into
// unquote and expecting in.
type quoteTest struct {
in string
out string
}
var quotetests = []quoteTest{
{in: "\a\b\f\r\n\t\v", out: `"\a\b\f\r\n\t\v"`},
{"\\", `"\\"`},
{"abc\xffdef", `"abc\xffdef"`},
{"\u263a", `"☺"`},
{"\U0010ffff", `"\U0010ffff"`},
{"\x04", `"\x04"`},
// Some non-printable but graphic runes. Final column is double-quoted.
{"!\u00a0!\u2000!\u3000!", `"!\u00a0!\u2000!\u3000!"`},
}
type unQuoteTest struct {
in string
out string
}
var unquotetests = []unQuoteTest{
{`""`, ""},
{`"a"`, "a"},
{`"abc"`, "abc"},
{`"☺"`, "☺"},
{`"hello world"`, "hello world"},
{`"\xFF"`, "\xFF"},
{`"\377"`, "\377"},
{`"\u1234"`, "\u1234"},
{`"\U00010111"`, "\U00010111"},
{`"\U0001011111"`, "\U0001011111"},
{`"\a\b\f\n\r\t\v\\\""`, "\a\b\f\n\r\t\v\\\""},
{`"'"`, "'"},
{`'a'`, "a"},
{`'☹'`, "☹"},
{`'\a'`, "\a"},
{`'\x10'`, "\x10"},
{`'\377'`, "\377"},
{`'\u1234'`, "\u1234"},
{`'\U00010111'`, "\U00010111"},
{`'\t'`, "\t"},
{`' '`, " "},
{`'\''`, "'"},
{`'"'`, "\""},
{"``", ``},
{"`a`", `a`},
{"`abc`", `abc`},
{"`☺`", ``},
{"`hello world`", `hello world`},
{"`\\xFF`", `\xFF`},
{"`\\377`", `\377`},
{"`\\`", `\`},
{"`\n`", "\n"},
{"` `", ` `},
{"` `", ` `},
{"`a\rb`", "ab"},
}
// misquoted lists malformed inputs that unquote must reject with
// strconv.ErrSyntax: missing/mismatched delimiters, truncated or invalid
// escapes, and literal newlines inside quoted strings.
var misquoted = []string{
``,
`"`,
`"a`,
`"'`,
`b"`,
`"\"`,
`"\9"`,
`"\19"`,
`"\129"`,
`'\'`,
`'\9'`,
`'\19'`,
`'\129'`,
// `'ab'`,
`"\x1!"`,
`"\U12345678"`,
`"\z"`,
"`",
"`xxx",
"`\"",
`"\'"`,
`'\"'`,
"\"\n\"",
"\"\\n\n\"",
"'\n'",
}
// TestUnquote drives unquote through three fixture tables: well-formed
// literals (unquotetests), the quote table in reverse (quotetests), and
// malformed inputs that must fail with strconv.ErrSyntax (misquoted).
func TestUnquote(t *testing.T) {
	t.Parallel()
	for _, tc := range unquotetests {
		out, err := unquote(tc.in)
		if err != nil || out != tc.out {
			t.Errorf("unquote(%#q) = %q, %v want %q, nil", tc.in, out, err, tc.out)
		}
	}
	// run the quote tests too, backward
	for _, tc := range quotetests {
		in, err := unquote(tc.out)
		if in != tc.in {
			t.Errorf("unquote(%#q) = %q, %v, want %q, nil", tc.out, in, err, tc.in)
		}
	}
	for _, bad := range misquoted {
		out, err := unquote(bad)
		if out != "" || !errors.Is(err, strconv.ErrSyntax) {
			t.Errorf("unquote(%#q) = %q, %v want %q, %v", bad, out, err, "", strconv.ErrSyntax)
		}
	}
}

View File

@@ -1,158 +0,0 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"errors"
"strings"
"unicode/utf8"
)
// ErrBadPattern indicates a pattern was malformed.
var ErrBadPattern = errors.New("syntax error in pattern")
// match reports whether name matches the shell pattern.
// The pattern syntax is:
//
// pattern:
// { term }
// term:
// '%' matches any sequence of non-/ characters
// '_' matches any single non-/ character
// c matches character c (c != '%', '_', '\\')
// '\\' c matches character c
//
// match requires pattern to match all of name, not just a substring.
// The only possible returned error is ErrBadPattern, when pattern
// is malformed.
func match(pattern, name string) (matched bool, err error) {
Pattern:
// Consume the pattern one chunk at a time; a chunk is a literal segment
// optionally preceded by a '%' wildcard (see scanChunk).
for len(pattern) > 0 {
var star bool
var chunk string
star, chunk, pattern = scanChunk(pattern)
if star && chunk == "" {
// Trailing * matches rest of string unless it has a /.
return !strings.ContainsRune(name, '/'), nil
}
// Look for match at current position.
t, ok, err := matchChunk(chunk, name)
// if we're the last chunk, make sure we've exhausted the name
// otherwise we'll give a false result even if we could still match
// using the star
if ok && (len(t) == 0 || len(pattern) > 0) {
name = t
continue
}
if err != nil {
return false, err
}
if star {
// Look for match skipping i+1 bytes.
// Cannot skip /.
// Backtracking: retry the chunk at each later byte offset of name.
for i := 0; i < len(name) && name[i] != '/'; i++ {
t, ok, err := matchChunk(chunk, name[i+1:])
if ok {
// if we're the last chunk, make sure we exhausted the name
if len(pattern) == 0 && len(t) > 0 {
continue
}
name = t
continue Pattern
}
if err != nil {
return false, err
}
}
}
// Before returning false with no error,
// check that the remainder of the pattern is syntactically valid.
for len(pattern) > 0 {
_, chunk, pattern = scanChunk(pattern)
if _, _, err := matchChunk(chunk, ""); err != nil {
return false, err
}
}
return false, nil
}
// Pattern fully consumed: it matches only if name is fully consumed too.
return len(name) == 0, nil
}
// scanChunk gets the next segment of pattern, which is a non-star string
// possibly preceded by a star.
// scanChunk splits off the next segment of pattern: a run of leading '%'
// wildcards (reported via star; consecutive ones collapse into a single
// star) followed by a literal chunk that extends to the next unescaped '%'.
// rest is whatever remains of the pattern after the chunk.
func scanChunk(pattern string) (star bool, chunk, rest string) {
	// Consume any leading '%' wildcards.
	for len(pattern) > 0 && pattern[0] == '%' {
		star = true
		pattern = pattern[1:]
	}

	// Find where the literal chunk ends: the next '%' that is not part of
	// a backslash escape. Escape validity itself is checked in matchChunk.
	end := len(pattern)
	for i := 0; i < len(pattern); i++ {
		switch pattern[i] {
		case '\\':
			if i+1 < len(pattern) {
				i++ // skip the escaped character
			}
		case '%':
			end = i
		}
		if end == i {
			break
		}
	}

	return star, pattern[:end], pattern[end:]
}
// matchChunk checks whether chunk matches the beginning of s.
// If so, it returns the remainder of s (after the match).
// Chunk is all single-character operators: literals, char classes, and ?.
func matchChunk(chunk, s string) (rest string, ok bool, err error) {
// failed records whether the match has failed.
// After the match fails, the loop continues on processing chunk,
// checking that the pattern is well-formed but no longer reading s.
failed := false
for len(chunk) > 0 {
// Running out of input while chunk remains is a (non-error) failure.
if !failed && len(s) == 0 {
failed = true
}
switch chunk[0] {
case '_':
// '_' matches exactly one non-'/' rune; consume a full UTF-8 rune.
if !failed {
if s[0] == '/' {
failed = true
}
_, n := utf8.DecodeRuneInString(s)
s = s[n:]
}
chunk = chunk[1:]
case '\\':
// A trailing backslash with nothing to escape is malformed.
chunk = chunk[1:]
if len(chunk) == 0 {
return "", false, ErrBadPattern
}
// Fall through to match the escaped character literally.
fallthrough
default:
// Literal byte: must equal the next byte of s.
if !failed {
if chunk[0] != s[0] {
failed = true
}
s = s[1:]
}
chunk = chunk[1:]
}
}
if failed {
return "", false, nil
}
// Success: return the unconsumed remainder of s.
return s, true, nil
}

View File

@@ -1,55 +0,0 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"errors"
"testing"
)
// MatchTest describes one match() case: pattern, subject string, the
// expected boolean result, and the expected error (nil or ErrBadPattern).
type MatchTest struct {
pattern, s string
match bool
err error
}
// matchTests exercises SQL-LIKE semantics: '%' for any non-'/' run,
// '_' for a single non-'/' rune, and backslash escaping.
var matchTests = []MatchTest{
{"abc", "abc", true, nil},
{"%", "abc", true, nil},
{"%c", "abc", true, nil},
{"a%", "a", true, nil},
{"a%", "abc", true, nil},
{"a%", "ab/c", false, nil},
{"a%/b", "abc/b", true, nil},
{"a%/b", "a/c/b", false, nil},
{"a%b%c%d%e%/f", "axbxcxdxe/f", true, nil},
{"a%b%c%d%e%/f", "axbxcxdxexxx/f", true, nil},
{"a%b%c%d%e%/f", "axbxcxdxe/xxx/f", false, nil},
{"a%b%c%d%e%/f", "axbxcxdxexxx/fff", false, nil},
{"a%b_c%x", "abxbbxdbxebxczzx", true, nil},
{"a%b_c%x", "abxbbxdbxebxczzy", false, nil},
{"a\\%b", "a%b", true, nil},
{"a\\%b", "ab", false, nil},
{"a_b", "a☺b", true, nil},
{"a___b", "a☺b", false, nil},
{"a_b", "a/b", false, nil},
{"a%b", "a/b", false, nil},
{"\\", "a", false, ErrBadPattern},
{"%x", "xxx", true, nil},
}
// TestMatch runs every fixture in matchTests through match and checks
// both the boolean result and the returned error.
func TestMatch(t *testing.T) {
	t.Parallel()
	for _, tc := range matchTests {
		got, err := match(tc.pattern, tc.s)
		badResult := got != tc.match
		badErr := !errors.Is(err, tc.err)
		if badResult || badErr {
			t.Errorf("match(%#q, %#q) = %v, %v want %v, %v", tc.pattern, tc.s, got, err, tc.match, tc.err)
		}
	}
}

78
cmd.go Normal file
View File

@@ -0,0 +1,78 @@
package main
import (
"log"
"slices"
"github.com/pocketbase/pocketbase"
"github.com/pocketbase/pocketbase/models"
"github.com/spf13/cobra"
"github.com/SecurityBrewery/catalyst/fakedata"
"github.com/SecurityBrewery/catalyst/migrations"
)
// fakeDataCmd builds the "fake-data" subcommand, which seeds app with
// generated users and tickets. The counts are configurable through the
// --users and --tickets persistent flags; generation errors are fatal.
func fakeDataCmd(app *pocketbase.PocketBase) *cobra.Command {
	var (
		userCount   int
		ticketCount int
	)

	fakeData := &cobra.Command{
		Use: "fake-data",
		Run: func(_ *cobra.Command, _ []string) {
			if err := fakedata.Generate(app, userCount, ticketCount); err != nil {
				log.Fatal(err)
			}
		},
	}

	flags := fakeData.PersistentFlags()
	flags.IntVar(&userCount, "users", 10, "Number of users to generate")
	flags.IntVar(&ticketCount, "tickets", 100, "Number of tickets to generate")

	return fakeData
}
// setFeatureFlagsCmd builds the "set-feature-flags" subcommand, which makes
// the stored feature-flag records exactly equal to the flag names given as
// CLI args: flags not listed are deleted, missing ones are created.
// All database errors are fatal.
func setFeatureFlagsCmd(app *pocketbase.PocketBase) *cobra.Command {
return &cobra.Command{
Use: "set-feature-flags",
Run: func(_ *cobra.Command, args []string) {
featureCollection, err := app.Dao().FindCollectionByNameOrId(migrations.FeatureCollectionName)
if err != nil {
log.Fatal(err)
}
featureRecords, err := app.Dao().FindRecordsByExpr(migrations.FeatureCollectionName)
if err != nil {
log.Fatal(err)
}
// existingFlags collects the names of records that survive the prune
// pass, so the second loop can skip re-creating them.
var existingFlags []string
for _, featureRecord := range featureRecords {
// remove feature flags that are not in the args
if !slices.Contains(args, featureRecord.GetString("name")) {
if err := app.Dao().DeleteRecord(featureRecord); err != nil {
log.Fatal(err)
}
continue
}
existingFlags = append(existingFlags, featureRecord.GetString("name"))
}
for _, arg := range args {
if slices.Contains(existingFlags, arg) {
continue
}
// create records for flags requested in args but not yet stored
// (the original comment said "not in the args", which was backwards)
record := models.NewRecord(featureCollection)
record.Set("name", arg)
if err := app.Dao().SaveRecord(record); err != nil {
log.Fatal(err)
}
}
},
}
}

File diff suppressed because one or more lines are too long

View File

@@ -1,79 +0,0 @@
package main
import (
"context"
"fmt"
"log"
"net/http"
"time"
"github.com/arangodb/go-driver"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/test"
)
// main boots a demo Catalyst instance: it loads config, wipes the database
// after connect, seeds test data and three demo users, proxies /ui/* to a
// local dev server, and serves HTTP on the configured port.
func main() {
log.SetFlags(log.LstdFlags | log.Lshortfile)
config, err := cmd.ParseCatalystConfig()
if err != nil {
log.Fatal(err)
}
// create app and clear db after start
theCatalyst, err := catalyst.New(&hooks.Hooks{
DatabaseAfterConnectFuncs: []func(ctx context.Context, client driver.Client, name string){test.Clear},
}, config)
if err != nil {
log.Fatal(err)
}
// Seed baseline test data as an admin "demo" user.
demoUser := &maut.User{ID: "demo", Roles: []string{maut.AdminRole}}
ctx := maut.UserContext(context.Background(), demoUser, catalyst.Admin.Permissions)
if err := test.SetupTestData(ctx, theCatalyst.DB); err != nil {
log.Fatal(err)
}
// Demo users below are created best-effort: errors are deliberately
// discarded, presumably because the users may already exist — confirm.
// avatarEve/avatarKevin are defined elsewhere in this package.
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "eve", Roles: []string{"admin"}})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "eve", &model.UserData{
Name: pointer.String("Eve"),
Email: pointer.String("eve@example.com"),
Image: &avatarEve,
})
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "kevin", Roles: []string{"admin"}})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "kevin", &model.UserData{
Name: pointer.String("Kevin"),
Email: pointer.String("kevin@example.com"),
Image: &avatarKevin,
})
// NOTE(review): "tom" reuses avatarKevin and a lowercase display name —
// possibly intentional for demo data, but verify.
_, _ = theCatalyst.DB.UserCreate(context.Background(), &model.UserForm{ID: "tom", Roles: []string{"admin"}})
_ = theCatalyst.DB.UserDataCreate(context.Background(), "tom", &model.UserData{
Name: pointer.String("tom"),
Email: pointer.String("tom@example.com"),
Image: &avatarKevin,
})
// proxy static requests
// UI assets are forwarded to the local frontend dev server.
theCatalyst.Server.Get("/ui/*", func(writer http.ResponseWriter, request *http.Request) {
log.Println("proxy request", request.URL.Path)
api.Proxy("http://localhost:8080/")(writer, request)
})
// ReadHeaderTimeout guards against slow-header (Slowloris-style) clients.
server := &http.Server{
Addr: fmt.Sprintf(":%d", config.Port),
ReadHeaderTimeout: 3 * time.Second,
Handler: theCatalyst.Server,
}
if err := server.ListenAndServe(); err != nil {
log.Fatal(err)
}
}

View File

@@ -1,42 +0,0 @@
package main
import (
"fmt"
"io/fs"
"log"
"net/http"
"time"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/ui"
)
// main boots a Catalyst instance and serves the embedded Vue UI build from
// the "dist" directory under the /ui prefix, then blocks in ListenAndServe.
func main() {
	log.SetFlags(log.LstdFlags | log.Lshortfile)

	cfg, err := cmd.ParseCatalystConfig()
	if err != nil {
		log.Fatal(err)
	}

	app, err := catalyst.New(&hooks.Hooks{}, cfg)
	if err != nil {
		log.Fatal(err)
	}

	// Mount the embedded UI; the /ui prefix is stripped before file lookup.
	uiFS, _ := fs.Sub(ui.UI, "dist")
	app.Server.Get("/ui/*", http.StripPrefix("/ui", http.HandlerFunc(api.VueStatic(uiFS))).ServeHTTP)

	srv := &http.Server{
		Addr:              fmt.Sprintf(":%d", cfg.Port),
		ReadHeaderTimeout: 3 * time.Second,
		Handler:           app.Server,
	}

	if err := srv.ListenAndServe(); err != nil {
		log.Fatal(err)
	}
}

View File

@@ -1,120 +0,0 @@
package cmd
import (
"errors"
"github.com/alecthomas/kong"
kongyaml "github.com/alecthomas/kong-yaml"
"github.com/coreos/go-oidc/v3/oidc"
maut "github.com/jonas-plum/maut/auth"
"golang.org/x/exp/slices"
"golang.org/x/oauth2"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/storage"
)
// CLI declares every configuration flag / environment variable understood by
// Catalyst. Values are populated by kong from flags, env vars, and the
// optional JSON/YAML config files (see ParseCatalystConfig).
type CLI struct {
	// Core server settings.
	Secret          string `env:"SECRET" required:"" help:"A random secret value (can be created with 'openssl rand -hex 32')"`
	ExternalAddress string `env:"EXTERNAL_ADDRESS" required:""`
	CatalystAddress string `env:"CATALYST_ADDRESS" default:"http://catalyst:8000"`
	Network         string `env:"CATALYST_NETWORK" default:"catalyst"`
	Port            int    `env:"PORT" default:"8000"`
	// Authentication and user provisioning.
	AuthBlockNew     bool     `env:"AUTH_BLOCK_NEW" default:"true" help:"Block newly created users"`
	AuthDefaultRoles []string `env:"AUTH_DEFAULT_ROLES" help:"Default roles for new users"`
	AuthAdminUsers   []string `env:"AUTH_ADMIN_USERS" help:"Username of admins"`
	InitialAPIKey    string   `env:"INITIAL_API_KEY"`
	// SimpleAuthEnable bool `env:"SIMPLE_AUTH_ENABLE" default:"true"`
	APIKeyAuthEnable bool `env:"API_KEY_AUTH_ENABLE" default:"true"`
	// OIDC settings (validated in ParseCatalystConfig when OIDCEnable is set).
	OIDCEnable        bool     `env:"OIDC_ENABLE" default:"true"`
	OIDCIssuer        string   `env:"OIDC_ISSUER"`
	AuthURL           string   `env:"OIDC_AUTH_URL"`
	OIDCClientID      string   `env:"OIDC_CLIENT_ID" default:"catalyst"`
	OIDCClientSecret  string   `env:"OIDC_CLIENT_SECRET"`
	OIDCScopes        []string `env:"OIDC_SCOPES" help:"Additional scopes, ['oidc', 'profile', 'email'] are always added." placeholder:"customscopes"`
	OIDCClaimUsername string   `env:"OIDC_CLAIM_USERNAME" default:"preferred_username" help:"username field in the OIDC claim"`
	OIDCClaimEmail    string   `env:"OIDC_CLAIM_EMAIL" default:"email" help:"email field in the OIDC claim"`
	OIDCClaimName     string   `env:"OIDC_CLAIM_NAME" default:"name" help:"name field in the OIDC claim"`
	// Index and backing stores (ArangoDB, S3-compatible storage).
	IndexPath        string `env:"INDEX_PATH" default:"index.bleve" help:"Path for the bleve index"`
	ArangoDBHost     string `env:"ARANGO_DB_HOST" default:"http://arangodb:8529"`
	ArangoDBUser     string `env:"ARANGO_DB_USER" default:"root"`
	ArangoDBPassword string `env:"ARANGO_DB_PASSWORD" required:""`
	S3Host           string `env:"S3_HOST" default:"http://minio:9000" name:"s3-host"`
	S3User           string `env:"S3_USER" default:"minio" name:"s3-user"`
	S3Password       string `env:"S3_PASSWORD" required:"" name:"s3-password"`
	S3Region         string `env:"S3_REGION" default:"us-east-1" name:"s3-region"`
	Version          kong.VersionFlag `help:"Show version."`
}
// ParseCatalystConfig parses flags, environment variables, and the optional
// /etc/catalyst.{json,yaml} and .catalyst.{json,yaml} config files into a
// catalyst.Config. When OIDC is enabled, the issuer and client secret must
// be set.
//
// NOTE: kong.Parse terminates the process itself on flag/config errors, so
// this function only returns errors from the OIDC validation and MapConfig.
func ParseCatalystConfig() (*catalyst.Config, error) {
	var cli CLI
	kong.Parse(
		&cli,
		kong.Configuration(kong.JSON, "/etc/catalyst.json", ".catalyst.json"),
		kong.Configuration(kongyaml.Loader, "/etc/catalyst.yaml", ".catalyst.yaml"),
		kong.Vars{"version": catalyst.GetVersion()},
	)
	if cli.OIDCEnable {
		if cli.OIDCIssuer == "" {
			return nil, errors.New("OIDC issuer not set")
		}
		if cli.OIDCClientSecret == "" {
			return nil, errors.New("OIDC client secret is required")
		}
	}
	return MapConfig(cli)
}
// MapConfig converts parsed CLI values into a catalyst.Config.
//
// The OIDC scopes "openid", "profile" and "email" are always requested;
// scopes from cli.OIDCScopes are appended afterwards. Duplicates are dropped
// while preserving first-occurrence order. (The previous implementation used
// slices.Compact on the unsorted slice, which only removes *adjacent*
// duplicates, so a repeated scope passed via OIDC_SCOPES survived.)
func MapConfig(cli CLI) (*catalyst.Config, error) {
	scopes := []string{oidc.ScopeOpenID, "profile", "email"}
	for _, scope := range cli.OIDCScopes {
		if !slices.Contains(scopes, scope) {
			scopes = append(scopes, scope)
		}
	}
	config := &catalyst.Config{
		IndexPath: cli.IndexPath,
		Network:   cli.Network,
		DB: &database.Config{
			Host:     cli.ArangoDBHost,
			User:     cli.ArangoDBUser,
			Password: cli.ArangoDBPassword,
		},
		Storage:         &storage.Config{Host: cli.S3Host, User: cli.S3User, Region: cli.S3Region, Password: cli.S3Password},
		ExternalAddress: cli.ExternalAddress,
		InternalAddress: cli.CatalystAddress,
		Port:            cli.Port,
		Auth: &maut.Config{
			CookieSecret:     []byte(cli.Secret),
			SimpleAuthEnable: false, // cli.SimpleAuthEnable,
			APIKeyAuthEnable: cli.APIKeyAuthEnable,
			OIDCAuthEnable:   cli.OIDCEnable,
			// InitialUser:     "",
			// InitialPassword: "",
			InitialAPIKey: cli.InitialAPIKey,
			OIDCIssuer:    cli.OIDCIssuer,
			AuthURL:       cli.AuthURL,
			OAuth2: &oauth2.Config{
				ClientID:     cli.OIDCClientID,
				ClientSecret: cli.OIDCClientSecret,
				RedirectURL:  cli.ExternalAddress + "/auth/callback",
				Scopes:       scopes,
			},
			UserCreateConfig: &maut.UserCreateConfig{
				AuthBlockNew:      cli.AuthBlockNew,
				AuthDefaultRoles:  cli.AuthDefaultRoles,
				AuthAdminUsers:    cli.AuthAdminUsers,
				OIDCClaimUsername: cli.OIDCClaimUsername,
				OIDCClaimEmail:    cli.OIDCClaimEmail,
				OIDCClaimName:     cli.OIDCClaimName,
			},
		},
	}
	return config, nil
}

View File

@@ -1,164 +0,0 @@
// Adapted from https://github.com/philopon/go-toposort under the MIT License
// Original License:
//
// Copyright (c) 2017 Hirotomo Moriwaki
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package dag
import (
"errors"
"sort"
"golang.org/x/exp/maps"
"golang.org/x/exp/slices"
)
// Graph is a directed graph over string node names. For every node it tracks
// the set of outgoing edges and the count of incoming edges.
type Graph struct {
	// nodes lists all registered node names (insertion order; Toposort sorts it).
	nodes []string
	// outputs maps node -> set of its direct children.
	outputs map[string]map[string]struct{}
	// inputs maps node -> number of parents (incoming edge count).
	inputs map[string]int
}
// NewGraph returns an empty Graph, ready for AddNode/AddEdge calls.
func NewGraph() *Graph {
	g := Graph{
		nodes:   make([]string, 0),
		inputs:  map[string]int{},
		outputs: map[string]map[string]struct{}{},
	}
	return &g
}
// AddNode registers name as a node with no edges. It returns an error if the
// node already exists; in that case the graph is left unchanged. (Previously
// the name was appended to g.nodes *before* the duplicate check, so a failed
// AddNode still corrupted the node list with a duplicate entry.)
func (g *Graph) AddNode(name string) error {
	if _, ok := g.outputs[name]; ok {
		return errors.New("duplicate detected")
	}
	g.nodes = append(g.nodes, name)
	g.outputs[name] = make(map[string]struct{})
	g.inputs[name] = 0
	return nil
}
// AddNodes registers every given name via AddNode, stopping at the first error.
func (g *Graph) AddNodes(names ...string) error {
	for i := range names {
		if err := g.AddNode(names[i]); err != nil {
			return err
		}
	}
	return nil
}
// AddEdge adds a directed edge from -> to. Both endpoints must have been
// registered via AddNode. Adding the same edge twice is a no-op.
//
// Previously an unknown "to" was silently accepted, leaving a phantom entry
// in g.inputs that made Toposort report a spurious cycle; a duplicate edge
// likewise incremented inputs[to] twice while the edge set stored it once,
// corrupting the leftover-count cycle check.
func (g *Graph) AddEdge(from, to string) error {
	m, ok := g.outputs[from]
	if !ok {
		return errors.New("node does not exist")
	}
	if _, ok := g.inputs[to]; !ok {
		return errors.New("node does not exist")
	}
	if _, dup := m[to]; dup {
		// Edge already present; counting it again would desync inputs[to].
		return nil
	}
	m[to] = struct{}{}
	g.inputs[to]++
	return nil
}
// Toposort returns the nodes in topological order using Kahn's algorithm,
// visiting ready nodes in lexicographic order so the result is stable.
// It returns an error if the graph contains a cycle.
//
// Both outputs and inputs are copied before being consumed, so the receiver
// stays usable after the sort (previously g.inputs was destroyed, breaking
// e.g. GetRoot when called after Toposort).
func (g *Graph) Toposort() ([]string, error) {
	// Work on copies so the graph itself is not mutated.
	outputs := map[string]map[string]struct{}{}
	for key, value := range g.outputs {
		outputs[key] = map[string]struct{}{}
		for k, v := range value {
			outputs[key][k] = v
		}
	}
	inputs := make(map[string]int, len(g.inputs))
	for key, value := range g.inputs {
		inputs[key] = value
	}

	result := make([]string, 0, len(g.nodes))
	ready := make([]string, 0, len(g.nodes))

	sort.Strings(g.nodes)
	for _, n := range g.nodes {
		if inputs[n] == 0 {
			ready = append(ready, n)
		}
	}

	for len(ready) > 0 {
		var n string
		n, ready = ready[0], ready[1:]
		result = append(result, n)

		// Release children in sorted order for a deterministic result.
		// NOTE: the previous version built the child list with
		// make([]string, len(...)) followed by append, prefixing it with empty
		// strings; each "" then decremented inputs[""] below zero, which could
		// offset real leftover counts and mask a genuine cycle.
		keys := maps.Keys(outputs[n])
		slices.Sort(keys)
		for _, m := range keys {
			delete(outputs[n], m)
			inputs[m]--
			if inputs[m] == 0 {
				ready = append(ready, m)
			}
		}
	}

	// Any edge never released means its target is part of a cycle.
	for _, v := range inputs {
		if v > 0 {
			return result, errors.New("cycle detected")
		}
	}

	return result, nil
}
// GetParents returns the sorted list of nodes that have an edge into id.
func (g *Graph) GetParents(id string) []string {
	var parents []string
	for candidate, children := range g.outputs {
		if _, ok := children[id]; ok {
			parents = append(parents, candidate)
		}
	}
	sort.Strings(parents)
	return parents
}
// GetRoot returns the single node with no incoming edges. It returns an
// error when there is no root or more than one. (The previous message
// claimed "more than one root" even when there was none.)
func (g *Graph) GetRoot() (string, error) {
	var roots []string
	for n, parents := range g.inputs {
		if parents == 0 {
			roots = append(roots, n)
		}
	}
	switch len(roots) {
	case 1:
		return roots[0], nil
	case 0:
		return "", errors.New("no root")
	default:
		return "", errors.New("more than one root")
	}
}

View File

@@ -1,256 +0,0 @@
// Adapted from https://github.com/philopon/go-toposort under the MIT License
// Original License:
//
// Copyright (c) 2017 Hirotomo Moriwaki
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package dag_test
import (
"reflect"
"testing"
"github.com/stretchr/testify/assert"
"golang.org/x/exp/slices"
"github.com/SecurityBrewery/catalyst/dag"
)
// Edge is a from->to pair used to describe test graphs declaratively.
type Edge struct {
	From string
	To   string
}

// Adding the same node twice must fail.
func TestDuplicatedNode(t *testing.T) {
	t.Parallel()
	graph := dag.NewGraph()
	assert.NoError(t, graph.AddNode("a"))
	assert.Error(t, graph.AddNode("a"))
}

// The classic topological-sort example graph; verifies that every edge's
// source appears before its target in the sorted result.
func TestWikipedia(t *testing.T) {
	t.Parallel()
	graph := dag.NewGraph()
	assert.NoError(t, graph.AddNodes("2", "3", "5", "7", "8", "9", "10", "11"))
	edges := []Edge{
		{"7", "8"},
		{"7", "11"},
		{"5", "11"},
		{"3", "8"},
		{"3", "10"},
		{"11", "2"},
		{"11", "9"},
		{"11", "10"},
		{"8", "9"},
	}
	for _, e := range edges {
		assert.NoError(t, graph.AddEdge(e.From, e.To))
	}
	result, err := graph.Toposort()
	if err != nil {
		t.Errorf("closed path detected in no closed pathed graph")
	}
	for _, e := range edges {
		if i, j := slices.Index(result, e.From), slices.Index(result, e.To); i > j {
			t.Errorf("dependency failed: not satisfy %v(%v) > %v(%v)", e.From, i, e.To, j)
		}
	}
}

// A 3-cycle must be reported as an error by Toposort.
func TestCycle(t *testing.T) {
	t.Parallel()
	graph := dag.NewGraph()
	assert.NoError(t, graph.AddNodes("1", "2", "3"))
	assert.NoError(t, graph.AddEdge("1", "2"))
	assert.NoError(t, graph.AddEdge("2", "3"))
	assert.NoError(t, graph.AddEdge("3", "1"))
	if _, err := graph.Toposort(); err == nil {
		t.Errorf("closed path not detected in closed pathed graph")
	}
}

// Table-driven check of GetParents for simple chain and fan-in graphs.
func TestGraph_GetParents(t *testing.T) {
	t.Parallel()
	type fields struct {
		nodes []string
		edges map[string]string
	}
	type args struct {
		id string
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   []string
	}{
		{"parents 2", fields{nodes: []string{"1", "2", "3"}, edges: map[string]string{"1": "2", "2": "3"}}, args{id: "2"}, []string{"1"}},
		{"parents 3", fields{nodes: []string{"1", "2", "3"}, edges: map[string]string{"1": "3", "2": "3"}}, args{id: "3"}, []string{"1", "2"}},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			g := dag.NewGraph()
			for _, node := range tt.fields.nodes {
				assert.NoError(t, g.AddNode(node))
			}
			for from, to := range tt.fields.edges {
				assert.NoError(t, g.AddEdge(from, to))
			}
			if got := g.GetParents(tt.args.id); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetParents() = %v, want %v", got, tt.want)
			}
		})
	}
}

// Same duplicate check as TestDuplicatedNode, via a local variable.
func TestDAG_AddNode(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	v := "1"
	assert.NoError(t, dag.AddNode(v))
	assert.Error(t, dag.AddNode(v))
}

// Adding edges updates the parent relation as observed through GetParents.
func TestDAG_AddEdge(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNode("0"))
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	// add a single edge and inspect the graph
	assert.NoError(t, dag.AddEdge("1", "2"))
	if parents := dag.GetParents("2"); len(parents) != 1 {
		t.Errorf("GetParents(v2) = %d, want 1", len(parents))
	}
	assert.NoError(t, dag.AddEdge("2", "3"))
	_ = dag.AddEdge("0", "1")
}

// Two nodes with edges into "3" yield two parents.
func TestDAG_GetParents(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	_ = dag.AddEdge("1", "3")
	_ = dag.AddEdge("2", "3")
	parents := dag.GetParents("3")
	if length := len(parents); length != 2 {
		t.Errorf("GetParents(v3) = %d, want 2", length)
	}
}

// NOTE(review): this test only builds the graph — it never calls a
// GetDescendants method or asserts anything beyond setup succeeding.
func TestDAG_GetDescendants(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddNode("4"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("2", "3"))
	assert.NoError(t, dag.AddEdge("2", "4"))
}

// A small diamond-free DAG sorts into the expected fixed order.
func TestDAG_Topsort(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddNode("4"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("2", "3"))
	assert.NoError(t, dag.AddEdge("2", "4"))
	desc, _ := dag.Toposort()
	assert.Equal(t, desc, []string{"1", "2", "3", "4"})
}

// Toposort must break ties lexicographically (stable ordering).
func TestDAG_TopsortStable(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("1", "3"))
	desc, _ := dag.Toposort()
	assert.Equal(t, desc, []string{"1", "2", "3"})
}

// Stable ordering on a realistic playbook-shaped graph.
func TestDAG_TopsortStable2(t *testing.T) {
	t.Parallel()
	dag := dag.NewGraph()
	assert.NoError(t, dag.AddNodes("block-ioc", "block-iocs", "block-sender", "board", "fetch-iocs", "escalate", "extract-iocs", "mail-available", "search-email-gateway"))
	assert.NoError(t, dag.AddEdge("block-iocs", "block-ioc"))
	assert.NoError(t, dag.AddEdge("block-sender", "extract-iocs"))
	assert.NoError(t, dag.AddEdge("board", "escalate"))
	assert.NoError(t, dag.AddEdge("board", "mail-available"))
	assert.NoError(t, dag.AddEdge("fetch-iocs", "block-iocs"))
	assert.NoError(t, dag.AddEdge("extract-iocs", "fetch-iocs"))
	assert.NoError(t, dag.AddEdge("mail-available", "block-sender"))
	assert.NoError(t, dag.AddEdge("mail-available", "extract-iocs"))
	assert.NoError(t, dag.AddEdge("mail-available", "search-email-gateway"))
	assert.NoError(t, dag.AddEdge("search-email-gateway", "extract-iocs"))
	sorted, err := dag.Toposort()
	assert.NoError(t, err)
	want := []string{"board", "escalate", "mail-available", "block-sender", "search-email-gateway", "extract-iocs", "fetch-iocs", "block-iocs", "block-ioc"}
	assert.Equal(t, want, sorted)
}

View File

@@ -1,102 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// ArtifactGet loads the artifact named name from the ticket with the given
// id, applying the ticket filter hook to the lookup.
//
// NOTE(review): this read path uses db.Hooks.TicketWriteFilter (not a read
// filter) and binds ID as a string via fmt.Sprint, while ArtifactUpdate and
// EnrichArtifact bind the raw int64 — confirm both are intended.
func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*model.Artifact, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	// Look the ticket up by key, apply the filter, then return the first
	// artifact whose name matches.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a in NOT_NULL(d.artifacts, [])
FILTER a.name == @name
RETURN a`
	cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]any{
		"@collection": TicketCollectionName,
		"ID":          fmt.Sprint(id),
		"name":        name,
	}), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var doc model.Artifact
	_, err = cursor.ReadDocument(ctx, &doc)
	if err != nil {
		return nil, err
	}
	return &doc, nil
}

// ArtifactUpdate replaces the artifact named name on ticket id with the given
// artifact and returns the updated ticket. A bus update is published for the
// ticket document.
func (db *Database) ArtifactUpdate(ctx context.Context, id int64, name string, artifact *model.Artifact) (*model.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	// Remove the matching artifact from the list and append the replacement.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), @artifact)
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
		"@collection": TicketCollectionName,
		"ID":          id,
		"name":        name,
		"artifact":    artifact,
	}, ticketFilterVars), &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
	})
}

// EnrichArtifact attaches an enrichment (timestamped with the current UTC
// time) to the artifact named name on ticket id, keyed by the enrichment
// name, and returns the updated ticket.
func (db *Database) EnrichArtifact(ctx context.Context, id int64, name string, enrichmentForm *model.EnrichmentForm) (*model.TicketWithTickets, error) {
	enrichment := model.Enrichment{Created: time.Now().UTC(), Data: enrichmentForm.Data, Name: enrichmentForm.Name}
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	// Merge the new enrichment into the artifact's enrichment map, then swap
	// the artifact in place on the ticket.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET enrichments = NOT_NULL(a.enrichments, {})
LET newenrichments = MERGE(enrichments, ZIP( [@enrichmentname], [@enrichment]) )
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), MERGE(a, { "enrichments": newenrichments }))
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
		"@collection":    TicketCollectionName,
		"ID":             id,
		"name":           name,
		"enrichmentname": enrichment.Name,
		"enrichment":     enrichment,
	}, ticketFilterVars), &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
	})
}

View File

@@ -1,100 +0,0 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toAutomation converts a form into the stored Automation document
// (the form's ID becomes the document key, not a field).
func toAutomation(doc *model.AutomationForm) *model.Automation {
	return &model.Automation{
		Image:  doc.Image,
		Script: doc.Script,
		Schema: doc.Schema,
		Type:   doc.Type,
	}
}

// toAutomationResponse combines a document key and stored Automation into the
// API response shape.
func toAutomationResponse(id string, doc model.Automation) *model.AutomationResponse {
	return &model.AutomationResponse{
		ID:     id,
		Image:  doc.Image,
		Script: doc.Script,
		Schema: doc.Schema,
		Type:   doc.Type,
	}
}

// AutomationCreate stores a new automation under the form's ID and returns
// the created document. Both automation and its ID are required.
func (db *Database) AutomationCreate(ctx context.Context, automation *model.AutomationForm) (*model.AutomationResponse, error) {
	if automation == nil {
		return nil, errors.New("requires automation")
	}
	if automation.ID == "" {
		return nil, errors.New("requires automation ID")
	}
	var doc model.Automation
	// newctx captures the created document via driver.WithReturnNew; the
	// first context argument is ignored by the busdb collection wrapper.
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.automationCollection.CreateDocument(ctx, newctx, automation.ID, toAutomation(automation))
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, doc), nil
}

// AutomationGet loads a single automation by its document key.
func (db *Database) AutomationGet(ctx context.Context, id string) (*model.AutomationResponse, error) {
	var doc model.Automation
	meta, err := db.automationCollection.ReadDocument(ctx, id, &doc)
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, doc), nil
}

// AutomationUpdate fully replaces the automation stored under id.
func (db *Database) AutomationUpdate(ctx context.Context, id string, automation *model.AutomationForm) (*model.AutomationResponse, error) {
	var doc model.Automation
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.automationCollection.ReplaceDocument(ctx, id, toAutomation(automation))
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, doc), nil
}

// AutomationDelete removes the automation stored under id.
func (db *Database) AutomationDelete(ctx context.Context, id string) error {
	_, err := db.automationCollection.RemoveDocument(ctx, id)
	return err
}

// AutomationList returns all automations sorted by key. The 'script' field
// is stripped from the listing (UNSET in the query), so each response's
// Script is empty.
func (db *Database) AutomationList(ctx context.Context) ([]*model.AutomationResponse, error) {
	query := "FOR d IN @@collection SORT d._key ASC RETURN UNSET(d, 'script')"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": AutomationCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.AutomationResponse
	for {
		var doc model.Automation
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toAutomationResponse(meta.Key, doc))
	}
	// err here is the outer (nil) query error, not the loop-scoped read error.
	return docs, err
}

View File

@@ -1,195 +0,0 @@
package busdb
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/api"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// BusDatabase wraps a driver.Database and publishes change notifications on
// the bus when documents are created or updated.
//
// Intended responsibilities:
// 1. Save entry to log
// 2. Send update ticket to bus
// 3. Add document to index
// NOTE(review): of these, only the bus publishing is implemented below; the
// index field is commented out and Query never writes a log entry.
type BusDatabase struct {
	internal      driver.Database
	logCollection driver.Collection
	bus           *bus.Bus
	// index *index.Index
}

// NewDatabase wires a BusDatabase around the given driver database and bus.
// It fails if the log collection does not exist.
func NewDatabase(ctx context.Context, internal driver.Database, b *bus.Bus) (*BusDatabase, error) {
	logCollection, err := internal.Collection(ctx, LogCollectionName)
	if err != nil {
		return nil, err
	}
	return &BusDatabase{
		internal:      internal,
		logCollection: logCollection,
		bus:           b,
	}, nil
}

// Operation describes what a query does to the database, so Query can decide
// whether to publish a bus notification and for which document IDs.
type Operation struct {
	Type bus.DatabaseUpdateType
	Ids  []driver.DocumentID
}

// Predefined operations without document IDs.
var (
	CreateOperation = &Operation{Type: bus.DatabaseEntryCreated}
	ReadOperation   = &Operation{Type: bus.DatabaseEntryRead}
)

// Query runs an AQL query. For create/update operations the affected IDs are
// published on the database channel. The returned *model.LogEntry is always
// nil in the current implementation. Errors are mapped via toHTTPErr.
func (db *BusDatabase) Query(ctx context.Context, query string, vars map[string]any, operation *Operation) (cur driver.Cursor, logs *model.LogEntry, err error) {
	defer func() { err = toHTTPErr(err) }()
	cur, err = db.internal.Query(ctx, query, vars)
	if err != nil {
		return nil, nil, err
	}
	switch {
	case operation.Type == bus.DatabaseEntryCreated, operation.Type == bus.DatabaseEntryUpdated:
		db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: operation.Ids, Type: operation.Type})
	}
	return cur, logs, err
}

// Remove drops the underlying database.
func (db *BusDatabase) Remove(ctx context.Context) (err error) {
	defer func() { err = toHTTPErr(err) }()
	return db.internal.Remove(ctx)
}

// Collection returns the named raw driver collection.
func (db *BusDatabase) Collection(ctx context.Context, name string) (col driver.Collection, err error) {
	defer func() { err = toHTTPErr(err) }()
	return db.internal.Collection(ctx, name)
}

// Collection is a typed wrapper around a driver collection that publishes
// bus notifications for mutating operations.
type Collection[T any] struct {
	internal driver.Collection
	db       *BusDatabase
}

// NewCollection wraps internal for documents of type T.
func NewCollection[T any](internal driver.Collection, db *BusDatabase) *Collection[T] {
	return &Collection[T]{internal: internal, db: db}
}

// CreateDocument stores document under key and publishes a created event.
// The first context parameter is ignored; newctx is used (callers typically
// pass a driver.WithReturnNew context there).
func (c *Collection[T]) CreateDocument(_, newctx context.Context, key string, document *T) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	meta, err = c.internal.CreateDocument(newctx, &Keyed[T]{Key: key, Doc: document})
	if err != nil {
		return meta, err
	}
	c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryCreated})
	return meta, nil
}

// CreateEdge stores a single edge document and publishes a created event.
// As with CreateDocument, the first context parameter is ignored.
func (c *Collection[T]) CreateEdge(_, newctx context.Context, edge *driver.EdgeDocument) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	meta, err = c.internal.CreateDocument(newctx, edge)
	if err != nil {
		return meta, err
	}
	c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryCreated})
	return meta, nil
}

// CreateEdges stores multiple edge documents, failing on the first per-document
// error, and publishes one created event covering all new IDs.
func (c *Collection[T]) CreateEdges(ctx context.Context, edges []*driver.EdgeDocument) (meta driver.DocumentMetaSlice, err error) {
	defer func() { err = toHTTPErr(err) }()
	metas, errs, err := c.internal.CreateDocuments(ctx, edges)
	if err != nil {
		return nil, err
	}
	if errs.FirstNonNil() != nil {
		return nil, errs.FirstNonNil()
	}
	var ids []driver.DocumentID
	for _, meta := range metas {
		ids = append(ids, meta.ID)
	}
	c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: ids, Type: bus.DatabaseEntryCreated})
	return metas, nil
}

// DocumentExists reports whether a document with the given id exists.
func (c *Collection[T]) DocumentExists(ctx context.Context, id string) (exists bool, err error) {
	defer func() { err = toHTTPErr(err) }()
	return c.internal.DocumentExists(ctx, id)
}

// ReadDocument loads the document stored under key into result.
func (c *Collection[T]) ReadDocument(ctx context.Context, key string, result *T) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	meta, err = c.internal.ReadDocument(ctx, key, result)
	return
}

// UpdateDocument patches the document under key and publishes an updated event.
func (c *Collection[T]) UpdateDocument(ctx context.Context, key string, update any) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	meta, err = c.internal.UpdateDocument(ctx, key, update)
	if err != nil {
		return meta, err
	}
	c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryUpdated})
	return meta, nil
}

// ReplaceDocument fully replaces the document under key and publishes an
// updated event.
func (c *Collection[T]) ReplaceDocument(ctx context.Context, key string, document *T) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	meta, err = c.internal.ReplaceDocument(ctx, key, document)
	if err != nil {
		return meta, err
	}
	c.db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{IDs: []driver.DocumentID{meta.ID}, Type: bus.DatabaseEntryUpdated})
	return meta, nil
}

// RemoveDocument deletes the document under the given key.
// NOTE(review): no bus event is published for deletions, unlike the other
// mutating operations — confirm this is intended.
func (c *Collection[T]) RemoveDocument(ctx context.Context, formatInt string) (meta driver.DocumentMeta, err error) {
	defer func() { err = toHTTPErr(err) }()
	return c.internal.RemoveDocument(ctx, formatInt)
}

// Truncate removes all documents from the collection.
func (c *Collection[T]) Truncate(ctx context.Context) (err error) {
	defer func() { err = toHTTPErr(err) }()
	return c.internal.Truncate(ctx)
}

// toHTTPErr maps driver.ArangoError values onto api.HTTPError, preserving the
// Arango status code; all other errors (and nil) pass through unchanged.
func toHTTPErr(err error) error {
	if err != nil {
		ae := driver.ArangoError{}
		if errors.As(err, &ae) {
			return &api.HTTPError{Status: ae.Code, Internal: err}
		}
		return err
	}
	return nil
}

View File

@@ -1,25 +0,0 @@
package busdb
import "encoding/json"
type Keyed[T any] struct {
Key string
Doc *T
}
func (p *Keyed[T]) MarshalJSON() ([]byte, error) {
b, err := json.Marshal(p.Doc)
if err != nil {
panic(err)
}
var m map[string]any
err = json.Unmarshal(b, &m)
if err != nil {
panic(err)
}
m["_key"] = p.Key
return json.Marshal(m)
}

View File

@@ -1,90 +0,0 @@
package busdb
import (
"context"
"errors"
"strings"
"github.com/arangodb/go-driver"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// LogCollectionName is the ArangoDB collection that stores log entries.
const LogCollectionName = "logs"

// LogCreate stores a single log entry attributed to the maut user in ctx
// (required) with the current UTC timestamp, and returns the stored entry.
func (db *BusDatabase) LogCreate(ctx context.Context, logType, reference, message string) (*model.LogEntry, error) {
	user, _, ok := maut.UserFromContext(ctx)
	if !ok {
		return nil, errors.New("no user in context")
	}
	logentry := &model.LogEntry{
		Type:      logType,
		Reference: reference,
		Created:   time.Now().UTC(),
		Creator:   user.ID,
		Message:   message,
	}
	doc := model.LogEntry{}
	_, err := db.logCollection.CreateDocument(driver.WithReturnNew(ctx, &doc), logentry)
	if err != nil {
		return nil, err
	}
	return &doc, nil
}

// LogBatchCreate stores many log entries at once and notifies the bus about
// every entry that references a ticket.
//
// NOTE(review): the bus publish is fired (in a goroutine) *before*
// CreateDocuments runs, so consumers may be notified about entries whose
// write subsequently fails — confirm this ordering is intended.
func (db *BusDatabase) LogBatchCreate(ctx context.Context, logentries []*model.LogEntry) error {
	var ids []driver.DocumentID
	for _, entry := range logentries {
		if strings.HasPrefix(entry.Reference, "tickets/") {
			ids = append(ids, driver.DocumentID(entry.Reference))
		}
	}
	if ids != nil {
		go db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{
			IDs:  ids,
			Type: bus.DatabaseEntryCreated,
		})
	}
	_, errs, err := db.logCollection.CreateDocuments(ctx, logentries)
	if err != nil {
		return err
	}
	err = errs.FirstNonNil()
	if err != nil {
		return err
	}
	return nil
}

// LogList returns all log entries for reference, newest first. It queries
// db.internal directly, bypassing the Query wrapper (no bus publish, no
// toHTTPErr mapping).
func (db *BusDatabase) LogList(ctx context.Context, reference string) ([]*model.LogEntry, error) {
	query := "FOR d IN @@collection FILTER d.reference == @reference SORT d.created DESC RETURN d"
	cursor, err := db.internal.Query(ctx, query, map[string]any{
		"@collection": LogCollectionName,
		"reference":   reference,
	})
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.LogEntry
	for {
		var doc model.LogEntry
		_, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, &doc)
	}
	// err here is the outer (nil) query error, not the loop-scoped read error.
	return docs, err
}

View File

@@ -1,119 +0,0 @@
package database
import (
"context"
"errors"
"fmt"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
func toDashboardResponse(key string, doc *model.Dashboard) *model.DashboardResponse {
return &model.DashboardResponse{
ID: key,
Name: doc.Name,
Widgets: doc.Widgets,
}
}
func (db *Database) DashboardCreate(ctx context.Context, dashboard *model.Dashboard) (*model.DashboardResponse, error) {
if dashboard == nil {
return nil, errors.New("requires dashboard")
}
if dashboard.Name == "" {
return nil, errors.New("requires dashboard name")
}
if err := db.parseWidgets(dashboard); err != nil {
return nil, err
}
var doc model.Dashboard
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.dashboardCollection.CreateDocument(ctx, newctx, strcase.ToKebab(dashboard.Name), dashboard)
if err != nil {
return nil, err
}
return toDashboardResponse(meta.Key, &doc), nil
}
func (db *Database) DashboardGet(ctx context.Context, id string) (*model.DashboardResponse, error) {
var doc model.Dashboard
meta, err := db.dashboardCollection.ReadDocument(ctx, id, &doc)
if err != nil {
return nil, err
}
return toDashboardResponse(meta.Key, &doc), nil
}
func (db *Database) DashboardUpdate(ctx context.Context, id string, dashboard *model.Dashboard) (*model.DashboardResponse, error) {
if err := db.parseWidgets(dashboard); err != nil {
return nil, err
}
var doc model.Dashboard
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.dashboardCollection.ReplaceDocument(ctx, id, dashboard)
if err != nil {
return nil, err
}
return toDashboardResponse(meta.Key, &doc), nil
}
func (db *Database) DashboardDelete(ctx context.Context, id string) error {
_, err := db.dashboardCollection.RemoveDocument(ctx, id)
return err
}
// DashboardList returns all stored dashboards as API responses.
func (db *Database) DashboardList(ctx context.Context) ([]*model.DashboardResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": DashboardCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*model.DashboardResponse
	for {
		var doc model.Dashboard
		meta, err := cursor.ReadDocument(ctx, &doc)
		// The driver signals exhaustion with a sentinel error.
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toDashboardResponse(meta.Key, &doc))
	}

	// The outer err is necessarily nil here (checked right after Query);
	// return nil explicitly instead of the stale variable for clarity.
	return docs, nil
}
// parseWidgets verifies that every widget's aggregation query and optional
// filter query are syntactically valid CAQL; it returns an error describing
// the first invalid query.
func (db *Database) parseWidgets(dashboard *model.Dashboard) error {
	for _, widget := range dashboard.Widgets {
		parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}

		if _, err := parser.Parse(widget.Aggregation); err != nil {
			return fmt.Errorf("invalid aggregation query (%s): syntax error", widget.Aggregation)
		}

		if widget.Filter == nil {
			continue
		}

		if _, err := parser.Parse(*widget.Filter); err != nil {
			return fmt.Errorf("invalid filter query (%s): syntax error", *widget.Filter)
		}
	}

	return nil
}

View File

@@ -1,241 +0,0 @@
package database
import (
"context"
"errors"
"fmt"
"log"
"time"
"github.com/arangodb/go-driver"
"github.com/arangodb/go-driver/http"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/index"
)
// Names of the database, its collections, and the ticket graph in ArangoDB.
const (
	// Name is the default database name, used when Config.Name is empty.
	Name = "catalyst"
	TicketCollectionName = "tickets"
	TemplateCollectionName = "templates"
	PlaybookCollectionName = "playbooks"
	AutomationCollectionName = "automations"
	UserDataCollectionName = "userdata"
	UserCollectionName = "users"
	TicketTypeCollectionName = "tickettypes"
	JobCollectionName = "jobs"
	SettingsCollectionName = "settings"
	DashboardCollectionName = "dashboards"
	// TicketArtifactsGraphName is the named graph over ticket relations.
	TicketArtifactsGraphName = "Graph"
	// RelatedTicketsCollectionName is the edge collection of that graph.
	RelatedTicketsCollectionName = "related"
)
// Database bundles the bus-aware ArangoDB handle with typed collection
// wrappers for every Catalyst collection plus the search index and hooks.
type Database struct {
	*busdb.BusDatabase
	Index *index.Index // full-text/search index, also used for CAQL parsing
	bus   *bus.Bus     // event bus used by the busdb collection wrappers
	Hooks *hooks.Hooks

	// Typed collection handles; each wraps a raw driver collection so that
	// writes publish bus events.
	templateCollection   *busdb.Collection[model.TicketTemplate]
	ticketCollection     *busdb.Collection[model.Ticket]
	playbookCollection   *busdb.Collection[model.PlaybookTemplate]
	automationCollection *busdb.Collection[model.Automation]
	userdataCollection   *busdb.Collection[model.UserData]
	userCollection       *busdb.Collection[model.User]
	tickettypeCollection *busdb.Collection[model.TicketType]
	jobCollection        *busdb.Collection[model.Job]
	settingsCollection   *busdb.Collection[model.Settings]
	dashboardCollection  *busdb.Collection[model.Dashboard]
	relatedCollection    *busdb.Collection[driver.EdgeDocument]
	// containsCollection *busdb.Collection
}
// Config holds the ArangoDB connection settings.
type Config struct {
	Host     string // endpoint URL of the ArangoDB server
	User     string
	Password string
	Name     string // database name; falls back to the Name constant if empty
}
// New connects to ArangoDB (retrying until the context deadline), ensures the
// database exists and all migrations have run, and wires up the typed
// collection handles.
func New(ctx context.Context, index *index.Index, bus *bus.Bus, hooks *hooks.Hooks, config *Config) (*Database, error) {
	// Fall back to the default database name.
	name := config.Name
	if config.Name == "" {
		name = Name
	}
	// Retry the connection every 10 seconds until it succeeds or the context
	// deadline passes.
	var err error
	var client driver.Client
	for {
		deadline, ok := ctx.Deadline()
		if ok && time.Until(deadline) < 0 {
			return nil, context.DeadlineExceeded
		}
		client, err = getClient(ctx, config)
		if err == nil {
			break
		}
		if errors.Is(err, context.DeadlineExceeded) {
			return nil, errors.New("could not load database, connection timed out")
		}
		log.Printf("could not connect to database: %s, retrying in 10 seconds\n", err)
		time.Sleep(time.Second * 10)
	}
	hooks.DatabaseAfterConnect(ctx, client, name)
	// Create the database (if needed) and run any pending migrations; the
	// migrations also create the collections fetched below.
	arangoDB, err := SetupDB(ctx, client, name)
	if err != nil {
		return nil, fmt.Errorf("DB setup failed: %w", err)
	}
	if err = migrations.PerformMigrations(ctx, arangoDB); err != nil {
		return nil, fmt.Errorf("migrations failed: %w", err)
	}
	// Open a raw handle for every collection.
	ticketCollection, err := arangoDB.Collection(ctx, TicketCollectionName)
	if err != nil {
		return nil, err
	}
	templateCollection, err := arangoDB.Collection(ctx, TemplateCollectionName)
	if err != nil {
		return nil, err
	}
	playbookCollection, err := arangoDB.Collection(ctx, PlaybookCollectionName)
	if err != nil {
		return nil, err
	}
	relatedCollection, err := arangoDB.Collection(ctx, RelatedTicketsCollectionName)
	if err != nil {
		return nil, err
	}
	automationCollection, err := arangoDB.Collection(ctx, AutomationCollectionName)
	if err != nil {
		return nil, err
	}
	userdataCollection, err := arangoDB.Collection(ctx, UserDataCollectionName)
	if err != nil {
		return nil, err
	}
	userCollection, err := arangoDB.Collection(ctx, UserCollectionName)
	if err != nil {
		return nil, err
	}
	tickettypeCollection, err := arangoDB.Collection(ctx, TicketTypeCollectionName)
	if err != nil {
		return nil, err
	}
	jobCollection, err := arangoDB.Collection(ctx, JobCollectionName)
	if err != nil {
		return nil, err
	}
	settingsCollection, err := arangoDB.Collection(ctx, SettingsCollectionName)
	if err != nil {
		return nil, err
	}
	dashboardCollection, err := arangoDB.Collection(ctx, DashboardCollectionName)
	if err != nil {
		return nil, err
	}
	// Wrap the raw database so operations can publish bus events.
	hookedDB, err := busdb.NewDatabase(ctx, arangoDB, bus)
	if err != nil {
		return nil, err
	}
	db := &Database{
		BusDatabase: hookedDB,
		bus: bus,
		Index: index,
		Hooks: hooks,
		templateCollection: busdb.NewCollection[model.TicketTemplate](templateCollection, hookedDB),
		ticketCollection: busdb.NewCollection[model.Ticket](ticketCollection, hookedDB),
		playbookCollection: busdb.NewCollection[model.PlaybookTemplate](playbookCollection, hookedDB),
		automationCollection: busdb.NewCollection[model.Automation](automationCollection, hookedDB),
		userdataCollection: busdb.NewCollection[model.UserData](userdataCollection, hookedDB),
		userCollection: busdb.NewCollection[model.User](userCollection, hookedDB),
		tickettypeCollection: busdb.NewCollection[model.TicketType](tickettypeCollection, hookedDB),
		jobCollection: busdb.NewCollection[model.Job](jobCollection, hookedDB),
		settingsCollection: busdb.NewCollection[model.Settings](settingsCollection, hookedDB),
		dashboardCollection: busdb.NewCollection[model.Dashboard](dashboardCollection, hookedDB),
		relatedCollection: busdb.NewCollection[driver.EdgeDocument](relatedCollection, hookedDB),
	}
	return db, nil
}
// getClient opens an authenticated ArangoDB client for the configured host
// and verifies connectivity with a version request.
func getClient(ctx context.Context, config *Config) (driver.Client, error) {
	connection, err := http.NewConnection(http.ConnectionConfig{Endpoints: []string{config.Host}})
	if err != nil {
		return nil, err
	}

	arangoClient, err := driver.NewClient(driver.ClientConfig{
		Connection:     connection,
		Authentication: driver.BasicAuthentication(config.User, config.Password),
	})
	if err != nil {
		return nil, err
	}

	// A version call proves the endpoint is reachable and the credentials
	// are accepted before the client is handed out.
	if _, err = arangoClient.Version(ctx); err != nil {
		return nil, err
	}

	return arangoClient, nil
}
// SetupDB opens (creating if necessary) the named database and makes sure the
// migration bookkeeping collection exists.
func SetupDB(ctx context.Context, client driver.Client, dbName string) (driver.Database, error) {
	databaseExists, err := client.DatabaseExists(ctx, dbName)
	if err != nil {
		return nil, fmt.Errorf("could not check if database exists: %w", err)
	}
	var db driver.Database
	if !databaseExists {
		db, err = client.CreateDatabase(ctx, dbName, nil)
	} else {
		db, err = client.Database(ctx, dbName)
	}
	if err != nil {
		return nil, fmt.Errorf("could not create database: %w", err)
	}
	collectionExists, err := db.CollectionExists(ctx, migrations.MigrationCollection)
	if err != nil {
		return nil, fmt.Errorf("could not check if collection exists: %w", err)
	}
	if !collectionExists {
		// User keys are allowed because migration IDs are used as keys.
		// NOTE(review): a creation failure here is only logged, not returned
		// — presumably best-effort to tolerate a concurrent setup; confirm.
		if _, err := db.CreateCollection(ctx, migrations.MigrationCollection, &driver.CreateCollectionOptions{
			KeyOptions: &driver.CollectionKeyOptions{AllowUserKeys: true},
		}); err != nil {
			log.Println(err)
		}
	}
	return db, nil
}
// Truncate empties every Catalyst collection. Errors are deliberately
// ignored (best effort).
func (db *Database) Truncate(ctx context.Context) {
	_ = db.templateCollection.Truncate(ctx)
	_ = db.ticketCollection.Truncate(ctx)
	_ = db.playbookCollection.Truncate(ctx)
	_ = db.automationCollection.Truncate(ctx)
	_ = db.userdataCollection.Truncate(ctx)
	_ = db.userCollection.Truncate(ctx)
	_ = db.tickettypeCollection.Truncate(ctx)
	_ = db.jobCollection.Truncate(ctx)
	_ = db.relatedCollection.Truncate(ctx)
	_ = db.settingsCollection.Truncate(ctx)
	_ = db.dashboardCollection.Truncate(ctx)
	// db.containsCollection.Truncate(ctx)
}

View File

@@ -1,230 +0,0 @@
package database
import (
"context"
"encoding/json"
"errors"
"fmt"
"github.com/arangodb/go-driver"
"github.com/docker/docker/client"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toJob converts a submitted job form into a fresh job document that starts
// in the running "created" state.
func toJob(doc *model.JobForm) *model.Job {
	job := &model.Job{
		Automation: doc.Automation,
		Payload:    doc.Payload,
		Origin:     doc.Origin,
		Running:    true,
		Status:     "created",
	}

	return job
}
// toJobResponse converts a job document into an API response. For running
// jobs it queries the local Docker daemon for the container state (the job
// key is used as the container identifier) and, when update is true,
// persists detected status changes back to the database.
func (db *Database) toJobResponse(ctx context.Context, key string, doc *model.Job, update bool) (*model.JobResponse, error) {
	cli, err := client.NewClientWithOpts(client.FromEnv)
	if err != nil {
		return nil, err
	}
	defer cli.Close()
	status := doc.Status
	if doc.Running {
		inspect, err := cli.ContainerInspect(ctx, key)
		if err != nil || inspect.State == nil {
			// Container is gone or unreadable: mark the job as no longer
			// running; update errors are deliberately ignored (best effort).
			if update {
				_, _ = db.JobUpdate(ctx, key, &model.JobUpdate{
					Status: doc.Status,
					Running: false,
				})
			}
		} else if doc.Status != inspect.State.Status {
			// Docker reports a newer status than the stored one; prefer it
			// in the response and optionally persist it.
			status = inspect.State.Status
			if update {
				_, _ = db.JobUpdate(ctx, key, &model.JobUpdate{
					Status: status,
					Running: doc.Running,
				})
			}
		}
	}
	return &model.JobResponse{
		Automation: doc.Automation,
		ID: key,
		Log: doc.Log,
		Payload: doc.Payload,
		Origin: doc.Origin,
		Output: doc.Output,
		Status: status,
		Container: doc.Container,
	}, nil
}
// JobCreate stores a new job document under the given id and returns it as
// an API response (including live container state).
func (db *Database) JobCreate(ctx context.Context, id string, job *model.JobForm) (*model.JobResponse, error) {
	if job == nil {
		return nil, errors.New("requires job")
	}
	// newctx makes the driver write the stored document back into doc
	// (driver.WithReturnNew); both contexts are passed to the collection
	// wrapper, mirroring DashboardCreate.
	var doc model.Job
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.jobCollection.CreateDocument(ctx, newctx, id, toJob(job))
	if err != nil {
		return nil, err
	}
	return db.toJobResponse(ctx, meta.Key, &doc, true)
}
// JobGet loads a single job by its document key and returns it as an API
// response (including live container state).
func (db *Database) JobGet(ctx context.Context, id string) (*model.JobResponse, error) {
	var job model.Job

	meta, err := db.jobCollection.ReadDocument(ctx, id, &job)
	if err != nil {
		return nil, err
	}

	return db.toJobResponse(ctx, meta.Key, &job, true)
}
// JobUpdate applies a partial update to the job stored under id and returns
// the new state as an API response.
func (db *Database) JobUpdate(ctx context.Context, id string, job *model.JobUpdate) (*model.JobResponse, error) {
	// Have the driver write the updated document back into doc.
	var doc model.Job
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.jobCollection.UpdateDocument(ctx, id, job)
	if err != nil {
		return nil, err
	}
	return db.toJobResponse(ctx, meta.Key, &doc, true)
}
// JobLogAppend appends logLine to the job's log field via an AQL update and
// tags the query with a DatabaseEntryUpdated operation for the job document.
func (db *Database) JobLogAppend(ctx context.Context, id string, logLine string) error {
	// NOT_NULL treats a missing log field as the empty string.
	query := `LET d = DOCUMENT(@@collection, @ID)
UPDATE d WITH { "log": CONCAT(NOT_NULL(d.log, ""), @logline) } IN @@collection`
	cur, _, err := db.Query(ctx, query, map[string]any{
		"@collection": JobCollectionName,
		"ID": id,
		"logline": logLine,
	}, &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
		},
	})
	if err != nil {
		return err
	}
	defer cur.Close()
	return nil
}
// JobComplete stores the job's output and marks it completed and no longer
// running, tagging the query with a DatabaseEntryUpdated operation for the
// job document.
func (db *Database) JobComplete(ctx context.Context, id string, out any) error {
	query := `LET d = DOCUMENT(@@collection, @ID)
UPDATE d WITH { "output": @out, "status": "completed", "running": false } IN @@collection`
	cur, _, err := db.Query(ctx, query, map[string]any{
		"@collection": JobCollectionName,
		"ID": id,
		"out": out,
	}, &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
		},
	})
	if err != nil {
		return err
	}
	defer cur.Close()
	return nil
}
// JobDelete removes the job stored under id.
func (db *Database) JobDelete(ctx context.Context, id string) error {
	if _, err := db.jobCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}

	return nil
}
// JobList returns all stored jobs as API responses. Container state is read
// without persisting status changes (update=false).
func (db *Database) JobList(ctx context.Context) ([]*model.JobResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": JobCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*model.JobResponse
	for {
		var doc model.Job
		meta, err := cursor.ReadDocument(ctx, &doc)
		// The driver signals exhaustion with a sentinel error.
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}

		job, err := db.toJobResponse(ctx, meta.Key, &doc, false)
		if err != nil {
			return nil, err
		}
		docs = append(docs, job)
	}

	// The outer err is necessarily nil here (checked right after Query);
	// return nil explicitly instead of the stale variable for clarity.
	return docs, nil
}
// publishJobMapping renders the payload mapping against the given context
// structures and publishes the resulting job message on the job bus channel.
func publishJobMapping(id, automation string, contextStructs *model.Context, origin *model.Origin, payloadMapping map[string]string, db *Database) error {
	payload, err := generatePayload(payloadMapping, contextStructs)
	if err != nil {
		return fmt.Errorf("message generation failed: %w", err)
	}

	jobMsg := &bus.JobMsg{
		ID:         id,
		Automation: automation,
		Origin:     origin,
		Message: &model.Message{
			Context: contextStructs,
			Payload: payload,
		},
	}
	db.bus.JobChannel.Publish(jobMsg)

	return nil
}
// generatePayload evaluates each CAQL expression in msgMapping against the
// JSON form of contextStructs and returns the resulting argument map.
func generatePayload(msgMapping map[string]string, contextStructs *model.Context) (map[string]any, error) {
	// Round-trip the context through JSON to obtain the generic map the
	// CAQL evaluator operates on.
	contextJSON, err := json.Marshal(contextStructs)
	if err != nil {
		return nil, err
	}

	automationContext := map[string]any{}
	if err := json.Unmarshal(contextJSON, &automationContext); err != nil {
		return nil, err
	}

	parser := caql.Parser{}
	payload := map[string]any{}
	for arg, expr := range msgMapping {
		tree, parseErr := parser.Parse(expr)
		if parseErr != nil {
			return nil, parseErr
		}

		value, evalErr := tree.Eval(automationContext)
		if evalErr != nil {
			return nil, evalErr
		}

		payload[arg] = value
	}

	return payload, nil
}

View File

@@ -1,25 +0,0 @@
#!/usr/bin/env python
import subprocess
import sys

# Install the only third-party dependency at runtime; output is suppressed so
# it cannot pollute the JSON result printed below.
subprocess.call(
    [sys.executable, "-m", "pip", "install", "requests"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)

import json
import requests


def run(msg):
    """Post the automation payload as a comment on the originating ticket.

    Reads API credentials from msg["secrets"] and the ticket id from
    msg["context"]; returns {"done": True} regardless of the HTTP outcome.
    """
    if "ticket" in msg["context"]:
        headers = {"PRIVATE-TOKEN": msg["secrets"]["catalyst_apikey"]}
        url = "%s/tickets/%d/comments" % (msg["secrets"]["catalyst_apiurl"], msg["context"]["ticket"]["id"])
        data = {'message': msg["payload"]["default"], 'creator': 'automation'}
        # NOTE(review): the response is parsed but discarded and HTTP errors
        # are not checked — presumably best effort; confirm.
        requests.post(url, json=data, headers=headers).json()
    return {"done": True}


# Entry point: the automation message arrives as JSON in argv[1]; the result
# is printed as JSON on stdout.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -1,13 +0,0 @@
#!/usr/bin/env python
import sys
import json
import hashlib
def run(msg):
    """Return the SHA-1 hex digest of the message's default payload string."""
    payload_bytes = msg['payload']['default'].encode('utf-8')
    return {"hash": hashlib.sha1(payload_bytes).hexdigest()}
# Entry point: read the automation message as JSON from argv[1] and print the
# result as JSON on stdout.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env python
import subprocess
import sys

# Install the only third-party dependency at runtime; output is suppressed so
# it cannot pollute the JSON result printed below.
subprocess.call(
    [sys.executable, "-m", "pip", "install", "requests"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)

import json
import requests


def run(msg):
    """Query the VirusTotal v2 file report API for the payload hash.

    Reads the API key from msg['secrets']['vt_api_key'] and the resource
    (hash) from msg['payload']['default']; returns the parsed JSON report.
    """
    api_key = msg['secrets']['vt_api_key'].encode('utf-8')
    resource = msg['payload']['default'].encode('utf-8')
    params = {'apikey': api_key, 'resource': resource}
    return requests.get("https://www.virustotal.com/vtapi/v2/file/report", params=params).json()


# Entry point: read the automation message as JSON from argv[1] and print the
# result as JSON on stdout.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -1,21 +0,0 @@
package migrations

import _ "embed"

// DefaultTemplateSchema is the JSON schema of the default ticket template.
//
//go:embed templates/default.json
var DefaultTemplateSchema string

// SHA1HashAutomation is the Python script for the hash.sha1 automation.
//
//go:embed automations/hash.sha1.py
var SHA1HashAutomation string

// VTHashAutomation is the Python script for the vt.hash automation.
//
//go:embed automations/vt.hash.py
var VTHashAutomation string

// CommentAutomation is the Python script for the comment automation.
//
//go:embed automations/comment.py
var CommentAutomation string

// PhishingPlaybook is the YAML of the bundled phishing playbook.
//
//go:embed playbooks/phishing.yml
var PhishingPlaybook string

// SimplePlaybook is the YAML of the bundled simple playbook.
//
//go:embed playbooks/simple.yaml
var SimplePlaybook string
View File

@@ -1,247 +0,0 @@
package migrations
import (
"context"
"fmt"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
)
// MigrationCollection is the collection recording which migrations have run.
const MigrationCollection string = "migrations"

// Migration is a single database change that is applied at most once.
type Migration interface {
	// MID returns the migration's unique, stable identifier.
	MID() string
	// Migrate applies the change to the given database.
	Migrate(ctx context.Context, driver driver.Database) error
}
func generateMigrations() ([]Migration, error) {
// content here should never change
return []Migration{
&createCollection{ID: "create-log-collection", Name: "logs", DataType: "log", Schema: `{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"},"reference":{"type":"string"}},"required":["created","creator","message","reference"],"type":"object"}`},
&createCollection{ID: "create-ticket-collection", Name: "tickets", DataType: "ticket", Schema: `{"properties":{"artifacts":{"items":{"properties":{"enrichments":{"additionalProperties":{"properties":{"created":{"format":"date-time","type":"string"},"data":{"example":{"hash":"b7a067a742c20d07a7456646de89bc2d408a1153"},"properties":{},"type":"object"},"name":{"example":"hash.sha1","type":"string"}},"required":["created","data","name"],"type":"object"},"type":"object"},"name":{"example":"2.2.2.2","type":"string"},"status":{"example":"Unknown","type":"string"},"type":{"type":"string"}},"required":["name"],"type":"object"},"type":"array"},"comments":{"items":{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"}},"required":["created","creator","message"],"type":"object"},"type":"array"},"created":{"format":"date-time","type":"string"},"details":{"example":{"description":"my little incident"},"properties":{},"type":"object"},"files":{"items":{"properties":{"key":{"example":"myfile","type":"string"},"name":{"example":"notes.docx","type":"string"}},"required":["key","name"],"type":"object"},"type":"array"},"modified":{"format":"date-time","type":"string"},"name":{"example":"WannyCry","type":"string"},"owner":{"example":"bob","type":"string"},"playbooks":{"additionalProperties":{"properties":{"name":{"example":"Phishing","type":"string"},"tasks":{"additionalProperties":{"properties":{"automation":{"type":"string"},"closed":{"format":"date-time","type":"string"},"created":{"format":"date-time","type":"string"},"data":{"properties":{},"type":"object"},"done":{"type":"boolean"},"join":{"example":false,"type":"boolean"},"payload":{"additionalProperties":{"type":"string"},"type":"object"},"name":{"example":"Inform 
user","type":"string"},"next":{"additionalProperties":{"type":"string"},"type":"object"},"owner":{"type":"string"},"schema":{"properties":{},"type":"object"},"type":{"enum":["task","input","automation"],"example":"task","type":"string"}},"required":["created","done","name","type"],"type":"object"},"type":"object"}},"required":["name","tasks"],"type":"object"},"type":"object"},"read":{"example":["bob"],"items":{"type":"string"},"type":"array"},"references":{"items":{"properties":{"href":{"example":"https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2017-0144","type":"string"},"name":{"example":"CVE-2017-0144","type":"string"}},"required":["href","name"],"type":"object"},"type":"array"},"schema":{"example":"{}","type":"string"},"status":{"example":"open","type":"string"},"type":{"example":"incident","type":"string"},"write":{"example":["alice"],"items":{"type":"string"},"type":"array"}},"required":["created","modified","name","schema","status","type"],"type":"object"}`},
&createCollection{ID: "create-template-collection", Name: "templates", DataType: "template", Schema: `{"properties":{"name":{"type":"string"},"schema":{"type":"string"}},"required":["name","schema"],"type":"object"}`},
&createCollection{ID: "create-playbook-collection", Name: "playbooks", DataType: "playbook", Schema: `{"properties":{"name":{"type":"string"},"yaml":{"type":"string"}},"required":["name","yaml"],"type":"object"}`},
&createCollection{ID: "create-automation-collection", Name: "automations", DataType: "automation", Schema: `{"properties":{"image":{"type":"string"},"script":{"type":"string"}},"required":["image","script"],"type":"object"}`},
&createCollection{ID: "create-userdata-collection", Name: "userdata", DataType: "userdata", Schema: `{"properties":{"email":{"type":"string"},"image":{"type":"string"},"name":{"type":"string"},"timeformat":{"title":"Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)","type":"string"}},"type":"object"}`},
&createCollection{ID: "create-tickettype-collection", Name: "tickettypes", DataType: "tickettype", Schema: `{"properties":{"default_groups":{"items":{"type":"string"},"type":"array"},"default_playbooks":{"items":{"type":"string"},"type":"array"},"default_template":{"type":"string"},"icon":{"type":"string"},"name":{"type":"string"}},"required":["default_playbooks","default_template","icon","name"],"type":"object"}`},
&createCollection{ID: "create-user-collection", Name: "users", DataType: "user", Schema: `{"properties":{"apikey":{"type":"boolean"},"blocked":{"type":"boolean"},"roles":{"items":{"type":"string"},"type":"array"},"sha256":{"type":"string"}},"required":["apikey","blocked","roles"],"type":"object"}`},
&createGraph{ID: "create-ticket-graph", Name: "Graph", EdgeDefinitions: []driver.EdgeDefinition{{Collection: "related", From: []string{"tickets"}, To: []string{"tickets"}}}},
&createDocument[busdb.Keyed[model.TicketTemplate]]{ID: "create-template-default", Collection: "templates", Document: &busdb.Keyed[model.TicketTemplate]{Key: "default", Doc: &model.TicketTemplate{Schema: DefaultTemplateSchema, Name: "Default"}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-vt.hash", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "vt.hash", Doc: &model.Automation{Image: "docker.io/python:3", Script: VTHashAutomation}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-comment", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "comment", Doc: &model.Automation{Image: "docker.io/python:3", Script: CommentAutomation}}},
&createDocument[busdb.Keyed[model.Automation]]{ID: "create-automation-hash.sha1", Collection: "automations", Document: &busdb.Keyed[model.Automation]{Key: "hash.sha1", Doc: &model.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation}}},
&createDocument[busdb.Keyed[model.PlaybookTemplate]]{ID: "create-playbook-phishing", Collection: "playbooks", Document: &busdb.Keyed[model.PlaybookTemplate]{Key: "phishing", Doc: &model.PlaybookTemplate{Name: "Phishing", Yaml: PhishingPlaybook}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-alert", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "alert", Doc: &model.TicketType{Name: "Alerts", Icon: "mdi-alert", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-incident", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "incident", Doc: &model.TicketType{Name: "Incidents", Icon: "mdi-radioactive", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-investigation", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "investigation", Doc: &model.TicketType{Name: "Forensic Investigations", Icon: "mdi-fingerprint", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&createDocument[busdb.Keyed[model.TicketType]]{ID: "create-tickettype-hunt", Collection: "tickettypes", Document: &busdb.Keyed[model.TicketType]{Key: "hunt", Doc: &model.TicketType{Name: "Threat Hunting", Icon: "mdi-target", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
&updateSchema{ID: "update-automation-collection-1", Name: "automations", DataType: "automation", Schema: `{"properties":{"image":{"type":"string"},"script":{"type":"string"}},"required":["image","script"],"type":"object"}`},
&updateDocument[model.Automation]{ID: "update-automation-vt.hash-1", Collection: "automations", Key: "vt.hash", Document: &model.Automation{Image: "docker.io/python:3", Script: VTHashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&updateDocument[model.Automation]{ID: "update-automation-comment-1", Collection: "automations", Key: "comment", Document: &model.Automation{Image: "docker.io/python:3", Script: CommentAutomation, Type: []string{"playbook"}}},
&updateDocument[model.Automation]{ID: "update-automation-hash.sha1-1", Collection: "automations", Key: "hash.sha1", Document: &model.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
&createCollection{ID: "create-job-collection", Name: "jobs", DataType: "job", Schema: `{"properties":{"automation":{"type":"string"},"log":{"type":"string"},"payload":{},"origin":{"properties":{"artifact_origin":{"properties":{"artifact":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["artifact","ticket_id"],"type":"object"},"task_origin":{"properties":{"playbook_id":{"type":"string"},"task_id":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["playbook_id","task_id","ticket_id"],"type":"object"}},"type":"object"},"output":{"properties":{},"type":"object"},"running":{"type":"boolean"},"status":{"type":"string"}},"required":["automation","running","status"],"type":"object"}`},
&createDocument[busdb.Keyed[model.PlaybookTemplate]]{ID: "create-playbook-simple", Collection: "playbooks", Document: &busdb.Keyed[model.PlaybookTemplate]{Key: "simple", Doc: &model.PlaybookTemplate{Name: "Simple", Yaml: SimplePlaybook}}},
&createCollection{ID: "create-settings-collection", Name: "settings", DataType: "settings", Schema: `{"type":"object","properties":{"artifactStates":{"title":"Artifact States","items":{"type":"object","properties":{"color":{"title":"Color","type":"string","enum":["error","info","success","warning"]},"icon":{"title":"Icon (https://materialdesignicons.com)","type":"string"},"id":{"title":"ID","type":"string"},"name":{"title":"Name","type":"string"}},"required":["id","name","icon"]},"type":"array"},"artifactKinds":{"title":"Artifact Kinds","items":{"type":"object","properties":{"color":{"title":"Color","type":"string","enum":["error","info","success","warning"]},"icon":{"title":"Icon (https://materialdesignicons.com)","type":"string"},"id":{"title":"ID","type":"string"},"name":{"title":"Name","type":"string"}},"required":["id","name","icon"]},"type":"array"},"timeformat":{"title":"Time Format","type":"string"}},"required":["timeformat","artifactKinds","artifactStates"]}`},
&createDocument[busdb.Keyed[model.Settings]]{ID: "create-settings-global", Collection: "settings", Document: &busdb.Keyed[model.Settings]{Key: "global", Doc: &model.Settings{ArtifactStates: []*model.Type{{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(model.TypeColorInfo)}, {Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(model.TypeColorError)}, {Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(model.TypeColorSuccess)}}, ArtifactKinds: []*model.Type{{Icon: "mdi-server", ID: "asset", Name: "Asset"}, {Icon: "mdi-bullseye", ID: "ioc", Name: "IOC"}}, Timeformat: "YYYY-MM-DDThh:mm:ss"}}},
&updateSchema{ID: "update-ticket-collection", Name: "tickets", DataType: "ticket", Schema: `{"properties":{"artifacts":{"items":{"properties":{"enrichments":{"additionalProperties":{"properties":{"created":{"format":"date-time","type":"string"},"data":{"example":{"hash":"b7a067a742c20d07a7456646de89bc2d408a1153"},"properties":{},"type":"object"},"name":{"example":"hash.sha1","type":"string"}},"required":["created","data","name"],"type":"object"},"type":"object"},"name":{"example":"2.2.2.2","type":"string"},"status":{"example":"Unknown","type":"string"},"type":{"type":"string"},"kind":{"type":"string"}},"required":["name"],"type":"object"},"type":"array"},"comments":{"items":{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"}},"required":["created","creator","message"],"type":"object"},"type":"array"},"created":{"format":"date-time","type":"string"},"details":{"example":{"description":"my little incident"},"properties":{},"type":"object"},"files":{"items":{"properties":{"key":{"example":"myfile","type":"string"},"name":{"example":"notes.docx","type":"string"}},"required":["key","name"],"type":"object"},"type":"array"},"modified":{"format":"date-time","type":"string"},"name":{"example":"WannyCry","type":"string"},"owner":{"example":"bob","type":"string"},"playbooks":{"additionalProperties":{"properties":{"name":{"example":"Phishing","type":"string"},"tasks":{"additionalProperties":{"properties":{"automation":{"type":"string"},"closed":{"format":"date-time","type":"string"},"created":{"format":"date-time","type":"string"},"data":{"properties":{},"type":"object"},"done":{"type":"boolean"},"join":{"example":false,"type":"boolean"},"payload":{"additionalProperties":{"type":"string"},"type":"object"},"name":{"example":"Inform 
user","type":"string"},"next":{"additionalProperties":{"type":"string"},"type":"object"},"owner":{"type":"string"},"schema":{"properties":{},"type":"object"},"type":{"enum":["task","input","automation"],"example":"task","type":"string"}},"required":["created","done","name","type"],"type":"object"},"type":"object"}},"required":["name","tasks"],"type":"object"},"type":"object"},"read":{"example":["bob"],"items":{"type":"string"},"type":"array"},"references":{"items":{"properties":{"href":{"example":"https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2017-0144","type":"string"},"name":{"example":"CVE-2017-0144","type":"string"}},"required":["href","name"],"type":"object"},"type":"array"},"schema":{"example":"{}","type":"string"},"status":{"example":"open","type":"string"},"type":{"example":"incident","type":"string"},"write":{"example":["alice"],"items":{"type":"string"},"type":"array"}},"required":["created","modified","name","schema","status","type"],"type":"object"}`},
&createCollection{ID: "create-dashboard-collection", Name: "dashboards", DataType: "dashboards", Schema: `{"type":"object","properties":{"name":{"type":"string"},"widgets":{"items":{"type":"object","properties":{"aggregation":{"type":"string"},"filter":{"type":"string"},"name":{"type":"string"},"type":{"enum":[ "bar", "line", "pie" ]},"width": { "type": "integer", "minimum": 1, "maximum": 12 }},"required":["name","aggregation", "type", "width"]},"type":"array"}},"required":["name","widgets"]}`},
&updateDocument[model.Settings]{ID: "update-settings-global-1", Collection: "settings", Key: "global", Document: &model.Settings{ArtifactStates: []*model.Type{{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(model.TypeColorInfo)}, {Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(model.TypeColorError)}, {Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(model.TypeColorSuccess)}}, ArtifactKinds: []*model.Type{{Icon: "mdi-server", ID: "asset", Name: "Asset"}, {Icon: "mdi-bullseye", ID: "ioc", Name: "IOC"}}, Timeformat: "yyyy-MM-dd hh:mm:ss"}},
&mapRoles{ID: "simplify-roles"},
}, nil
}
// loadSchema parses a JSON schema into strict collection schema options whose
// validation failure message names the given data type.
func loadSchema(dataType, jsonschema string) (*driver.CollectionSchemaOptions, error) {
	options := &driver.CollectionSchemaOptions{
		Level:   driver.CollectionSchemaLevelStrict,
		Message: fmt.Sprintf("Validation of %s failed", dataType),
	}
	err := options.LoadRule([]byte(jsonschema))

	return options, err
}
// migration is the bookkeeping document stored for each executed migration;
// its document key is the migration ID.
type migration struct {
	Key string `json:"_key"`
}
// PerformMigrations applies every migration that is not yet recorded in the
// migration collection, recording each one after it succeeds so it never
// runs twice.
func PerformMigrations(ctx context.Context, db driver.Database) error {
	bookkeeping, err := db.Collection(ctx, MigrationCollection)
	if err != nil {
		return err
	}

	all, err := generateMigrations()
	if err != nil {
		return fmt.Errorf("could not generate migrations: %w", err)
	}

	for _, m := range all {
		alreadyRan, err := bookkeeping.DocumentExists(ctx, m.MID())
		if err != nil {
			return err
		}
		if alreadyRan {
			continue
		}

		if err := m.Migrate(ctx, db); err != nil {
			return fmt.Errorf("migration %s failed: %w", m.MID(), err)
		}

		// Record the migration so it is skipped on the next run.
		if _, err := bookkeeping.CreateDocument(ctx, &migration{Key: m.MID()}); err != nil {
			return fmt.Errorf("could not save %s migration document: %w", m.MID(), err)
		}
	}

	return nil
}
type createCollection struct {
ID string
Name string
DataType string
Schema string
}
// MID returns the unique migration ID.
func (m *createCollection) MID() string {
return m.ID
}
// Migrate creates the collection m.Name with a strict JSON schema derived
// from m.Schema; validation messages reference m.DataType.
func (m *createCollection) Migrate(ctx context.Context, db driver.Database) error {
	opts, err := loadSchema(m.DataType, m.Schema)
	if err != nil {
		return err
	}
	_, createErr := db.CreateCollection(ctx, m.Name, &driver.CreateCollectionOptions{Schema: opts})
	return createErr
}
// updateSchema is a migration that replaces the JSON schema of an existing
// collection.
type updateSchema struct {
ID string // unique migration ID
Name string // existing collection to update
DataType string // human-readable data type used in validation messages
Schema string // new JSON schema
}
// MID returns the unique migration ID.
func (m *updateSchema) MID() string {
return m.ID
}
// Migrate loads the new schema and applies it to the existing collection
// m.Name via SetProperties.
func (m *updateSchema) Migrate(ctx context.Context, db driver.Database) error {
	opts, err := loadSchema(m.DataType, m.Schema)
	if err != nil {
		return err
	}
	collection, err := db.Collection(ctx, m.Name)
	if err != nil {
		return err
	}
	return collection.SetProperties(ctx, driver.SetCollectionPropertiesOptions{
		Schema: opts,
	})
}
// createGraph is a migration that creates a named graph from edge definitions.
type createGraph struct {
ID string // unique migration ID
Name string // graph name
EdgeDefinitions []driver.EdgeDefinition // edge collections and their vertex sets
}
// MID returns the unique migration ID.
func (m *createGraph) MID() string {
return m.ID
}
// Migrate creates the named graph with the configured edge definitions.
func (m *createGraph) Migrate(ctx context.Context, db driver.Database) error {
	options := &driver.CreateGraphOptions{EdgeDefinitions: m.EdgeDefinitions}
	_, err := db.CreateGraph(ctx, m.Name, options)
	return err
}
// createDocument is a migration that inserts a single document into a
// collection.
type createDocument[T any] struct {
ID string // unique migration ID
Collection string // target collection
Document *T // document to insert
}
// MID returns the unique migration ID.
func (m *createDocument[T]) MID() string {
return m.ID
}
// Migrate inserts m.Document into the collection m.Collection.
//
// The database parameter is renamed from "driver" to "db": the old name
// shadowed the imported driver package inside the body.
func (m *createDocument[T]) Migrate(ctx context.Context, db driver.Database) error {
	collection, err := db.Collection(ctx, m.Collection)
	if err != nil {
		return err
	}
	_, err = collection.CreateDocument(ctx, m.Document)
	return err
}
// updateDocument is a migration that upserts a single keyed document in a
// collection.
type updateDocument[T any] struct {
ID string // unique migration ID
Collection string // target collection
Key string // document key to replace (when it exists)
Document *T // new document content
}
// MID returns the unique migration ID.
func (m *updateDocument[T]) MID() string {
return m.ID
}
// Migrate upserts m.Document in collection m.Collection: it replaces the
// document under m.Key when it exists, otherwise creates the document.
//
// The database parameter is renamed from "driver" to "db": the old name
// shadowed the imported driver package inside the body.
func (m *updateDocument[T]) Migrate(ctx context.Context, db driver.Database) error {
	collection, err := db.Collection(ctx, m.Collection)
	if err != nil {
		return err
	}
	exists, err := collection.DocumentExists(ctx, m.Key)
	if err != nil {
		return err
	}
	if !exists {
		// NOTE(review): the create path does not pass m.Key, so the new
		// document gets an auto-generated key — confirm the document type
		// carries its own _key, otherwise later lookups by m.Key will miss.
		_, err = collection.CreateDocument(ctx, m.Document)
		return err
	}
	_, err = collection.ReplaceDocument(ctx, m.Key, m.Document)
	return err
}
// mapRoles is a one-off migration that flattens user role objects to plain
// role names (see its Migrate method).
type mapRoles struct {
ID string // unique migration ID
}
// MID returns the unique migration ID.
func (m mapRoles) MID() string {
return m.ID
}
// Migrate rewrites each user's roles from role objects to the role names
// (u.roles[*].name) with a single AQL update over the users collection.
//
// The database parameter is renamed from "driver" to "db": the old name
// shadowed the imported driver package inside the body.
func (m mapRoles) Migrate(ctx context.Context, db driver.Database) error {
	_, err := db.Query(ctx, "FOR u IN users UPDATE u WITH {roles: u.roles[*].name} IN users", nil)
	return err
}

View File

@@ -1,85 +0,0 @@
# Phishing response playbook: triage board involvement, recover the mail,
# then block the sender and extract/block IOCs.
# NOTE(review): the original file lost its indentation (all keys were flush
# left, which is invalid YAML); nesting reconstructed from the key order and
# the playbook schema (tasks -> name/description/type/schema/next).
name: Phishing
tasks:
  board:
    name: Board Involvement?
    description: Is a board member involved?
    type: input
    schema:
      properties:
        boardInvolved:
          default: false
          title: A board member is involved.
          type: boolean
      required:
        - boardInvolved
      title: Board Involvement?
      type: object
    next:
      escalate: "boardInvolved == true"
      mail-available: "boardInvolved == false"
  escalate:
    name: Escalate to CISO
    description: Please escalate the task to the CISO
    type: task
  mail-available:
    name: Mail available
    type: input
    schema:
      oneOf:
        - properties:
            mail:
              title: Mail
              type: string
              x-display: textarea
            schemaKey:
              const: 'yes'
              type: string
          required:
            - mail
          title: 'Yes'
        - properties:
            schemaKey:
              const: 'no'
              type: string
          title: 'No'
      title: Mail available
      type: object
    next:
      block-sender: "schemaKey == 'yes'"
      extract-iocs: "schemaKey == 'yes'"
      search-email-gateway: "schemaKey == 'no'"
  search-email-gateway:
    name: Search email gateway
    description: Please search email-gateway for the phishing mail.
    type: task
    next:
      # empty condition: transition is unconditional
      extract-iocs:
  block-sender:
    name: Block sender
    type: task
    next:
      extract-iocs:
  extract-iocs:
    name: Extract IOCs
    description: Please insert the IOCs
    type: input
    schema:
      properties:
        iocs:
          items:
            type: string
          title: IOCs
          type: array
      title: Extract IOCs
      type: object
    next:
      block-iocs:
  block-iocs:
    name: Block IOCs
    type: task

View File

@@ -1,37 +0,0 @@
# Simple demo playbook: take a text input, hash it via automation, comment
# the hash on the ticket, then close out with a manual task.
# NOTE(review): the original file lost its indentation (all keys were flush
# left, which is invalid YAML); nesting reconstructed from the key order and
# the playbook schema (tasks -> name/type/schema/automation/payload/next).
name: Simple
tasks:
  input:
    name: Enter something to hash
    type: input
    schema:
      title: Something
      type: object
      properties:
        something:
          type: string
          title: Something
          default: ""
    next:
      hash: "something != ''"
  hash:
    name: Hash the something
    type: automation
    automation: hash.sha1
    payload:
      default: "playbook.tasks['input'].data['something']"
    next:
      comment: "hash != ''"
  comment:
    name: Comment the hash
    type: automation
    automation: comment
    payload:
      default: "playbook.tasks['hash'].data['hash']"
    next:
      done: "done"
  done:
    name: You can close this case now
    type: task

View File

@@ -1,79 +0,0 @@
{
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Default",
"type": "object",
"required": [
"severity",
"description",
"tlp"
],
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
}
]
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
}
}
}

View File

@@ -1,160 +0,0 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/icza/dyno"
"gopkg.in/yaml.v3"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// PlaybookYAML is the user-editable YAML form of a playbook template, as
// unmarshalled from the template's raw YAML string.
type PlaybookYAML struct {
Name string `yaml:"name"`
Tasks map[string]TaskYAML `yaml:"tasks"`
}
// TaskYAML is one task entry inside a PlaybookYAML.
type TaskYAML struct {
Name string `yaml:"name"`
Type string `yaml:"type"` // e.g. "input", "task", "automation" (per bundled playbooks)
Schema any `yaml:"schema"` // JSON schema for "input" tasks
Automation string `yaml:"automation"` // automation ID for "automation" tasks
Payload map[string]string `yaml:"payload"` // expressions feeding the automation
Next map[string]string `yaml:"next"` // successor task ID -> CAQL condition ("" = unconditional)
Join bool `yaml:"join"` // true: all parents must complete before this task activates
}
// toPlaybooks converts playbook template forms into Playbook models, keyed by
// the form's explicit ID or, when absent, the kebab-cased playbook name.
func toPlaybooks(docs []*model.PlaybookTemplateForm) (map[string]*model.Playbook, error) {
	result := map[string]*model.Playbook{}
	for _, form := range docs {
		pb, err := toPlaybook(form)
		if err != nil {
			return nil, err
		}
		key := strcase.ToKebab(pb.Name)
		if form.ID != nil {
			key = *form.ID
		}
		result[key] = pb
	}
	return result, nil
}
// toPlaybook unmarshals the form's YAML into a Playbook, normalizes each
// task's schema to a string-keyed map and stamps the tasks' creation time.
func toPlaybook(doc *model.PlaybookTemplateForm) (*model.Playbook, error) {
ticketPlaybook := &model.Playbook{}
err := yaml.Unmarshal([]byte(doc.Yaml), ticketPlaybook)
if err != nil {
return nil, err
}
for idx, task := range ticketPlaybook.Tasks {
if task.Schema != nil {
// dyno converts interface-keyed maps to string-keyed maps; schemas
// that are not objects at the top level are rejected.
schema, ok := dyno.ConvertMapI2MapS(task.Schema).(map[string]any)
if ok {
task.Schema = schema
} else {
return nil, errors.New("could not convert schema")
}
}
task.Created = time.Now().UTC()
ticketPlaybook.Tasks[idx] = task
}
return ticketPlaybook, nil
}
// toPlaybookTemplateResponse builds the API response for a stored playbook
// template document, using the document key as the template ID.
func toPlaybookTemplateResponse(key string, doc *model.PlaybookTemplate) *model.PlaybookTemplateResponse {
return &model.PlaybookTemplateResponse{ID: key, Name: doc.Name, Yaml: doc.Yaml}
}
// PlaybookCreate stores a new playbook template. The template name is taken
// from the YAML body (which must parse and contain a non-empty name); its
// kebab-case form becomes the document key.
func (db *Database) PlaybookCreate(ctx context.Context, playbook *model.PlaybookTemplateForm) (*model.PlaybookTemplateResponse, error) {
if playbook == nil {
return nil, errors.New("requires playbook")
}
var playbookYAML PlaybookYAML
err := yaml.Unmarshal([]byte(playbook.Yaml), &playbookYAML)
if err != nil {
return nil, err
}
if playbookYAML.Name == "" {
return nil, errors.New("requires template name")
}
p := model.PlaybookTemplate{Name: playbookYAML.Name, Yaml: playbook.Yaml}
var doc model.PlaybookTemplate
// newctx makes the driver write the stored document back into doc.
// NOTE(review): the collection wrapper appears to take both the plain and
// the return-new context — confirm against busdb's CreateDocument signature.
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.playbookCollection.CreateDocument(ctx, newctx, strcase.ToKebab(playbookYAML.Name), &p)
if err != nil {
return nil, err
}
return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
// PlaybookGet loads a single playbook template by its document key.
func (db *Database) PlaybookGet(ctx context.Context, id string) (*model.PlaybookTemplateResponse, error) {
	var doc model.PlaybookTemplate
	meta, err := db.playbookCollection.ReadDocument(ctx, id, &doc)
	if err != nil {
		return nil, err
	}
	return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
// PlaybookDelete removes the playbook template with the given document key.
func (db *Database) PlaybookDelete(ctx context.Context, id string) error {
_, err := db.playbookCollection.RemoveDocument(ctx, id)
return err
}
// PlaybookUpdate replaces the playbook template id with the given form. The
// template name is taken from the YAML body, which must parse and contain a
// non-empty name. Returns the updated template.
//
// A nil form is rejected explicitly (consistent with PlaybookCreate); the
// original dereferenced playbook.Yaml unchecked and would panic on nil.
func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *model.PlaybookTemplateForm) (*model.PlaybookTemplateResponse, error) {
	if playbook == nil {
		return nil, errors.New("requires playbook")
	}
	var pb PlaybookYAML
	err := yaml.Unmarshal([]byte(playbook.Yaml), &pb)
	if err != nil {
		return nil, err
	}
	if pb.Name == "" {
		return nil, errors.New("requires template name")
	}
	var doc model.PlaybookTemplate
	// Have the driver write the replaced document back into doc.
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.playbookCollection.ReplaceDocument(ctx, id, &model.PlaybookTemplate{Name: pb.Name, Yaml: playbook.Yaml})
	if err != nil {
		return nil, err
	}
	return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
// PlaybookList returns all stored playbook templates.
func (db *Database) PlaybookList(ctx context.Context) ([]*model.PlaybookTemplateResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": PlaybookCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.PlaybookTemplateResponse
	for {
		var doc model.PlaybookTemplate
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toPlaybookTemplateResponse(meta.Key, &doc))
	}
	// The outer err is guaranteed nil here (it was checked after db.Query and
	// the loop declares its own); return nil explicitly instead of the stale
	// variable the original returned.
	return docs, nil
}

View File

@@ -1,186 +0,0 @@
package database
import (
"errors"
"fmt"
"log"
"sort"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/dag"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// playbookGraph builds a DAG from the playbook's tasks: one node per task ID
// (added in sorted order for determinism) and one edge per "next" transition.
//
// The underlying dag errors are now wrapped with %w instead of being
// discarded, so callers can see why a node or edge was rejected.
func playbookGraph(playbook *model.Playbook) (*dag.Graph, error) {
	d := dag.NewGraph()
	taskIDs := make([]string, 0, len(playbook.Tasks))
	for taskID := range playbook.Tasks {
		taskIDs = append(taskIDs, taskID)
	}
	sort.Strings(taskIDs)
	for _, taskID := range taskIDs {
		if err := d.AddNode(taskID); err != nil {
			return nil, fmt.Errorf("could not add node %s: %w", taskID, err)
		}
	}
	for _, taskID := range taskIDs {
		task := playbook.Tasks[taskID]
		for next := range task.Next {
			if err := d.AddEdge(taskID, next); err != nil {
				return nil, fmt.Errorf("could not add edge %s -> %s: %w", taskID, next, err)
			}
		}
	}
	return d, nil
}
// toTaskResponse converts one playbook task into its API response, attaching
// its display order and whether it is currently actionable (see active).
func toTaskResponse(playbook *model.Playbook, taskID string, order int, graph *dag.Graph) (*model.TaskResponse, error) {
task, ok := playbook.Tasks[taskID]
if !ok {
return nil, fmt.Errorf("task %s not found", taskID)
}
tr := &model.TaskResponse{
Automation: task.Automation,
Closed: task.Closed,
Created: task.Created,
Data: task.Data,
Done: task.Done,
Join: task.Join,
Payload: task.Payload,
Name: task.Name,
Next: task.Next,
Owner: task.Owner,
Schema: task.Schema,
Type: task.Type,
// Active: active,
// Order: v.Order,
}
tr.Order = int64(order)
// NOTE(review): the error from active is discarded, so an evaluation
// failure silently renders the task inactive — confirm this is intended.
taskActive, _ := active(playbook, taskID, graph, task)
tr.Active = taskActive
return tr, nil
}
// activePlaybook reports whether the task taskID in the given playbook is
// currently actionable, building the task graph on the fly.
func activePlaybook(playbook *model.Playbook, taskID string) (bool, error) {
	task, found := playbook.Tasks[taskID]
	if !found {
		return false, fmt.Errorf("playbook does not contain tasks %s", taskID)
	}
	graph, err := playbookGraph(playbook)
	if err != nil {
		return false, err
	}
	return active(playbook, taskID, graph, task)
}
// active reports whether a task can currently be worked on:
//   - done tasks are never active;
//   - root tasks (no parents in the graph) are always active;
//   - join tasks require ALL parents done and all their conditions satisfied;
//   - otherwise ANY done parent with a satisfied condition activates the task.
func active(playbook *model.Playbook, taskID string, d *dag.Graph, task *model.Task) (bool, error) {
if task.Done {
return false, nil
}
parents := d.GetParents(taskID)
if len(parents) == 0 {
return true, nil // valid(&task)
}
if task.Join != nil && *task.Join {
// Join semantics: every parent must be done AND its condition must hold.
for _, parent := range parents {
parentTask := playbook.Tasks[parent]
if !parentTask.Done {
return false, nil
}
requirement := parentTask.Next[taskID]
b, err := evalRequirement(requirement, parentTask.Data)
if err != nil {
return false, err
}
if !b {
return false, nil
}
}
return true, nil
}
// Any-parent semantics: one done parent with a satisfied condition suffices.
for _, parent := range parents {
parentTask := playbook.Tasks[parent]
if !parentTask.Done {
// return false, nil
continue
}
requirement := parentTask.Next[taskID]
b, err := evalRequirement(requirement, parentTask.Data)
if err != nil {
// Evaluation errors on this path skip the parent instead of failing.
continue
}
if b {
return true, nil
}
}
return false, nil
}
// evalRequirement evaluates the CAQL condition aql against a parent task's
// data. An empty condition always passes; non-map data is evaluated against
// a nil map; a non-boolean result counts as "not satisfied".
func evalRequirement(aql string, data any) (bool, error) {
	if aql == "" {
		return true, nil
	}
	parser := caql.Parser{}
	tree, err := parser.Parse(aql)
	if err != nil {
		return false, err
	}
	var dataMap map[string]any
	if data != nil {
		if dataMapX, ok := data.(map[string]any); ok {
			dataMap = dataMapX
		} else {
			log.Println("wrong data type for task data")
		}
	}
	v, err := tree.Eval(dataMap)
	if err != nil {
		return false, err
	}
	if b, ok := v.(bool); ok {
		return b, nil
	}
	// err is guaranteed nil here; the original returned the stale variable,
	// which read as if an error could propagate. Return nil explicitly.
	return false, nil
}
/*
// "github.com/qri-io/jsonschema"
func valid(task *model.Task) (bool, error) {
schema, err := json.Marshal(task.Schema)
if err != nil {
return false, err
}
rs := &jsonschema.Schema{}
if err := json.Unmarshal(schema, rs); err != nil {
return false, err
}
state := rs.Validate(context.Background(), task.Data)
return len(*state.Errs) > 0, nil
}
*/

View File

@@ -1,147 +0,0 @@
package database
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// Test fixtures: playbook2 is an untouched phishing playbook, playbook3 the
// same playbook with the root task completed (boardInvolved == true), and
// playbook4 a malware playbook used for ordering tests.
var playbook2 = &model.Playbook{
Name: "Phishing",
Tasks: map[string]*model.Task{
"board": {Next: map[string]string{
"escalate": "boardInvolved == true",
"acquire-mail": "boardInvolved == false",
}},
"escalate": {},
"acquire-mail": {Next: map[string]string{
"extract-iocs": "schemaKey == 'yes'",
"block-sender": "schemaKey == 'yes'",
"search-email-gateway": "schemaKey == 'no'",
}},
"extract-iocs": {Next: map[string]string{"fetch-iocs": ""}},
"fetch-iocs": {Next: map[string]string{"block-iocs": ""}},
"search-email-gateway": {Next: map[string]string{"block-iocs": ""}},
"block-sender": {Next: map[string]string{"block-iocs": ""}},
"block-iocs": {Next: map[string]string{"block-ioc": ""}},
"block-ioc": {},
},
}
// playbook3: like playbook2, but "board" is done with boardInvolved=true,
// which should activate "escalate".
var playbook3 = &model.Playbook{
Name: "Phishing",
Tasks: map[string]*model.Task{
"board": {Next: map[string]string{
"escalate": "boardInvolved == true",
"acquire-mail": "boardInvolved == false",
}, Data: map[string]any{"boardInvolved": true}, Done: true},
"escalate": {},
"acquire-mail": {Next: map[string]string{
"extract-iocs": "schemaKey == 'yes'",
"block-sender": "schemaKey == 'yes'",
"search-email-gateway": "schemaKey == 'no'",
}},
"extract-iocs": {Next: map[string]string{"fetch-iocs": ""}},
"fetch-iocs": {Next: map[string]string{"block-iocs": ""}},
"search-email-gateway": {Next: map[string]string{"block-iocs": ""}},
"block-sender": {Next: map[string]string{"block-iocs": ""}},
"block-iocs": {Next: map[string]string{"block-ioc": ""}},
"block-ioc": {},
},
}
// playbook4: branching malware playbook used by Test_playbookOrder.
var playbook4 = &model.Playbook{
Name: "Malware",
Tasks: map[string]*model.Task{
"file-or-hash": {Next: map[string]string{
"enter-hash": "file == 'Hash'",
"upload": "file == 'File'",
}},
"enter-hash": {Next: map[string]string{
"virustotal": "hash != ''",
}},
"upload": {Next: map[string]string{
"hash": "malware",
}},
"hash": {Next: map[string]string{"virustotal": ""}},
"virustotal": {},
},
}
// Test_canBeCompleted checks activePlaybook against the phishing fixtures:
// in an untouched playbook only the root task is active; once the root is
// done with boardInvolved=true, only "escalate" becomes active.
func Test_canBeCompleted(t *testing.T) {
t.Parallel()
type args struct {
playbook *model.Playbook
taskID string
}
tests := []struct {
name string
args args
want bool
wantErr bool
}{
{"playbook2 board", args{playbook: playbook2, taskID: "board"}, true, false},
{"playbook2 escalate", args{playbook: playbook2, taskID: "escalate"}, false, false},
{"playbook2 acquire-mail", args{playbook: playbook2, taskID: "acquire-mail"}, false, false},
{"playbook2 block-ioc", args{playbook: playbook2, taskID: "block-ioc"}, false, false},
{"playbook3 board", args{playbook: playbook3, taskID: "board"}, false, false},
{"playbook3 escalate", args{playbook: playbook3, taskID: "escalate"}, true, false},
{"playbook3 acquire-mail", args{playbook: playbook3, taskID: "acquire-mail"}, false, false},
{"playbook3 block-ioc", args{playbook: playbook3, taskID: "block-ioc"}, false, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := activePlaybook(tt.args.playbook, tt.args.taskID)
if (err != nil) != tt.wantErr {
t.Errorf("activePlaybook() error = %v, wantErr %v", err, tt.wantErr)
return
}
if got != tt.want {
t.Errorf("activePlaybook() got = %v, want %v", got, tt.want)
}
})
}
}
// Test_playbookOrder checks that toPlaybookResponse assigns task Order values
// that reconstruct the expected topological ordering of playbook4.
func Test_playbookOrder(t *testing.T) {
t.Parallel()
type args struct {
playbook *model.Playbook
}
tests := []struct {
name string
args args
want []string
wantErr bool
}{
{"playbook4", args{playbook: playbook4}, []string{"file-or-hash", "enter-hash", "upload", "hash", "virustotal"}, false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := toPlaybookResponse(tt.args.playbook)
if (err != nil) != tt.wantErr {
t.Errorf("activePlaybook() error = %v, wantErr %v", err, tt.wantErr)
return
}
// Invert the Order field back into a name slice for comparison.
names := make([]string, len(got.Tasks))
for name, task := range got.Tasks {
names[task.Order] = name
}
assert.Equal(t, tt.want, names)
})
}
}

View File

@@ -1,51 +0,0 @@
package database
import (
"context"
"errors"
"strconv"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
)
// RelatedCreate links two tickets by inserting an edge between their ticket
// documents. Relating a ticket to itself is rejected.
func (db *Database) RelatedCreate(ctx context.Context, id, id2 int64) error {
if id == id2 {
return errors.New("tickets cannot relate to themself")
}
// NOTE(review): the wrapper appears to take the context twice — confirm
// against the relatedCollection CreateEdge signature.
_, err := db.relatedCollection.CreateEdge(ctx, ctx, &driver.EdgeDocument{
From: driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
To: driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
})
return err
}
// RelatedBatchCreate inserts many ticket relation edges in a single call.
func (db *Database) RelatedBatchCreate(ctx context.Context, edges []*driver.EdgeDocument) error {
_, err := db.relatedCollection.CreateEdges(ctx, edges)
return err
}
// RelatedRemove deletes the relation edge between two tickets, matching the
// edge in either direction, and publishes an update event for both tickets.
func (db *Database) RelatedRemove(ctx context.Context, id, id2 int64) error {
q := `
FOR d in @@collection
FILTER (d._from == @id && d._to == @id2) || (d._to == @id && d._from == @id2)
REMOVE d in @@collection`
_, _, err := db.Query(ctx, q, map[string]any{
"@collection": RelatedTicketsCollectionName,
"id": driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
"id2": driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
}, &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
},
})
return err
}

View File

@@ -1,35 +0,0 @@
package database
import (
"context"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// Settings reads the singleton settings document stored under key "global".
func (db *Database) Settings(ctx context.Context) (*model.Settings, error) {
	var settings model.Settings
	if _, err := db.settingsCollection.ReadDocument(ctx, "global", &settings); err != nil {
		return nil, err
	}
	return &settings, nil
}
// SaveSettings upserts the singleton settings document under key "global":
// it replaces the document when it exists, otherwise creates it.
func (db *Database) SaveSettings(ctx context.Context, settings *model.Settings) (*model.Settings, error) {
exists, err := db.settingsCollection.DocumentExists(ctx, "global")
if err != nil {
return nil, err
}
if exists {
if _, err := db.settingsCollection.ReplaceDocument(ctx, "global", settings); err != nil {
return nil, err
}
} else {
// NOTE(review): the wrapper appears to take the context twice — confirm
// against the settingsCollection CreateDocument signature.
if _, err := db.settingsCollection.CreateDocument(ctx, ctx, "global", settings); err != nil {
return nil, err
}
}
return settings, nil
}

View File

@@ -1,91 +0,0 @@
package database
import (
"context"
"fmt"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// Statistics computes dashboard aggregates over the tickets collection in a
// single AQL query: ticket counts per type, open unassigned tickets, open
// tickets per owner, and tickets per ISO week.
func (db *Database) Statistics(ctx context.Context) (*model.Statistics, error) {
query := `RETURN {
tickets_per_type: MERGE(FOR d in tickets
COLLECT type = d.type WITH COUNT INTO typecount
RETURN ZIP([type], [typecount])),
unassigned: FIRST(FOR d in tickets
FILTER d.status == "open" AND !d.owner
COLLECT WITH COUNT INTO length
RETURN length),
open_tickets_per_user: MERGE(FOR d in tickets
FILTER d.status == "open"
COLLECT user = d.owner WITH COUNT INTO usercount
RETURN ZIP([user], [usercount])),
tickets_per_week: MERGE(FOR d in tickets
COLLECT week = CONCAT(DATE_YEAR(d.created), "-", DATE_ISOWEEK(d.created) < 10 ? "0" : "", DATE_ISOWEEK(d.created)) WITH COUNT INTO weekcount
RETURN ZIP([week], [weekcount])),
}`
cur, _, err := db.Query(ctx, query, nil, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cur.Close()
statistics := model.Statistics{}
// The query RETURNs a single object; read exactly one document.
if _, err := cur.ReadDocument(ctx, &statistics); err != nil {
return nil, err
}
return &statistics, nil
}
// WidgetData evaluates a dashboard widget: it groups tickets by the given
// aggregation expression, optionally pre-filtered, and returns value->count.
//
// The query is assembled by string concatenation, but both user-supplied
// fragments are first parsed by the CAQL parser and re-serialized from the
// parse tree, which rejects syntactically invalid input.
// NOTE(review): confirm CAQL serialization cannot emit AQL with side effects
// before treating this as fully injection-safe.
func (db *Database) WidgetData(ctx context.Context, aggregation string, filter *string) (map[string]any, error) {
parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
queryTree, err := parser.Parse(aggregation)
if err != nil {
return nil, fmt.Errorf("invalid aggregation query (%s): syntax error", aggregation)
}
aggregationString, err := queryTree.String()
if err != nil {
return nil, fmt.Errorf("invalid widget aggregation query (%s): %w", aggregation, err)
}
aggregation = aggregationString
filterQ := ""
if filter != nil && *filter != "" {
queryTree, err := parser.Parse(*filter)
if err != nil {
return nil, fmt.Errorf("invalid filter query (%s): syntax error", *filter)
}
filterString, err := queryTree.String()
if err != nil {
return nil, fmt.Errorf("invalid widget filter query (%s): %w", *filter, err)
}
filterQ = "FILTER " + filterString
}
query := `RETURN MERGE(FOR d in tickets
` + filterQ + `
COLLECT field = ` + aggregation + ` WITH COUNT INTO count
RETURN ZIP([field], [count]))`
cur, _, err := db.Query(ctx, query, nil, busdb.ReadOperation)
if err != nil {
return nil, err
}
defer cur.Close()
statistics := map[string]any{}
// The query RETURNs a single merged object; read exactly one document.
if _, err := cur.ReadDocument(ctx, &statistics); err != nil {
return nil, err
}
return statistics, nil
}

View File

@@ -1,67 +0,0 @@
package database
import (
"context"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// playbookResponse is the row shape returned by the TaskList AQL query: one
// playbook instance together with its owning ticket's ID and name.
type playbookResponse struct {
PlaybookID string `json:"playbook_id"`
PlaybookName string `json:"playbook_name"`
Playbook model.Playbook `json:"playbook"`
TicketID int64 `json:"ticket_id"`
TicketName string `json:"ticket_name"`
}
// TaskList collects all currently active tasks from the playbooks of open
// tickets the caller may write to (per the ticket write filter hook), each
// annotated with its ticket and playbook context.
func (db *Database) TaskList(ctx context.Context) ([]*model.TaskWithContext, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `FOR d IN @@collection
` + ticketFilterQuery + `
FILTER d.status == 'open'
FOR playbook IN NOT_NULL(VALUES(d.playbooks), [])
RETURN { ticket_id: TO_NUMBER(d._key), ticket_name: d.name, playbook_id: POSITION(d.playbooks, playbook, true), playbook_name: playbook.name, playbook: playbook }`
	cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]any{
		"@collection": TicketCollectionName,
	}), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.TaskWithContext
	for {
		var doc playbookResponse
		_, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		playbook, err := toPlaybookResponse(&doc.Playbook)
		if err != nil {
			return nil, err
		}
		// Only active (actionable) tasks are surfaced in the task list.
		for _, task := range playbook.Tasks {
			if task.Active {
				docs = append(docs, &model.TaskWithContext{
					PlaybookId:   doc.PlaybookID,
					PlaybookName: doc.PlaybookName,
					Task:         task,
					TicketId:     doc.TicketID,
					TicketName:   doc.TicketName,
				})
			}
		}
	}
	// The outer err is guaranteed nil here (checked after db.Query; the loop
	// declares its own); return nil explicitly instead of the stale variable.
	return docs, nil
}

View File

@@ -1,89 +0,0 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toTicketTemplate converts a ticket template form into its storage model.
func toTicketTemplate(doc *model.TicketTemplateForm) *model.TicketTemplate {
	return &model.TicketTemplate{
		Name:   doc.Name,
		Schema: doc.Schema,
	}
}
// toTicketTemplateResponse builds the API response for a stored ticket
// template document, using the document key as the template ID.
func toTicketTemplateResponse(key string, doc *model.TicketTemplate) *model.TicketTemplateResponse {
	return &model.TicketTemplateResponse{
		ID:     key,
		Name:   doc.Name,
		Schema: doc.Schema,
	}
}
// TemplateCreate stores a new ticket template. The template must be non-nil
// and named; the kebab-case name becomes the document key.
func (db *Database) TemplateCreate(ctx context.Context, template *model.TicketTemplateForm) (*model.TicketTemplateResponse, error) {
if template == nil {
return nil, errors.New("requires template")
}
if template.Name == "" {
return nil, errors.New("requires template name")
}
var doc model.TicketTemplate
// newctx makes the driver write the stored document back into doc.
// NOTE(review): the collection wrapper appears to take both the plain and
// the return-new context — confirm against the CreateDocument signature.
newctx := driver.WithReturnNew(ctx, &doc)
meta, err := db.templateCollection.CreateDocument(ctx, newctx, strcase.ToKebab(template.Name), toTicketTemplate(template))
if err != nil {
return nil, err
}
return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateGet loads a single ticket template by its document key.
func (db *Database) TemplateGet(ctx context.Context, id string) (*model.TicketTemplateResponse, error) {
	doc := model.TicketTemplate{}
	meta, err := db.templateCollection.ReadDocument(ctx, id, &doc)
	if err != nil {
		return nil, err
	}
	return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateUpdate replaces the ticket template id with the given form and
// returns the updated template.
//
// A nil form is rejected explicitly (consistent with TemplateCreate); the
// original passed it straight to toTicketTemplate and would panic on nil.
func (db *Database) TemplateUpdate(ctx context.Context, id string, template *model.TicketTemplateForm) (*model.TicketTemplateResponse, error) {
	if template == nil {
		return nil, errors.New("requires template")
	}
	var doc model.TicketTemplate
	// Have the driver write the replaced document back into doc.
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.templateCollection.ReplaceDocument(ctx, id, toTicketTemplate(template))
	if err != nil {
		return nil, err
	}
	return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateDelete removes the ticket template with the given document key.
func (db *Database) TemplateDelete(ctx context.Context, id string) error {
_, err := db.templateCollection.RemoveDocument(ctx, id)
return err
}
// TemplateList returns all stored ticket templates.
func (db *Database) TemplateList(ctx context.Context) ([]*model.TicketTemplateResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": TemplateCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*model.TicketTemplateResponse
	for {
		var doc model.TicketTemplate
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toTicketTemplateResponse(meta.Key, &doc))
	}
	// The outer err is guaranteed nil here (checked after db.Query; the loop
	// declares its own); return nil explicitly instead of the stale variable.
	return docs, nil
}

View File

@@ -1,211 +0,0 @@
package database_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/test"
)
// Shared fixtures: template1 is a fresh template form; default1 collides with
// the seeded "default" template (same kebab-case key) to test duplicates.
var (
template1 = &model.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Template 1",
}
default1 = &model.TicketTemplateForm{
Schema: migrations.DefaultTemplateSchema,
Name: "Default",
}
)
// TestDatabase_TemplateCreate covers creation: normal, duplicate key, nil
// form, empty form, and name-only form.
func TestDatabase_TemplateCreate(t *testing.T) {
t.Parallel()
type args struct {
template *model.TicketTemplateForm
}
tests := []struct {
name string
args args
wantErr bool
}{
{name: "Normal", args: args{template: template1}},
{name: "Duplicate", args: args{template: default1}, wantErr: true},
{name: "Nil template", args: args{}, wantErr: true},
{name: "Template without fields", args: args{template: &model.TicketTemplateForm{}}, wantErr: true},
{name: "Only name", args: args{template: &model.TicketTemplateForm{Name: "name"}}, wantErr: false},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Each subtest gets its own seeded database instance.
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
}
defer cleanup()
if _, err := db.TemplateCreate(test.Context(), tt.args.template); (err != nil) != tt.wantErr {
t.Errorf("TemplateCreate() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
// TestDatabase_TemplateDelete covers deleting the seeded template and a
// non-existent key.
func TestDatabase_TemplateDelete(t *testing.T) {
t.Parallel()
type args struct {
id string
}
tests := []struct {
name string
args args
wantErr bool
}{
{name: "Normal", args: args{"default"}},
{name: "Not existing", args: args{"foobar"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Each subtest gets its own seeded database instance.
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
}
defer cleanup()
if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
t.Errorf("TemplateCreate() error = %v", err)
}
if err := db.TemplateDelete(test.Context(), tt.args.id); (err != nil) != tt.wantErr {
t.Errorf("TemplateDelete() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
// TestDatabase_TemplateGet covers fetching the seeded template and a
// non-existent key.
func TestDatabase_TemplateGet(t *testing.T) {
t.Parallel()
type args struct {
id string
}
tests := []struct {
name string
args args
want *model.TicketTemplateResponse
wantErr bool
}{
{name: "Normal", args: args{id: "default"}, want: &model.TicketTemplateResponse{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}},
{name: "Not existing", args: args{id: "foobar"}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Each subtest gets its own seeded database instance.
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
}
defer cleanup()
if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
t.Errorf("TemplateCreate() error = %v", err)
}
got, err := db.TemplateGet(test.Context(), tt.args.id)
if (err != nil) != tt.wantErr {
t.Errorf("TemplateGet() error = %v, wantErr %v", err, tt.wantErr)
return
}
// For expected-error cases there is no response to compare.
if err != nil {
return
}
assert.Equal(t, got, tt.want)
})
}
}
// TestDatabase_TemplateList checks listing returns the seeded template plus
// the one created during the test.
func TestDatabase_TemplateList(t *testing.T) {
t.Parallel()
tests := []struct {
name string
want []*model.TicketTemplateResponse
wantErr bool
}{
{name: "Normal", want: []*model.TicketTemplateResponse{{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}, {ID: "template-1", Name: template1.Name, Schema: template1.Schema}}},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Each subtest gets its own seeded database instance.
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
}
defer cleanup()
if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
t.Errorf("TemplateCreate() error = %v", err)
}
got, err := db.TemplateList(test.Context())
if (err != nil) != tt.wantErr {
t.Errorf("TemplateList() error = %v, wantErr %v", err, tt.wantErr)
return
}
assert.Equal(t, got, tt.want)
})
}
}
// TestDatabase_TemplateUpdate covers replacing the seeded template and
// updating a non-existent key.
func TestDatabase_TemplateUpdate(t *testing.T) {
t.Parallel()
type args struct {
id string
template *model.TicketTemplateForm
}
tests := []struct {
name string
args args
wantErr bool
}{
{name: "Normal", args: args{"default", template1}},
{name: "Not existing", args: args{"foobar", template1}, wantErr: true},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Each subtest gets its own seeded database instance.
_, _, _, _, _, db, cleanup, err := test.DB(t)
if err != nil {
t.Fatal(err)
}
defer cleanup()
if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
t.Errorf("TemplateCreate() error = %v", err)
}
if _, err := db.TemplateUpdate(test.Context(), tt.args.id, tt.args.template); (err != nil) != tt.wantErr {
t.Errorf("TemplateUpdate() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}

View File

@@ -1,624 +0,0 @@
package database
import (
"context"
"encoding/json"
"errors"
"fmt"
"sort"
"strconv"
"strings"
"sync"
"github.com/arangodb/go-driver"
"github.com/xeipuuv/gojsonschema"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
"github.com/SecurityBrewery/catalyst/index"
)
// toTicket converts a ticket form into its storage representation, applying
// defaults for missing fields (timestamps, schema, status). When the form
// carries an explicit ID it returns a Keyed wrapper so the document is stored
// under that key; otherwise the bare Ticket is returned.
func toTicket(ticketForm *model.TicketForm) (any, error) {
playbooks, err := toPlaybooks(ticketForm.Playbooks)
if err != nil {
return nil, err
}
ticket := &model.Ticket{
Artifacts: ticketForm.Artifacts,
Comments: ticketForm.Comments,
Details: ticketForm.Details,
Files: ticketForm.Files,
Name: ticketForm.Name,
Owner: ticketForm.Owner,
Playbooks: playbooks,
Read: ticketForm.Read,
References: ticketForm.References,
Status: ticketForm.Status,
Type: ticketForm.Type,
Write: ticketForm.Write,
// ID: ticketForm.ID,
// Created: ticketForm.Created,
// Modified: ticketForm.Modified,
// Schema: ticketForm.Schema,
}
// Defaults: missing timestamps become "now" (two separate calls, so
// Created and Modified may differ by nanoseconds on fresh tickets).
if ticketForm.Created != nil {
ticket.Created = *ticketForm.Created
} else {
ticket.Created = time.Now().UTC()
}
if ticketForm.Modified != nil {
ticket.Modified = *ticketForm.Modified
} else {
ticket.Modified = time.Now().UTC()
}
if ticketForm.Schema != nil {
ticket.Schema = *ticketForm.Schema
} else {
ticket.Schema = "{}"
}
if ticketForm.Status == "" {
ticket.Status = "open"
}
if ticketForm.ID != nil {
return &busdb.Keyed[model.Ticket]{Key: strconv.FormatInt(*ticketForm.ID, 10), Doc: ticket}, nil
}
return ticket, nil
}
// toTicketResponses converts a slice of simple ticket responses into full
// ticket responses, stopping at the first conversion error.
func toTicketResponses(tickets []*model.TicketSimpleResponse) ([]*model.TicketResponse, error) {
	var responses []*model.TicketResponse
	for _, ticket := range tickets {
		response, err := toTicketResponse(ticket)
		if err != nil {
			return nil, err
		}
		responses = append(responses, response)
	}

	return responses, nil
}
// toTicketResponse converts a TicketSimpleResponse into a full
// TicketResponse. Playbooks are expanded via toPlaybookResponses; every
// other field is copied one-to-one.
func toTicketResponse(ticket *model.TicketSimpleResponse) (*model.TicketResponse, error) {
playbooks, err := toPlaybookResponses(ticket.Playbooks)
if err != nil {
return nil, err
}
return &model.TicketResponse{
ID: ticket.ID,
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
Created: ticket.Created,
Details: ticket.Details,
Files: ticket.Files,
Modified: ticket.Modified,
Name: ticket.Name,
Owner: ticket.Owner,
Playbooks: playbooks,
Read: ticket.Read,
References: ticket.References,
Schema: ticket.Schema,
Status: ticket.Status,
Type: ticket.Type,
Write: ticket.Write,
}, nil
}
// toTicketSimpleResponse builds a TicketSimpleResponse from a stored ticket
// and its ArangoDB document key. The key is parsed as a base-10 int64 and
// becomes the ticket ID; a non-numeric key yields an error.
func toTicketSimpleResponse(key string, ticket *model.Ticket) (*model.TicketSimpleResponse, error) {
id, err := strconv.ParseInt(key, 10, 64)
if err != nil {
return nil, err
}
return &model.TicketSimpleResponse{
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
Created: ticket.Created,
Details: ticket.Details,
Files: ticket.Files,
ID: id,
Modified: ticket.Modified,
Name: ticket.Name,
Owner: ticket.Owner,
Playbooks: ticket.Playbooks,
Read: ticket.Read,
References: ticket.References,
Schema: ticket.Schema,
Status: ticket.Status,
Type: ticket.Type,
Write: ticket.Write,
}, nil
}
// toTicketWithTickets combines a ticket response with its graph-linked
// tickets, tickets correlated by shared artifacts, and its log entries into
// a single TicketWithTickets aggregate. Fields of the base response are
// copied one-to-one.
func toTicketWithTickets(ticketResponse *model.TicketResponse, tickets, correlatedTickets []*model.TicketSimpleResponse, logs []*model.LogEntry) *model.TicketWithTickets {
return &model.TicketWithTickets{
Artifacts: ticketResponse.Artifacts,
Comments: ticketResponse.Comments,
Created: ticketResponse.Created,
Details: ticketResponse.Details,
Files: ticketResponse.Files,
ID: ticketResponse.ID,
Modified: ticketResponse.Modified,
Name: ticketResponse.Name,
Owner: ticketResponse.Owner,
Playbooks: ticketResponse.Playbooks,
Read: ticketResponse.Read,
References: ticketResponse.References,
Schema: ticketResponse.Schema,
Status: ticketResponse.Status,
Type: ticketResponse.Type,
Write: ticketResponse.Write,
Logs: logs,
Tickets: tickets,
CorrelatedTickets: correlatedTickets,
}
}
// toPlaybookResponses converts every playbook in the map to its response
// form, keeping the same keys. Conversion stops on the first error.
func toPlaybookResponses(playbooks map[string]*model.Playbook) (map[string]*model.PlaybookResponse, error) {
	responses := make(map[string]*model.PlaybookResponse, len(playbooks))
	for id, playbook := range playbooks {
		response, err := toPlaybookResponse(playbook)
		if err != nil {
			return nil, err
		}
		responses[id] = response
	}

	return responses, nil
}
// toPlaybookResponse converts a playbook into its response form. Tasks are
// visited in topological order of the playbook graph, and each task's
// position in that order is passed on to toTaskResponse.
func toPlaybookResponse(playbook *model.Playbook) (*model.PlaybookResponse, error) {
	graph, err := playbookGraph(playbook)
	if err != nil {
		return nil, err
	}

	order, err := graph.Toposort()
	if err != nil {
		return nil, err
	}

	response := &model.PlaybookResponse{
		Name:  playbook.Name,
		Tasks: map[string]*model.TaskResponse{},
	}
	// The range index replaces the original manual counter: it increments
	// exactly once per task, in topological order.
	for position, taskID := range order {
		task, err := toTaskResponse(playbook, taskID, position, graph)
		if err != nil {
			return nil, err
		}
		response.Tasks[taskID] = task
	}

	return response, nil
}
// TicketBatchCreate validates and inserts a batch of tickets, applies the
// ingestion-filter hook, indexes the new documents, publishes a
// database-created bus message, and starts the root automation tasks of any
// attached playbooks. It returns the created tickets as responses.
func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*model.TicketForm) ([]*model.TicketResponse, error) {
// The ingestion filter hook yields an AQL expression producing per-ticket
// update documents that are merged into each ticket before insert.
update, err := db.Hooks.IngestionFilter(ctx, db.Index)
if err != nil {
return nil, err
}
// Convert each form and validate it against the ticket JSON schema.
var dbTickets []any
for _, ticketForm := range ticketForms {
ticket, err := toTicket(ticketForm)
if err != nil {
return nil, err
}
if err := validate(ticket, model.TicketSchema); err != nil {
return nil, err
}
dbTickets = append(dbTickets, ticket)
}
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
// AQL: apply ingestion updates (if any), promote an explicit "id" field
// to the document _key, strip "id", and insert.
query := `FOR d IN @tickets
` + ticketFilterQuery + `
LET updates = ` + update + `
LET newdoc = LENGTH(updates) != 0 ? APPLY("MERGE_RECURSIVE", APPEND([d], updates)) : d
LET keyeddoc = HAS(newdoc, "id") ? MERGE(newdoc, {"_key": TO_STRING(newdoc.id)}) : newdoc
LET noiddoc = UNSET(keyeddoc, "id")
INSERT noiddoc INTO @@collection
RETURN NEW`
apiTickets, _, err := db.ticketListQuery(ctx, query, mergeMaps(map[string]any{
"tickets": dbTickets,
}, ticketFilterVars), busdb.CreateOperation)
if err != nil {
return nil, err
}
// Feed the freshly created tickets into the search index.
if err = batchIndex(db.Index, apiTickets); err != nil {
return nil, err
}
// Announce the new documents on the database bus.
var ids []driver.DocumentID
for _, apiTicket := range apiTickets {
ids = append(ids, driver.NewDocumentID(TicketCollectionName, fmt.Sprint(apiTicket.ID)))
}
db.bus.DatabaseChannel.Publish(&bus.DatabaseUpdateMsg{
IDs: ids,
Type: bus.DatabaseEntryCreated,
})
ticketResponses, err := toTicketResponses(apiTickets)
if err != nil {
return nil, err
}
// Kick off root automation tasks for every playbook of every new ticket.
for _, ticketResponse := range ticketResponses {
for playbookID := range ticketResponse.Playbooks {
if err := runRootTask(ticketResponse, playbookID, db); err != nil {
return nil, err
}
}
}
return ticketResponses, nil
}
// IndexRebuild wipes the search index and re-indexes every ticket in the
// collection.
func (db *Database) IndexRebuild(ctx context.Context) error {
if err := db.Index.Truncate(); err != nil {
return err
}
// Load all tickets (subject to the read filter inside ticketListQuery).
tickets, _, err := db.ticketListQuery(ctx, "FOR d IN @@collection RETURN d", nil, busdb.ReadOperation)
if err != nil {
return err
}
return batchIndex(db.Index, tickets)
}
// batchIndex indexes tickets in concurrent batches of up to 100 documents
// and waits for all batches to finish. It always returns nil; indexing
// errors from index.Index are not surfaced (matching the original
// best-effort behavior).
//
// Fix: the original only flushed a batch once it exceeded 100 entries, so
// the trailing partial batch — and therefore ANY input of 100 tickets or
// fewer — was never indexed at all.
func batchIndex(index *index.Index, tickets []*model.TicketSimpleResponse) error {
	const batchSize = 100

	var wg sync.WaitGroup
	for start := 0; start < len(tickets); start += batchSize {
		end := start + batchSize
		if end > len(tickets) {
			end = len(tickets)
		}

		wg.Add(1)
		go func(docs []*model.TicketSimpleResponse) {
			defer wg.Done()
			index.Index(docs)
		}(tickets[start:end])
	}
	wg.Wait()

	return nil
}
// TicketGet loads a single ticket (with linked/correlated tickets and logs)
// by its numeric ID, applying the read-permission filter from the hooks.
func (db *Database) TicketGet(ctx context.Context, ticketID int64) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
if err != nil {
return nil, err
}
return db.ticketGetQuery(ctx, ticketID, `LET d = DOCUMENT(@@collection, @ID) `+ticketFilterQuery+` RETURN d`, ticketFilterVars, busdb.ReadOperation)
}
// ticketGetQuery runs a query expected to return exactly one ticket
// document, then enriches the result with: tickets linked via the
// ticket/artifacts graph (both directions), tickets sharing at least one
// artifact name, and the ticket's log entries. The loaded ticket is also
// re-indexed asynchronously as a side effect.
//
// bindVars may be nil; @@collection (and @ID when ticketID != 0) are added
// before the query runs.
func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query string, bindVars map[string]any, operation *busdb.Operation) (*model.TicketWithTickets, error) {
if bindVars == nil {
bindVars = map[string]any{}
}
bindVars["@collection"] = TicketCollectionName
// ticketID 0 means the caller's query does not use @ID.
if ticketID != 0 {
bindVars["ID"] = fmt.Sprint(ticketID)
}
cur, _, err := db.Query(ctx, query, bindVars, operation)
if err != nil {
return nil, err
}
defer cur.Close()
// Read the single ticket document produced by the caller's query.
ticket := model.Ticket{}
meta, err := cur.ReadDocument(ctx, &ticket)
if err != nil {
return nil, err
}
ticketSimpleResponse, err := toTicketSimpleResponse(meta.Key, &ticket)
if err != nil {
return nil, err
}
// index
// Fire-and-forget re-index of the loaded ticket.
go db.Index.Index([]*model.TicketSimpleResponse{ticketSimpleResponse})
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
if err != nil {
return nil, err
}
// tickets
// Outgoing graph edges: tickets this ticket links to.
ticketsQuery := `FOR vertex, edge IN OUTBOUND
DOCUMENT(@@tickets, @ID)
GRAPH @graph
FILTER IS_SAME_COLLECTION(@@collection, vertex)
FILTER vertex != null
LET d = DOCUMENT(@@collection, edge["_to"])
` + ticketFilterQuery + `
RETURN d`
outTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"graph": TicketArtifactsGraphName,
"@tickets": TicketCollectionName,
}, ticketFilterVars), busdb.ReadOperation)
if err != nil {
return nil, err
}
// Incoming graph edges: tickets linking to this ticket.
ticketsQuery = `FOR vertex, edge IN INBOUND
DOCUMENT(@@tickets, @ID)
GRAPH @graph
FILTER IS_SAME_COLLECTION(@@collection, vertex)
FILTER vertex != null
LET d = DOCUMENT(@@collection, edge["_from"])
` + ticketFilterQuery + `
RETURN d`
inTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"graph": TicketArtifactsGraphName,
"@tickets": TicketCollectionName,
}, ticketFilterVars), busdb.ReadOperation)
if err != nil {
return nil, err
}
// Correlated tickets: any other ticket containing an artifact with the
// same name as one of this ticket's artifacts.
var artifactNames []string
for _, artifact := range ticketSimpleResponse.Artifacts {
artifactNames = append(artifactNames, artifact.Name)
}
ticketsQuery = `FOR d IN @@collection
FILTER d._key != @ID
` + ticketFilterQuery + `
FOR a IN NOT_NULL(d.artifacts, [])
FILTER POSITION(@artifacts, a.name)
RETURN d`
sameArtifactTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]any{
"ID": fmt.Sprint(ticketID),
"artifacts": artifactNames,
}, ticketFilterVars), busdb.ReadOperation)
if err != nil {
return nil, err
}
// Merge both link directions and sort by ticket ID for stable output.
tickets := outTickets
tickets = append(tickets, inTickets...)
sort.Slice(tickets, func(i, j int) bool {
return tickets[i].ID < tickets[j].ID
})
ticketResponse, err := toTicketResponse(ticketSimpleResponse)
if err != nil {
return nil, err
}
logs, err := db.LogList(ctx, fmt.Sprintf("%s/%d", TicketCollectionName, ticketID))
if err != nil {
return nil, err
}
return toTicketWithTickets(ticketResponse, tickets, sameArtifactTickets, logs), nil
}
// TicketUpdate replaces a ticket document wholesale (subject to the
// write-permission filter) and returns the updated ticket with its linked
// tickets. The Modified timestamp is forced to now.
func (db *Database) TicketUpdate(ctx context.Context, ticketID int64, ticket *model.Ticket) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
REPLACE d WITH @ticket IN @@collection
RETURN NEW`
ticket.Modified = time.Now().UTC() // TODO make setable?
return db.ticketGetQuery(ctx, ticketID, query, mergeMaps(map[string]any{"ticket": ticket}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated, Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, strconv.FormatInt(ticketID, 10)),
},
})
}
// TicketDelete removes a ticket document by its numeric ID. The ticket is
// fetched first, so a missing ticket (or one hidden by the read filter)
// yields an error before any removal is attempted.
func (db *Database) TicketDelete(ctx context.Context, ticketID int64) error {
	if _, err := db.TicketGet(ctx, ticketID); err != nil {
		return err
	}

	_, err := db.ticketCollection.RemoveDocument(ctx, strconv.FormatInt(ticketID, 10))

	return err
}
// TicketList returns a page of tickets plus the total match count.
// ticketType (optional) restricts by type; query (optional) is a CAQL
// filter expression translated into an AQL FILTER; sorts/desc control
// ordering; offset/count select the page.
func (db *Database) TicketList(ctx context.Context, ticketType string, query string, sorts []string, desc []bool, offset, count int64) (*model.TicketList, error) {
binVars := map[string]any{}
// Optional type restriction, bound via @type.
typeString := ""
if ticketType != "" {
typeString = "FILTER d.type == @type "
binVars["type"] = ticketType
}
// Optional CAQL filter, parsed and rendered into an AQL FILTER clause.
filterString := ""
if query != "" {
parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
queryTree, err := parser.Parse(query)
if err != nil {
return nil, errors.New("invalid filter query: syntax error")
}
filterString, err = queryTree.String()
if err != nil {
return nil, fmt.Errorf("invalid filter query: %w", err)
}
filterString = "FILTER " + filterString
}
// Total count uses the same type/filter fragments as the page query.
documentCount, err := db.TicketCount(ctx, typeString, filterString, binVars)
if err != nil {
return nil, err
}
sortQ := sortQuery(sorts, desc, binVars)
binVars["offset"] = offset
binVars["count"] = count
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
if err != nil {
return nil, err
}
q := `FOR d IN @@collection
` + ticketFilterQuery + `
` + sortQ + `
` + typeString + `
` + filterString + `
LIMIT @offset, @count
SORT d._key ASC
RETURN d`
// RETURN KEEP(d, "_key", "id", "name", "type", "created")`
ticketList, _, err := db.ticketListQuery(ctx, q, mergeMaps(binVars, ticketFilterVars), busdb.ReadOperation)
return &model.TicketList{
Count: documentCount,
Tickets: ticketList,
}, err
// return map[string]interface{}{"tickets": ticketList, "count": documentCount}, err
}
// ticketListQuery executes an AQL query returning ticket documents and
// converts each into a TicketSimpleResponse. bindVars may be nil; the
// @@collection binding is always set to the ticket collection. It also
// returns the log entry produced by the underlying busdb query.
func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars map[string]any, operation *busdb.Operation) ([]*model.TicketSimpleResponse, *model.LogEntry, error) {
if bindVars == nil {
bindVars = map[string]any{}
}
bindVars["@collection"] = TicketCollectionName
cursor, logEntry, err := db.Query(ctx, query, bindVars, operation)
if err != nil {
return nil, nil, err
}
defer cursor.Close()
// Drain the cursor; IsNoMoreDocuments signals normal end of results.
var docs []*model.TicketSimpleResponse
for {
doc := model.Ticket{}
meta, err := cursor.ReadDocument(ctx, &doc)
if driver.IsNoMoreDocuments(err) {
break
} else if err != nil {
return nil, nil, err
}
resp, err := toTicketSimpleResponse(meta.Key, &doc)
if err != nil {
return nil, nil, err
}
docs = append(docs, resp)
}
return docs, logEntry, nil
}
// TicketCount returns the number of tickets matching the given type and
// filter AQL fragments, combined with the read-permission filter from the
// hooks. bindVars may be nil; @@collection is always bound.
//
// Fix: the cursor is now closed via defer on every path — the original
// leaked it when ReadDocument returned an error.
func (db *Database) TicketCount(ctx context.Context, typequery, filterquery string, bindVars map[string]any) (int, error) {
	if bindVars == nil {
		bindVars = map[string]any{}
	}
	bindVars["@collection"] = TicketCollectionName

	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
	if err != nil {
		return 0, err
	}

	countQuery := `RETURN LENGTH(FOR d IN @@collection ` + ticketFilterQuery + " " + typequery + " " + filterquery + ` RETURN 1)`

	cursor, _, err := db.Query(ctx, countQuery, mergeMaps(bindVars, ticketFilterVars), busdb.ReadOperation)
	if err != nil {
		return 0, err
	}
	defer cursor.Close()

	documentCount := 0
	if _, err := cursor.ReadDocument(ctx, &documentCount); err != nil {
		return 0, err
	}

	return documentCount, nil
}
// sortQuery builds an AQL SORT clause for the given columns. Each column
// name is stored in bindVars under column0, column1, ... and referenced as
// d.@columnN; paramsDesc marks the matching column as descending. An empty
// column list yields an empty string.
func sortQuery(paramsSort []string, paramsDesc []bool, bindVars map[string]any) string {
	if len(paramsSort) == 0 {
		return ""
	}

	clauses := make([]string, 0, len(paramsSort))
	for i, column := range paramsSort {
		// Column names go through bind variables, never raw into the AQL.
		bindVars[fmt.Sprintf("column%d", i)] = column

		clause := fmt.Sprintf("d.@column%d", i)
		if i < len(paramsDesc) && paramsDesc[i] {
			clause += " DESC"
		}
		clauses = append(clauses, clause)
	}

	return "SORT " + strings.Join(clauses, ", ")
}
// mergeMaps returns a new map containing all entries of a and b without
// modifying either input. On duplicate keys the entry from b wins.
func mergeMaps(a map[string]any, b map[string]any) map[string]any {
	out := make(map[string]any, len(a)+len(b))
	for key, value := range a {
		out[key] = value
	}
	for key, value := range b {
		out[key] = value
	}

	return out
}
// validate marshals e to JSON and checks the result against the given JSON
// schema. It returns an error listing every schema violation, or nil when
// the document conforms.
func validate(e any, schema *gojsonschema.Schema) error {
	encoded, err := json.Marshal(e)
	if err != nil {
		return err
	}

	result, err := schema.Validate(gojsonschema.NewStringLoader(string(encoded)))
	if err != nil {
		return err
	}

	if schemaErrors := result.Errors(); len(schemaErrors) > 0 {
		messages := make([]string, 0, len(schemaErrors))
		for _, schemaError := range schemaErrors {
			messages = append(messages, schemaError.String())
		}

		return fmt.Errorf("validation failed: %v", strings.Join(messages, ", "))
	}

	return nil
}

View File

@@ -1,295 +0,0 @@
package database
import (
"context"
"errors"
"fmt"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
maut "github.com/jonas-plum/maut/auth"
"github.com/mingrammer/commonregex"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// AddArtifact appends an artifact to a ticket, defaulting its status to
// "unknown" and inferring its type from the artifact name when unset, and
// bumps the ticket's modified timestamp.
func (db *Database) AddArtifact(ctx context.Context, id int64, artifact *model.Artifact) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
if artifact.Status == nil {
artifact.Status = pointer.String("unknown")
}
if artifact.Type == nil {
artifact.Type = pointer.String(inferType(artifact.Name))
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": @now, "artifacts": PUSH(NOT_NULL(d.artifacts, []), @artifact) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"artifact": artifact, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// inferType guesses an artifact type from its value using the commonregex
// patterns. Values matching nothing are reported as "unknown".
func inferType(name string) string {
	// The checks run in a fixed order and the first match wins — keep this
	// order, since a value can plausibly match more than one pattern.
	checks := []struct {
		matches func(string) bool
		kind    string
	}{
		{commonregex.IPRegex.MatchString, "ip"},
		{commonregex.LinkRegex.MatchString, "url"},
		{commonregex.EmailRegex.MatchString, "email"},
		{commonregex.MD5HexRegex.MatchString, "md5"},
		{commonregex.SHA1HexRegex.MatchString, "sha1"},
		{commonregex.SHA256HexRegex.MatchString, "sha256"},
	}
	for _, check := range checks {
		if check.matches(name) {
			return check.kind
		}
	}

	return "unknown"
}
// RemoveArtifact deletes the artifact with the given name from a ticket and
// bumps the ticket's modified timestamp.
func (db *Database) RemoveArtifact(ctx context.Context, id int64, name string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
// AQL: locate the artifact by name, remove it from the array, update.
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET newartifacts = REMOVE_VALUE(d.artifacts, a)
UPDATE d WITH { "modified": @now, "artifacts": newartifacts } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"name": name, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// SetTemplate replaces a ticket's detail schema. Note that, unlike the
// other ticket mutations here, the modified timestamp is not updated.
func (db *Database) SetTemplate(ctx context.Context, id int64, schema string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "schema": @schema } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"schema": schema}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// AddComment appends a comment to a ticket and bumps its modified
// timestamp. A missing creator is filled from the authenticated user in the
// context (an error is returned if there is none); a missing created
// timestamp defaults to now (UTC).
func (db *Database) AddComment(ctx context.Context, id int64, comment *model.CommentForm) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
if comment.Creator == nil || *comment.Creator == "" {
user, _, exists := maut.UserFromContext(ctx)
if !exists {
return nil, errors.New("no user in context")
}
comment.Creator = pointer.String(user.ID)
}
if comment.Created == nil {
comment.Created = pointer.Time(time.Now().UTC())
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": @now, "comments": PUSH(NOT_NULL(d.comments, []), @comment) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"comment": comment, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// RemoveComment deletes a comment from a ticket by its position in the
// comments array (commentID is an index, not a document key) and bumps the
// ticket's modified timestamp.
func (db *Database) RemoveComment(ctx context.Context, id int64, commentID int64) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": @now, "comments": REMOVE_NTH(d.comments, @commentID) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"commentID": commentID, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// SetReferences replaces a ticket's reference list wholesale and bumps the
// ticket's modified timestamp.
func (db *Database) SetReferences(ctx context.Context, id int64, references []*model.Reference) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": @now, "references": @references } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"references": references, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// AddFile appends a file entry to a ticket and bumps the ticket's modified
// timestamp.
func (db *Database) AddFile(ctx context.Context, id int64, file *model.File) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
UPDATE d WITH { "modified": @now, "files": APPEND(NOT_NULL(d.files, []), [@file]) } IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{"file": file, "now": time.Now().UTC()}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
},
})
}
// AddTicketPlaybook attaches a playbook (built from a template form) to a
// ticket and starts the playbook's root automation tasks.
//
// The playbook key defaults to the kebab-cased playbook name, can be
// overridden by the form's ID, and is de-duplicated against the ticket's
// existing playbooks (name, name0, name1, ...).
//
// Fix: the original stored the playbook under the de-duplicated key but
// started the root task under the originally requested key, so a name
// collision made runRootTask look up a missing playbook and dereference a
// nil map entry. The de-duplicated key is now computed once and used for
// both the database update and the root-task run.
func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTemplate *model.PlaybookTemplateForm) (*model.TicketWithTickets, error) {
	pb, err := toPlaybook(playbookTemplate)
	if err != nil {
		return nil, err
	}

	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	playbookID := strcase.ToKebab(pb.Name)
	if playbookTemplate.ID != nil {
		playbookID = *playbookTemplate.ID
	}

	parentTicket, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, err
	}

	// De-duplicate once, then use this key everywhere below.
	playbookID = findName(parentTicket.Playbooks, playbookID)

	query := `FOR d IN @@collection
` + ticketFilterQuery + `
FILTER d._key == @ID
LET newplaybook = ZIP( [@playbookID], [@playbook] )
LET newplaybooks = MERGE(NOT_NULL(d.playbooks, {}), newplaybook)
LET newticket = MERGE(d, { "modified": @now, "playbooks": newplaybooks })
REPLACE d WITH newticket IN @@collection
RETURN NEW`

	ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
		"playbook":   pb,
		"playbookID": playbookID,
		"now":        time.Now().UTC(),
	}, ticketFilterVars), &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
	})
	if err != nil {
		return nil, err
	}

	if err := runRootTask(extractTicketResponse(ticket), playbookID, db); err != nil {
		return nil, err
	}

	return ticket, nil
}
// findName returns name if it is not yet used as a playbook key; otherwise
// it appends an increasing counter (name0, name1, ...) until a free key is
// found.
func findName(playbooks map[string]*model.PlaybookResponse, name string) string {
	candidate := name
	for i := 0; ; i++ {
		if _, taken := playbooks[candidate]; !taken {
			return candidate
		}
		candidate = fmt.Sprintf("%s%d", name, i)
	}
}
// runRootTask starts every automation task at topological order 0 of the
// given playbook on the ticket; non-automation root tasks are left for
// humans.
//
// Fix: a missing playbookID now yields an error — the original indexed the
// map unconditionally and panicked on the resulting nil pointer.
func runRootTask(ticket *model.TicketResponse, playbookID string, db *Database) error {
	playbook, ok := ticket.Playbooks[playbookID]
	if !ok {
		return fmt.Errorf("playbook %q does not exist", playbookID)
	}

	for id, task := range playbook.Tasks {
		if task.Order == 0 && task.Type == model.TaskTypeAutomation {
			if err := runTask(ticket.ID, playbookID, id, task, ticket, db); err != nil {
				return err
			}
		}
	}

	return nil
}
// RemoveTicketPlaybook detaches the playbook with the given key from a
// ticket and bumps the ticket's modified timestamp.
func (db *Database) RemoveTicketPlaybook(ctx context.Context, id int64, playbookID string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
// AQL: UNSET drops the playbook key from the playbooks object.
query := `FOR d IN @@collection
` + ticketFilterQuery + `
FILTER d._key == @ID
LET newplaybooks = UNSET(d.playbooks, @playbookID)
REPLACE d WITH MERGE(d, { "modified": @now, "playbooks": newplaybooks }) IN @@collection
RETURN NEW`
return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
})
}

View File

@@ -1,225 +0,0 @@
package database
import (
"context"
"errors"
"fmt"
"log"
"github.com/arangodb/go-driver"
"github.com/google/uuid"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// TaskGet loads a ticket and resolves one task of one of its playbooks,
// returning the ticket, the playbook, and the task wrapped together with
// its ticket/playbook context.
func (db *Database) TaskGet(ctx context.Context, id int64, playbookID string, taskID string) (*model.TicketWithTickets, *model.PlaybookResponse, *model.TaskWithContext, error) {
	ticket, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, nil, nil, err
	}

	playbook, found := ticket.Playbooks[playbookID]
	if !found {
		return nil, nil, nil, errors.New("playbook does not exist")
	}

	task, found := playbook.Tasks[taskID]
	if !found {
		return nil, nil, nil, errors.New("task does not exist")
	}

	return ticket, playbook, &model.TaskWithContext{
		PlaybookId:   playbookID,
		PlaybookName: playbook.Name,
		TaskId:       taskID,
		Task:         task,
		TicketId:     id,
		TicketName:   ticket.Name,
	}, nil
}
// TaskComplete marks a task as done, stores its result data and close time,
// and triggers follow-up automation tasks whose requirements evaluate to
// true against the stored data. Only an active task can be completed.
//
// Fix: the playbook and task are now looked up defensively — the original
// indexed the nested maps directly (inc.Playbooks[playbookID].Tasks[taskID])
// and panicked with a nil-pointer dereference when either was missing.
func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID string, taskID string, data any) (*model.TicketWithTickets, error) {
	inc, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, err
	}

	incPlaybook, ok := inc.Playbooks[playbookID]
	if !ok {
		return nil, errors.New("playbook does not exist")
	}
	incTask, ok := incPlaybook.Tasks[taskID]
	if !ok {
		return nil, errors.New("task does not exist")
	}
	if !incTask.Active {
		return nil, errors.New("cannot be completed")
	}

	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	// AQL: rebuild the nested playbooks → tasks structure with the
	// completed task merged in.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
LET playbook = d.playbooks[@playbookID]
LET task = playbook.tasks[@taskID]
LET newtask = MERGE(task, {"data": NOT_NULL(@data, {}), "done": true, closed: @closed })
LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`

	ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
		"playbookID": playbookID,
		"taskID":     taskID,
		"data":       data,
		"closed":     time.Now().UTC(),
		"now":        time.Now().UTC(),
	}, ticketFilterVars), &busdb.Operation{
		Type: bus.DatabaseEntryUpdated,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
	})
	if err != nil {
		return nil, err
	}

	// Start follow-up tasks based on the freshly updated ticket state.
	playbook := ticket.Playbooks[playbookID]
	task := playbook.Tasks[taskID]
	runNextTasks(id, playbookID, task.Next, task.Data, extractTicketResponse(ticket), db)

	return ticket, nil
}
// extractTicketResponse projects a TicketWithTickets back down to a plain
// TicketResponse, dropping the linked tickets, correlated tickets, and
// logs. All shared fields are copied one-to-one.
func extractTicketResponse(ticket *model.TicketWithTickets) *model.TicketResponse {
return &model.TicketResponse{
Artifacts: ticket.Artifacts,
Comments: ticket.Comments,
Created: ticket.Created,
Details: ticket.Details,
Files: ticket.Files,
ID: ticket.ID,
Modified: ticket.Modified,
Name: ticket.Name,
Owner: ticket.Owner,
Playbooks: ticket.Playbooks,
Read: ticket.Read,
References: ticket.References,
Schema: ticket.Schema,
Status: ticket.Status,
Type: ticket.Type,
Write: ticket.Write,
}
}
// TaskUpdateOwner assigns a new owner to one task of one of the ticket's
// playbooks and bumps the ticket's modified timestamp.
func (db *Database) TaskUpdateOwner(ctx context.Context, id int64, playbookID string, taskID string, owner string) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
// AQL: rebuild the nested playbooks → tasks structure with the new owner
// merged into the target task.
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
LET playbook = d.playbooks[@playbookID]
LET task = playbook.tasks[@taskID]
LET newtask = MERGE(task, {"owner": @owner })
LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"taskID": taskID,
"owner": owner,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
})
if err != nil {
return nil, err
}
return ticket, nil
}
// TaskUpdateData replaces the data payload of one task of one of the
// ticket's playbooks and bumps the ticket's modified timestamp.
func (db *Database) TaskUpdateData(ctx context.Context, id int64, playbookID string, taskID string, data map[string]any) (*model.TicketWithTickets, error) {
ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
if err != nil {
return nil, err
}
// AQL: rebuild the nested playbooks → tasks structure with the new data
// merged into the target task.
query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
LET playbook = d.playbooks[@playbookID]
LET task = playbook.tasks[@taskID]
LET newtask = MERGE(task, {"data": @data })
LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
LET newplaybook = MERGE(playbook, {"tasks": newtasks})
LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
UPDATE d WITH { "modified": @now, "playbooks": newplaybooks } IN @@collection
RETURN NEW`
ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]any{
"playbookID": playbookID,
"taskID": taskID,
"data": data,
"now": time.Now().UTC(),
}, ticketFilterVars), &busdb.Operation{
Type: bus.DatabaseEntryUpdated,
Ids: []driver.DocumentID{
driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
},
})
if err != nil {
return nil, err
}
return ticket, nil
}
// TaskRun executes a single task immediately if it is an automation task;
// non-automation tasks are silently ignored.
func (db *Database) TaskRun(ctx context.Context, id int64, playbookID string, taskID string) error {
	ticket, _, taskCtx, err := db.TaskGet(ctx, id, playbookID, taskID)
	if err != nil {
		return err
	}

	if taskCtx.Task.Type != model.TaskTypeAutomation {
		return nil
	}

	return runTask(id, playbookID, taskID, taskCtx.Task, extractTicketResponse(ticket), db)
}
// runNextTasks evaluates the "next" edges of a completed task and starts
// every automation follow-up task whose requirement expression evaluates to
// true against the completed task's data. It is best effort: failures are
// logged and skipped so one broken follow-up does not block the others.
//
// Fix: requirement-evaluation errors are now logged instead of being
// silently discarded, and missing playbook/task entries are skipped instead
// of dereferencing nil map entries.
func runNextTasks(id int64, playbookID string, next map[string]string, data any, ticket *model.TicketResponse, db *Database) {
	playbook, ok := ticket.Playbooks[playbookID]
	if !ok {
		return
	}

	for nextTaskID, requirement := range next {
		nextTask, ok := playbook.Tasks[nextTaskID]
		if !ok || nextTask.Type != model.TaskTypeAutomation {
			continue
		}

		runnable, err := evalRequirement(requirement, data)
		if err != nil {
			log.Println(err)

			continue
		}

		if runnable {
			if err := runTask(id, playbookID, nextTaskID, nextTask, ticket, db); err != nil {
				log.Println(err)
			}
		}
	}
}
// runTask publishes an automation job for the given task under a fresh job
// UUID, carrying the ticket/playbook/task context and origin.
// NOTE(review): *task.Automation is dereferenced unchecked — callers appear
// to only pass automation-type tasks; confirm Automation is always non-nil
// for those.
func runTask(ticketID int64, playbookID string, taskID string, task *model.TaskResponse, ticket *model.TicketResponse, db *Database) error {
playbook := ticket.Playbooks[playbookID]
msgContext := &model.Context{Playbook: playbook, Task: task, Ticket: ticket}
origin := &model.Origin{TaskOrigin: &model.TaskOrigin{TaskId: taskID, PlaybookId: playbookID, TicketId: ticketID}}
jobID := uuid.NewString()
return publishJobMapping(jobID, *task.Automation, msgContext, origin, task.Payload, db)
}

View File

@@ -1,102 +0,0 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toTicketType converts a TicketTypeForm into a storable TicketType,
// copying all fields one-to-one.
func toTicketType(doc *model.TicketTypeForm) *model.TicketType {
return &model.TicketType{
Name: doc.Name,
Icon: doc.Icon,
DefaultPlaybooks: doc.DefaultPlaybooks,
DefaultTemplate: doc.DefaultTemplate,
DefaultGroups: doc.DefaultGroups,
}
}
// toTicketTypeResponse builds a TicketTypeResponse from a stored ticket
// type and its document key, which becomes the response ID.
func toTicketTypeResponse(key string, doc *model.TicketType) *model.TicketTypeResponse {
return &model.TicketTypeResponse{
ID: key,
Name: doc.Name,
Icon: doc.Icon,
DefaultPlaybooks: doc.DefaultPlaybooks,
DefaultTemplate: doc.DefaultTemplate,
DefaultGroups: doc.DefaultGroups,
}
}
// TicketTypeCreate stores a new ticket type under the kebab-cased form name
// as document key. The form and its name are required.
func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *model.TicketTypeForm) (*model.TicketTypeResponse, error) {
if tickettype == nil {
return nil, errors.New("requires ticket type")
}
if tickettype.Name == "" {
return nil, errors.New("requires ticket type name")
}
var doc model.TicketType
newctx := driver.WithReturnNew(ctx, &doc)
// NOTE(review): both ctx and newctx are passed here, unlike the other
// collection calls in this package — presumably the busdb collection
// wrapper takes (ctx, returnNewCtx, key, doc); confirm the signature.
meta, err := db.tickettypeCollection.CreateDocument(ctx, newctx, strcase.ToKebab(tickettype.Name), toTicketType(tickettype))
if err != nil {
return nil, err
}
return toTicketTypeResponse(meta.Key, &doc), nil
}
// TicketTypeGet loads a single ticket type by its document key.
func (db *Database) TicketTypeGet(ctx context.Context, id string) (*model.TicketTypeResponse, error) {
	var tickettype model.TicketType
	meta, err := db.tickettypeCollection.ReadDocument(ctx, id, &tickettype)
	if err != nil {
		return nil, err
	}

	return toTicketTypeResponse(meta.Key, &tickettype), nil
}
// TicketTypeUpdate replaces a ticket type document wholesale and returns
// the stored result.
func (db *Database) TicketTypeUpdate(ctx context.Context, id string, tickettype *model.TicketTypeForm) (*model.TicketTypeResponse, error) {
var doc model.TicketType
// WithReturnNew makes the driver fill doc with the post-replace document.
ctx = driver.WithReturnNew(ctx, &doc)
meta, err := db.tickettypeCollection.ReplaceDocument(ctx, id, toTicketType(tickettype))
if err != nil {
return nil, err
}
return toTicketTypeResponse(meta.Key, &doc), nil
}
// TicketTypeDelete removes a ticket type by its document key.
func (db *Database) TicketTypeDelete(ctx context.Context, id string) error {
	if _, err := db.tickettypeCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}

	return nil
}
// TicketTypeList returns every ticket type in the collection.
//
// Fix: the final return now uses an explicit nil error — the original
// returned the stale outer err variable, which was always nil at that
// point and obscured intent.
func (db *Database) TicketTypeList(ctx context.Context) ([]*model.TicketTypeResponse, error) {
	query := "FOR d IN @@collection RETURN d"

	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": TicketTypeCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	// Drain the cursor; IsNoMoreDocuments signals normal end of results.
	var docs []*model.TicketTypeResponse
	for {
		var doc model.TicketType

		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}

		docs = append(docs, toTicketTypeResponse(meta.Key, &doc))
	}

	return docs, nil
}

View File

@@ -1,233 +0,0 @@
package database
import (
"context"
"crypto/sha256"
"errors"
"fmt"
"log"
"math/rand"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
maut "github.com/jonas-plum/maut/auth"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/generated/time"
)
// letters is the alphabet for generated API keys: URL-safe base64-style
// characters (alphanumerics plus '-' and '_'), 64 symbols in total.
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_")

// init seeds the global math/rand source so generated keys differ per run.
// NOTE(review): math/rand is not cryptographically secure; for API-key
// material crypto/rand would be safer — confirm threat model before changing.
// rand.Seed is also deprecated since Go 1.20.
func init() {
	rand.Seed(time.Now().UnixNano())
}

// generateKey returns a random 32-character key drawn from letters.
func generateKey() string {
	b := make([]rune, 32)
	for i := range b {
		b[i] = letters[rand.Intn(len(letters))]
	}
	return string(b)
}
// toUser converts a UserForm into the stored User document. sha256 is the
// hex-encoded hash of the user's API key; nil for password-based users.
func toUser(user *model.UserForm, sha256 *string) *model.User {
	u := &model.User{
		Blocked: user.Blocked,
		Roles:   user.Roles,
		Sha256:  sha256,
		Apikey:  user.Apikey,
	}
	// Disabled JSON-schema validation kept for reference:
	// log.Println(u)
	// b, _ := json.Marshal(u)
	// loader := gojsonschema.NewBytesLoader(b)
	// res, err := model.UserSchema.Validate(loader)
	// if err != nil {
	// log.Println(err)
	// }
	// log.Println(res.Errors())
	return u
}
// toUserResponse maps a stored user document onto the public API response;
// the sha256 API-key hash is deliberately not part of the response type.
func toUserResponse(key string, user *model.UserResponse) *model.UserResponse {
	resp := &model.UserResponse{
		ID:      key,
		Blocked: user.Blocked,
		Roles:   user.Roles,
		Apikey:  user.Apikey,
	}

	return resp
}
// toNewUserResponse maps a freshly created user onto the response that also
// carries the one-time plaintext secret (nil for non-API-key users).
func toNewUserResponse(key string, user *model.User, secret *string) *model.NewUserResponse {
	resp := &model.NewUserResponse{
		ID:      key,
		Blocked: user.Blocked,
		Roles:   user.Roles,
		Secret:  secret,
	}

	return resp
}
// UserGetOrCreate fetches the user with newUser.ID, creating it from the
// given form when no such user exists yet.
func (db *Database) UserGetOrCreate(ctx context.Context, newUser *model.UserForm) (*model.UserResponse, error) {
	if user, err := db.UserGet(ctx, newUser.ID); err == nil {
		return user, nil
	}

	created, err := db.UserCreate(ctx, newUser)
	if err != nil {
		return nil, err
	}

	return &model.UserResponse{ID: created.ID, Roles: created.Roles, Blocked: created.Blocked}, nil
}
// UserCreate stores a new user. When the form requests an API key, a random
// key is generated and only its sha256 hash is persisted; the plaintext key
// is returned exactly once in the NewUserResponse.
func (db *Database) UserCreate(ctx context.Context, newUser *model.UserForm) (*model.NewUserResponse, error) {
	// key: plaintext API key handed back to the caller once;
	// sha256Hash: what gets stored. Both stay nil for non-API-key users.
	var key, sha256Hash *string
	if newUser.Apikey {
		key, sha256Hash = generateAPIKey()
	}
	var doc model.User
	newctx := driver.WithReturnNew(ctx, &doc)
	// NOTE(review): CreateDocument takes both ctx and newctx — this appears
	// to be the project's busdb wrapper signature, not the upstream
	// go-driver one; confirm against the collection type.
	meta, err := db.userCollection.CreateDocument(ctx, newctx, newUser.ID, toUser(newUser, sha256Hash))
	if err != nil {
		return nil, err
	}
	return toNewUserResponse(meta.Key, &doc, key), nil
}
// UserCreateSetupAPIKey bootstraps an installation by storing the well-known
// "setup" admin user whose API-key hash is derived from the provided key.
func (db *Database) UserCreateSetupAPIKey(ctx context.Context, key string) (*model.UserResponse, error) {
	newUser := &model.UserForm{
		ID:      "setup",
		Roles:   []string{maut.AdminRole},
		Apikey:  true,
		Blocked: false,
	}
	// Only the hex-encoded sha256 of the key is persisted, never the key.
	sha256Hash := pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
	var doc model.User
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, sha256Hash))
	if err != nil {
		return nil, err
	}
	return toUserResponse(meta.Key, &doc), nil
}
// UserUpdate replaces the user identified by id with the given form.
// API-key users are immutable and produce an error. Password hashes are not
// carried over (toUser is called with a nil sha256).
func (db *Database) UserUpdate(ctx context.Context, id string, user *model.UserForm) (*model.UserResponse, error) {
	var doc model.User
	if _, err := db.userCollection.ReadDocument(ctx, id, &doc); err != nil {
		return nil, err
	}

	if doc.Apikey {
		return nil, errors.New("cannot update an API key")
	}

	ctx = driver.WithReturnNew(ctx, &doc)
	user.ID = id

	meta, err := db.userCollection.ReplaceDocument(ctx, id, toUser(user, nil))
	if err != nil {
		return nil, err
	}

	return toUserResponse(meta.Key, &doc), nil
}
// UserGet loads a single user document by its key.
func (db *Database) UserGet(ctx context.Context, id string) (*model.UserResponse, error) {
	var user model.User

	meta, err := db.userCollection.ReadDocument(ctx, id, &user)
	if err != nil {
		return nil, err
	}

	return toUserResponse(meta.Key, &user), nil
}
// UserDelete removes the user document with the given key.
func (db *Database) UserDelete(ctx context.Context, id string) error {
	if _, err := db.userCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}

	return nil
}
// UserList returns all users. The stored sha256 API-key hash is cleared from
// every document before mapping so the hash never leaves the database layer.
func (db *Database) UserList(ctx context.Context) ([]*model.UserResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": UserCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*model.UserResponse
	for {
		var doc model.User
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}

		doc.Sha256 = nil // scrub the API-key hash before exposing the doc

		docs = append(docs, toUserResponse(meta.Key, &doc))
	}

	// Fixed: previously `return docs, err`, which only returned nil because
	// the loop's err is a shadowed variable; return nil explicitly.
	return docs, nil
}
// UserAPIKeyByHash looks up the API-key user whose stored sha256 equals the
// given hex-encoded hash. Returns an error when no document matches.
func (db *Database) UserAPIKeyByHash(ctx context.Context, sha256 string) (*model.UserResponse, error) {
	query := `FOR d in @@collection
	FILTER d.apikey && d.sha256 == @sha256
	RETURN d`
	vars := map[string]any{"@collection": UserCollectionName, "sha256": sha256}
	cursor, _, err := db.Query(ctx, query, vars, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var doc model.User
	meta, err := cursor.ReadDocument(ctx, &doc)
	if err != nil {
		return nil, err
	}

	// Fixed: err is known to be nil here; return nil instead of the stale
	// variable for clarity.
	return toUserResponse(meta.Key, &doc), nil
}
// UserByIDAndPassword authenticates a non-API-key user, comparing
// SHA512(salt + password) against the stored hash inside the AQL query.
// Returns an error when no user matches.
func (db *Database) UserByIDAndPassword(ctx context.Context, id, password string) (*model.UserResponse, error) {
	// Security fix: never log the plaintext password — it would leak
	// credentials into log files. Log only the user id.
	log.Println("UserByIDAndPassword", id)
	query := `FOR d in @@collection
	FILTER d._key == @id && !d.apikey && d.sha512 == SHA512(CONCAT(d.salt, @password))
	RETURN d`
	vars := map[string]any{"@collection": UserCollectionName, "id": id, "password": password}
	cursor, _, err := db.Query(ctx, query, vars, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var doc model.User
	meta, err := cursor.ReadDocument(ctx, &doc)
	if err != nil {
		return nil, err
	}

	// err is nil at this point; return nil explicitly.
	return toUserResponse(meta.Key, &doc), nil
}
// generateAPIKey creates a fresh random API key, returning both the
// plaintext key and its hex-encoded sha256 hash.
func generateAPIKey() (key, sha256Hash *string) {
	plaintext := generateKey()
	digest := fmt.Sprintf("%x", sha256.Sum256([]byte(plaintext)))

	return &plaintext, pointer.String(digest)
}

View File

@@ -1,87 +0,0 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/model"
)
// toUserDataResponse maps a stored per-user settings document onto the API
// response form, using key as the response ID.
func toUserDataResponse(key string, doc *model.UserData) *model.UserDataResponse {
	resp := &model.UserDataResponse{
		ID:         key,
		Name:       doc.Name,
		Email:      doc.Email,
		Image:      doc.Image,
		Timeformat: doc.Timeformat,
	}

	return resp
}
// UserDataCreate stores per-user settings under the given user id.
// Both a non-nil userdata document and a non-empty id are required.
func (db *Database) UserDataCreate(ctx context.Context, id string, userdata *model.UserData) error {
	if userdata == nil {
		return errors.New("requires setting")
	}
	if id == "" {
		return errors.New("requires username")
	}
	// NOTE(review): ctx is passed twice — the other CreateDocument call
	// sites in this package pass (ctx, returnNewCtx, key, doc); here no
	// return-new context is needed. Confirm against the busdb wrapper.
	_, err := db.userdataCollection.CreateDocument(ctx, ctx, id, userdata)
	return err
}
// UserDataGetOrCreate returns the stored user data for id. When none exists,
// it persists newUserData and returns a response built from that input,
// together with any error the create produced.
func (db *Database) UserDataGetOrCreate(ctx context.Context, id string, newUserData *model.UserData) (*model.UserDataResponse, error) {
	setting, err := db.UserDataGet(ctx, id)
	if err != nil {
		// A failed get is treated as "missing": create and echo the input.
		return toUserDataResponse(id, newUserData), db.UserDataCreate(ctx, id, newUserData)
	}
	return setting, nil
}
// UserDataGet loads the per-user settings document stored under id.
func (db *Database) UserDataGet(ctx context.Context, id string) (*model.UserDataResponse, error) {
	var doc model.UserData
	meta, err := db.userdataCollection.ReadDocument(ctx, id, &doc)
	if err != nil {
		return nil, err
	}
	// Fixed: err is known to be nil here; return nil instead of the stale
	// variable.
	return toUserDataResponse(meta.Key, &doc), nil
}
// UserDataList returns all per-user settings documents, sorted by username.
func (db *Database) UserDataList(ctx context.Context) ([]*model.UserDataResponse, error) {
	query := "FOR d IN @@collection SORT d.username ASC RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]any{"@collection": UserDataCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*model.UserDataResponse
	for {
		var doc model.UserData
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toUserDataResponse(meta.Key, &doc))
	}

	// Fixed: previously `return docs, err`, which only returned nil because
	// the loop's err is a shadowed variable; return nil explicitly.
	return docs, nil
}
// UserDataUpdate replaces the per-user settings stored under id and returns
// the updated document.
func (db *Database) UserDataUpdate(ctx context.Context, id string, userdata *model.UserData) (*model.UserDataResponse, error) {
	var updated model.UserData

	returnNewCtx := driver.WithReturnNew(ctx, &updated)

	meta, err := db.userdataCollection.ReplaceDocument(returnNewCtx, id, userdata)
	if err != nil {
		return nil, err
	}

	return toUserDataResponse(meta.Key, &updated), nil
}

View File

@@ -1,176 +0,0 @@
package database_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/model"
"github.com/SecurityBrewery/catalyst/generated/pointer"
"github.com/SecurityBrewery/catalyst/test"
)
// bob is the user-data fixture shared by the tests in this file.
var bob = &model.UserData{
	Email: pointer.String("bob@example.org"),
	Name:  pointer.String("Bob"),
}

// bobResponse is the API response expected when bob is stored under id "bob".
var bobResponse = &model.UserDataResponse{
	ID:    "bob",
	Email: pointer.String("bob@example.org"),
	Name:  pointer.String("Bob"),
}
// TestDatabase_UserDataCreate checks input validation and the happy path of
// Database.UserDataCreate against a fresh test database.
func TestDatabase_UserDataCreate(t *testing.T) {
	t.Parallel()
	type args struct {
		id      string
		setting *model.UserData
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal setting", args: args{id: "bob", setting: bob}, wantErr: false},
		{name: "Nil setting", args: args{id: "bob"}, wantErr: true},
		{name: "UserData without settingname", args: args{id: ""}, wantErr: true},
		{name: "Only settingname", args: args{id: "bob"}, wantErr: true},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Fixed failure message: this exercises UserDataCreate, not the
			// stale "settingCreate".
			if err := db.UserDataCreate(test.Context(), tt.args.id, tt.args.setting); (err != nil) != tt.wantErr {
				t.Errorf("UserDataCreate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
// TestDatabase_UserDataGet verifies reading an existing and a missing
// user-data document.
func TestDatabase_UserDataGet(t *testing.T) {
	t.Parallel()
	type args struct {
		id string
	}
	tests := []struct {
		name    string
		args    args
		want    *model.UserDataResponse
		wantErr bool
	}{
		{name: "Normal get", args: args{id: "bob"}, want: bobResponse},
		{name: "Not existing", args: args{id: "foo"}, wantErr: true},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Seed the fixture. Failure message fixed from stale "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			got, err := db.UserDataGet(test.Context(), tt.args.id)
			if (err != nil) != tt.wantErr {
				t.Errorf("UserDataGet() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if err != nil {
				return
			}
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_UserDataList verifies listing user-data documents after
// seeding a single fixture.
func TestDatabase_UserDataList(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name    string
		want    []*model.UserDataResponse
		wantErr bool
	}{
		{name: "Normal list", want: []*model.UserDataResponse{bobResponse}},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Seed the fixture. Failure message fixed from stale "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			got, err := db.UserDataList(test.Context())
			if (err != nil) != tt.wantErr {
				t.Errorf("UserDataList() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_UserDataUpdate verifies updating an existing and a missing
// user-data document.
func TestDatabase_UserDataUpdate(t *testing.T) {
	t.Parallel()
	type args struct {
		id      string
		setting *model.UserData
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal", args: args{id: "bob", setting: bob}},
		{name: "Not existing", args: args{id: "foo"}, wantErr: true},
	}
	for _, tt := range tests {
		tt := tt
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Seed the fixture. Failure message fixed from stale "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			if _, err := db.UserDataUpdate(test.Context(), tt.args.id, tt.args.setting); (err != nil) != tt.wantErr {
				t.Errorf("UserDataUpdate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}

View File

@@ -1,160 +0,0 @@
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2020 by Martin Mirchev
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
 * associated documentation files (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge, publish, distribute,
 * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
 * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser
 * Developed by : Bart Kiers, bart@big-o.nl
 */
// $antlr-format alignTrailingComments on, columnLimit 150, maxEmptyLinesToKeep 1, reflowComments off, useTab off
// $antlr-format allowShortRulesOnASingleLine on, alignSemicolons ownLine

// Lexer for CAQL, a restricted subset of ArangoDB's AQL expression language.
lexer grammar CAQLLexer;

channels { ERRORCHANNEL }

DOT: '.';

// Token inventory follows ArangoDB's AQL grammar:
// https://github.com/arangodb/arangodb/blob/devel/arangod/Aql/grammar.y
T_REGEX_MATCH: '=~'; // "=~ operator" (comment fixed: previously said "~=")
T_REGEX_NON_MATCH: '!~'; // "!~ operator" (comment fixed: previously said "~!")
T_EQ: '=='; // "== operator";
T_NE: '!='; // "!= operator";
T_LT: '<'; // "< operator";
T_GT: '>'; // "> operator";
T_LE: '<='; // "<= operator";
T_GE: '>='; // ">= operator";
T_PLUS: '+'; // "+ operator"
T_MINUS: '-'; // "- operator"
T_TIMES: '*'; // "* operator"
T_DIV: '/'; // "/ operator"
T_MOD: '%'; // "% operator"
T_QUESTION: '?'; // "?"
T_COLON: ':'; // ":"
T_SCOPE: '::'; // "::"
T_RANGE: '..'; // ".."
T_COMMA: ','; // ","
T_OPEN: '('; // "("
T_CLOSE: ')'; // ")"
T_OBJECT_OPEN: '{'; // "{"
T_OBJECT_CLOSE: '}'; // "}"
T_ARRAY_OPEN: '['; // "["
T_ARRAY_CLOSE: ']'; // "]"

// Case-insensitive AQL keywords, built from the A..Z fragments below.
// https://www.arangodb.com/docs/stable/aql/fundamentals-syntax.html#keywords
T_AGGREGATE: A G G R E G A T E;
T_ALL: A L L;
T_AND: (A N D | '&&');
T_ANY: A N Y;
T_ASC: A S C;
T_COLLECT: C O L L E C T;
T_DESC: D E S C;
T_DISTINCT: D I S T I N C T;
T_FALSE: F A L S E;
T_FILTER: F I L T E R;
T_FOR: F O R;
T_GRAPH: G R A P H;
T_IN: I N;
T_INBOUND: I N B O U N D;
T_INSERT: I N S E R T;
T_INTO: I N T O;
T_K_SHORTEST_PATHS: K '_' S H O R T E S T '_' P A T H S;
T_LET: L E T;
T_LIKE: L I K E;
T_LIMIT: L I M I T;
T_NONE: N O N E;
T_NOT: (N O T | '!');
T_NULL: N U L L;
T_OR: (O R | '||');
T_OUTBOUND: O U T B O U N D;
T_REMOVE: R E M O V E;
T_REPLACE: R E P L A C E;
T_RETURN: R E T U R N;
T_SHORTEST_PATH: S H O R T E S T '_' P A T H;
T_SORT: S O R T;
T_TRUE: T R U E;
T_UPDATE: U P D A T E;
T_UPSERT: U P S E R T;
T_WITH: W I T H;
T_KEEP: K E E P;
T_COUNT: C O U N T;
T_OPTIONS: O P T I O N S;
T_PRUNE: P R U N E;
T_SEARCH: S E A R C H;
T_TO: T O;
T_CURRENT: C U R R E N T;
T_NEW: N E W;
T_OLD: O L D;

// Identifiers and literals.
T_STRING: [a-zA-Z_] [a-zA-Z_0-9]*;
T_INT: [1-9] DIGIT* | '0' | '0x' HEX_DIGIT+ | '0b' [0-1]+;
T_FLOAT: ( [1-9] DIGIT* | '0' )? '.' DIGIT+ (E [-+]? DIGIT+)?;
T_PARAMETER: '@' T_STRING;
T_QUOTED_STRING: ('\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\'' | '"' ( '\\'. | '""' | ~('"'| '\\') )* '"');

SINGLE_LINE_COMMENT: '//' ~[\r\n]* (('\r'? '\n') | EOF) -> channel(HIDDEN);
MULTILINE_COMMENT: '/*' .*? '*/' -> channel(HIDDEN);
SPACES: [ \u000B\t\r\n] -> channel(HIDDEN);
UNEXPECTED_CHAR: .;

fragment HEX_DIGIT: [0-9a-fA-F];
fragment DIGIT: [0-9];

// One fragment per letter, matching either case (keyword case-insensitivity).
fragment A: [aA];
fragment B: [bB];
fragment C: [cC];
fragment D: [dD];
fragment E: [eE];
fragment F: [fF];
fragment G: [gG];
fragment H: [hH];
fragment I: [iI];
fragment J: [jJ];
fragment K: [kK];
fragment L: [lL];
fragment M: [mM];
fragment N: [nN];
fragment O: [oO];
fragment P: [pP];
fragment Q: [qQ];
fragment R: [rR];
fragment S: [sS];
fragment T: [tT];
fragment U: [uU];
fragment V: [vV];
fragment W: [wW];
fragment X: [xX];
fragment Y: [yY];
fragment Z: [zZ];

// NOTE(review): rule name misspells "RECOGNITION"; renaming would change the
// generated lexer API, so it is left as-is.
ERROR_RECONGNIGION: . -> channel(ERRORCHANNEL);

View File

@@ -1,109 +0,0 @@
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2014 by Bart Kiers
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
 * associated documentation files (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge, publish, distribute,
 * sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or
 * substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
 * NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
 * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser
 * Developed by:
 *     Bart Kiers, bart@big-o.nl
 *     Martin Mirchev, marti_2203@abv.bg
 *     Mike Lische, mike@lischke-online.de
 */
// $antlr-format alignTrailingComments on, columnLimit 130, minEmptyLines 1, maxEmptyLinesToKeep 1, reflowComments off
// $antlr-format useTab off, allowShortRulesOnASingleLine off, allowShortBlocksOnASingleLine on, alignSemicolons ownLine

// Parser for CAQL expressions; tokens come from CAQLLexer.
parser grammar CAQLParser;

options {
    tokenVocab = CAQLLexer;
}

// Entry point: a single expression followed by end-of-input.
parse: expression EOF
    ;

// Left-recursive expression rule; ANTLR4 resolves operator precedence by
// the order of the alternatives (earlier alternatives bind tighter).
expression:
    value_literal
    | reference
    | operator_unary
    | expression (T_PLUS|T_MINUS) expression
    | expression (T_TIMES|T_DIV|T_MOD) expression
    | expression T_RANGE expression
    | expression (T_LT|T_GT|T_LE|T_GE) expression
    | expression T_NOT? T_IN expression
    | expression (T_EQ|T_NE) expression
    | expression (T_ALL|T_ANY|T_NONE) eq_op=(T_EQ|T_NE|T_LT|T_GT|T_LE|T_GE|T_IN) expression
    | expression (T_ALL|T_ANY|T_NONE) T_NOT T_IN expression
    | expression T_NOT? (T_LIKE|T_REGEX_MATCH|T_REGEX_NON_MATCH) expression
    | expression T_AND expression
    | expression T_OR expression
    | expression T_QUESTION expression T_COLON expression
    | expression T_QUESTION T_COLON expression
    ;

operator_unary: (
    T_PLUS expression
    | T_MINUS expression
    | T_NOT expression
);

// A reference: plain identifier, collection literal, function call,
// parenthesized expression, or member/index access chains.
reference:
    T_STRING
    | compound_value
    | function_call
    | T_OPEN expression T_CLOSE
    | reference DOT T_STRING
    | reference T_ARRAY_OPEN expression T_ARRAY_CLOSE
    ;

compound_value: (
    array
    | object
);

function_call: (
    T_STRING T_OPEN expression? (T_COMMA expression)*? T_COMMA? T_CLOSE
);

value_literal: (
    T_QUOTED_STRING
    | T_INT
    | T_FLOAT
    | T_NULL
    | T_TRUE
    | T_FALSE
);

array:(
    T_ARRAY_OPEN expression? (T_COMMA expression)*? T_COMMA? T_ARRAY_CLOSE
);

object:
    T_OBJECT_OPEN object_element? (T_COMMA object_element)* T_COMMA? T_OBJECT_CLOSE
    ;

object_element:(
    T_STRING
    | object_element_name T_COLON expression
    | T_ARRAY_OPEN expression T_ARRAY_CLOSE T_COLON expression
);

object_element_name:(
    T_STRING
    | T_QUOTED_STRING
);

View File

@@ -1,30 +0,0 @@
# Swagger 2.0 fragment: artifact/enrichment definitions only; no paths.
swagger: "2.0"
info: { version: "", title: "" }
paths: { }
definitions:
  Artifact:
    type: object
    required: [ name ]
    properties:
      name: { type: string, example: "2.2.2.2" }
      type: { type: string }
      kind: { type: string }
      status: { type: string, example: "Unknown" }
      # Map of enrichment name -> Enrichment result.
      enrichments: { type: object, additionalProperties: { $ref: "#/definitions/Enrichment" } }
  EnrichmentForm:
    type: object
    required: [ name, data ]
    properties:
      name: { type: string, example: "hash.sha1" }
      data: { type: object, example: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } }
  Enrichment:
    type: object
    required: [ name, data, created ]
    properties:
      name: { type: string, example: "hash.sha1" }
      data: { type: object, example: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } }
      created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }

View File

@@ -1,160 +0,0 @@
# Swagger 2.0 fragment: CRUD endpoints for automations.
# The "test" example blocks appear to double as API-test fixtures — confirm
# against the project's test harness.
swagger: "2.0"
info: { version: "", title: "" }
paths:
  /automations:
    get:
      tags: [ "automations" ]
      summary: "List automations"
      operationId: "listAutomations"
      responses:
        "200":
          description: "successful operation"
          schema: { type: array, items: { $ref: "#/definitions/AutomationResponse" } }
          examples:
            test:
              - id: comment
                image: "docker.io/python:3"
                script: ""
                type: [ playbook ]
              - id: hash.sha1
                image: "docker.io/python:3"
                script: ""
                type: [ global, artifact, playbook ]
                schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
              - id: vt.hash
                image: "docker.io/python:3"
                script: ""
                type: [ global, artifact, playbook ]
                schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
      security: [ { roles: [ "automation:read" ] } ]
    post:
      tags: [ "automations" ]
      summary: "Create a new automation"
      operationId: "createAutomation"
      parameters:
        - { name: "automation", in: "body", description: "New automation", required: true, schema: { $ref: "#/definitions/AutomationForm" }, x-example: { id: "hash-sha-256", image: "docker.io/python:3", script: "import sys\nimport json\nimport hashlib\n\n\ndef run(msg):\n sha256 = hashlib.sha256(msg['payload']['default'].encode('utf-8'))\n return {'hash': sha256.hexdigest()}\n\n\nprint(json.dumps(run(json.loads(sys.argv[1]))))\n", type: [ global ] } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/AutomationResponse" }
          examples:
            test:
              id: "hash-sha-256"
              image: "docker.io/python:3"
              type: [ global ]
              script: |
                import sys
                import json
                import hashlib
                def run(msg):
                  sha256 = hashlib.sha256(msg['payload']['default'].encode('utf-8'))
                  return {'hash': sha256.hexdigest()}
                print(json.dumps(run(json.loads(sys.argv[1]))))
      security: [ { roles: [ "automation:write" ] } ]
  /automations/{id}:
    get:
      tags: [ "automations" ]
      summary: "Get a single automation"
      operationId: "getAutomation"
      parameters:
        - { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/AutomationResponse" }
          examples:
            test:
              id: hash.sha1
              image: "docker.io/python:3"
              type: [ global, artifact, playbook ]
              schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
              script: |
                #!/usr/bin/env python
                import sys
                import json
                import hashlib
                def run(msg):
                  sha1 = hashlib.sha1(msg['payload']['default'].encode('utf-8'))
                  return {"hash": sha1.hexdigest()}
                print(json.dumps(run(json.loads(sys.argv[1]))))
      security: [ { roles: [ "automation:read" ] } ]
    put:
      tags: [ "automations" ]
      summary: "Update an existing automation"
      operationId: "updateAutomation"
      parameters:
        - { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
        - { name: "automation", in: "body", description: "Automation object that needs to be added", required: true, schema: { $ref: "#/definitions/AutomationForm" }, x-example: { id: hash.sha1, image: "docker.io/python:3", script: "import sys\nimport json\nimport hashlib\n\n\ndef run(msg):\n sha1 = hashlib.sha1(msg['payload'].encode('utf-8'))\n return {'hash': sha1.hexdigest()}\n\n\nprint(json.dumps(run(json.loads(sys.argv[1]))))\n", type: [ global, artifact, playbook ] } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/AutomationResponse" }
          examples:
            test:
              id: hash.sha1
              image: "docker.io/python:3"
              type: [ global, artifact, playbook ]
              script: |
                import sys
                import json
                import hashlib
                def run(msg):
                  sha1 = hashlib.sha1(msg['payload'].encode('utf-8'))
                  return {'hash': sha1.hexdigest()}
                print(json.dumps(run(json.loads(sys.argv[1]))))
      security: [ { roles: [ "automation:write" ] } ]
    delete:
      tags: [ "automations" ]
      summary: "Delete a automation"
      operationId: "deleteAutomation"
      parameters:
        - { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
      responses:
        "204": { description: "successful operation" }
      security: [ { roles: [ "automation:write" ] } ]
definitions:
  AutomationForm:
    type: object
    required: [ id, image, script, type ]
    properties:
      id: { type: string }
      image: { type: string }
      script: { type: string }
      type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
      # schema is a JSON-schema string describing the automation's input form.
      schema: { type: string, example: "{}" }
  Automation:
    type: object
    required: [ image, script, type ]
    properties:
      image: { type: string }
      script: { type: string }
      type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
      schema: { type: string, example: "{}" }
  AutomationResponse:
    type: object
    required: [ id, image, script, type ]
    properties:
      id: { type: string }
      image: { type: string }
      script: { type: string }
      type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
      schema: { type: string, example: "{}" }
View File

@@ -1,167 +0,0 @@
# Swagger 2.0 fragment: statistics and dashboard endpoints.
swagger: "2.0"
info: { version: "", title: "" }
paths:
  /statistics:
    get:
      tags: [ "statistics" ]
      summary: "Get statistics"
      operationId: "getStatistics"
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: '#/definitions/Statistics' }
          examples:
            test:
              unassigned: 0
              open_tickets_per_user: { }
              tickets_per_week: { "2021-39": 3 }
              tickets_per_type: { "alert": 2, "incident": 1 }
      security: [ { roles: [ "ticket:read" ] } ]
  /dashboards:
    get:
      tags: [ "dashboards" ]
      summary: "List dashboards"
      operationId: "listDashboards"
      responses:
        "200":
          description: "successful operation"
          schema: { type: array, items: { $ref: "#/definitions/DashboardResponse" } }
          examples:
            test:
              - id: simple
                name: Simple
                widgets:
                  - name: "open_tickets_per_user"
                    aggregation: "owner"
                    filter: 'status == "open"'
                    type: "bar"
                    width: 4
                  - name: "tickets_per_week"
                    # Aggregation is an AQL expression evaluated per ticket.
                    aggregation: 'CONCAT(DATE_YEAR(created), "-", DATE_ISOWEEK(created) < 10 ? "0" : "", DATE_ISOWEEK(created))'
                    type: "line"
                    width: 8
      security: [ { roles: [ "dashboard:read" ] } ]
    post:
      tags: [ "dashboards" ]
      summary: "Create a new dashboard"
      operationId: "createDashboard"
      parameters:
        - { name: "template", in: "body", description: "New template", required: true, schema: { $ref: "#/definitions/Dashboard" }, x-example: { name: "My Dashboard", widgets: [ ] } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/DashboardResponse" }
          examples:
            test:
              id: "my-dashboard"
              name: "My Dashboard"
              widgets: []
      security: [ { roles: [ "dashboard:write" ] } ]
  /dashboards/{id}:
    get:
      tags: [ "dashboards" ]
      summary: "Get a single dashboard"
      operationId: "getDashboard"
      parameters:
        - { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/DashboardResponse" }
          examples:
            test:
              id: simple
              name: Simple
              widgets:
                - name: "open_tickets_per_user"
                  aggregation: "owner"
                  filter: 'status == "open"'
                  type: "bar"
                  width: 4
                - name: "tickets_per_week"
                  aggregation: 'CONCAT(DATE_YEAR(created), "-", DATE_ISOWEEK(created) < 10 ? "0" : "", DATE_ISOWEEK(created))'
                  type: "line"
                  width: 8
      security: [ { roles: [ "dashboard:read" ] } ]
    put:
      tags: [ "dashboards" ]
      summary: "Update an existing dashboard"
      operationId: "updateDashboard"
      parameters:
        - { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
        - { name: "dashboard", in: "body", description: "Dashboard object that needs to be added", required: true, schema: { $ref: "#/definitions/Dashboard" }, x-example: { name: "Simple", widgets: [] } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/DashboardResponse" }
          examples:
            test:
              id: simple
              name: Simple
              widgets: []
      security: [ { roles: [ "dashboard:write" ] } ]
    delete:
      tags: [ "dashboards" ]
      summary: "Delete a dashboard"
      operationId: "deleteDashboard"
      parameters:
        - { name: "id", in: "path", description: "Dashboard ID", required: true, type: string, x-example: "simple" }
      responses:
        "204": { description: "successful operation" }
      security: [ { roles: [ "dashboard:write" ] } ]
  /dashboard/data:
    get:
      tags: [ "dashboards" ]
      summary: "Get widget data"
      operationId: "dashboardData"
      parameters:
        - { name: "aggregation", in: "query", description: "Aggregation", required: true, type: string, x-example: "type" }
        - { name: "filter", in: "query", description: "Filter", type: string, x-example: 'status == "closed"' }
      responses:
        "200":
          description: "successful operation"
          schema: { type: object }
          examples:
            test:
              alert: 2
              incident: 1
      security: [ { roles: [ "dashboard:read" ] } ]
definitions:
  Statistics:
    type: object
    required: [ unassigned, open_tickets_per_user, tickets_per_week, tickets_per_type ]
    properties:
      unassigned: { type: integer }
      open_tickets_per_user: { type: object, additionalProperties: { type: integer } }
      tickets_per_week: { type: object, additionalProperties: { type: integer } }
      tickets_per_type: { type: object, additionalProperties: { type: integer } }
  Dashboard:
    type: object
    required: [ name, widgets ]
    properties:
      name: { type: string }
      widgets: { type: array, items: { $ref: "#/definitions/Widget" } }
  DashboardResponse:
    type: object
    required: [ id, name, widgets ]
    properties:
      id: { type: string }
      name: { type: string }
      widgets: { type: array, items: { $ref: "#/definitions/Widget" } }
  Widget:
    type: object
    required: [ name, type, aggregation, width ]
    properties:
      name: { type: string }
      type: { type: string, enum: [ "bar", "line", "pie" ] }
      filter: { type: string }
      aggregation: { type: string }
      # width is in 12-column grid units.
      width: { type: integer, minimum: 1, maximum: 12 }
View File

@@ -1,52 +0,0 @@
# Swagger 2.0 fragment: graph traversal endpoint (tickets/artifacts graph).
swagger: "2.0"
info: { version: "", title: "" }
paths:
  /graph/{col}/{id}:
    get:
      tags: [ "graph" ]
      summary: "Graph"
      operationId: "graph"
      parameters:
        - { name: "col", in: "path", description: "Graph Start", required: true, type: string, x-example: "tickets" }
        - { name: "id", in: "path", description: "Graph Start", required: true, type: string, x-example: "88" }
        - { name: "depth", in: "query", description: "Graph Start", required: true, type: integer, x-example: 1 }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/Graph" }
          examples:
            test:
              nodes:
                - { id: "artifacts/94d5cab6f5fe3422a447ab15436e7a672bc0c09a", name: "94d5cab6f5fe3422a447ab15436e7a672bc0c09a" }
                - { id: "artifacts/http%3A%2F%2Fwww.customerviral.io%2Fscalable%2Fvertical%2Fkiller", name: "http://www.customerviral.io/scalable/vertical/killer" }
                - { id: "artifacts/leadreintermediate.io", name: "leadreintermediate.io" }
                - { id: "tickets/88", name: "live zebra" }
              links:
                - { id: "296239", sid: "tickets/88", tid: "artifacts/http%3A%2F%2Fwww.customerviral.io%2Fscalable%2Fvertical%2Fkiller" }
                - { id: "296240", sid: "tickets/88", tid: "artifacts/leadreintermediate.io" }
                - { id: "296242", sid: "tickets/88", tid: "artifacts/94d5cab6f5fe3422a447ab15436e7a672bc0c09a" }
      security: [ { roles: [ "ticket:read" ] } ]
definitions:
  Graph:
    type: object
    properties:
      nodes: { type: array, items: { $ref: "#/definitions/Node" } }
      links: { type: array, items: { $ref: "#/definitions/Link" } }
  Node:
    type: object
    required: [ id, name ]
    properties:
      id: { type: string }
      name: { type: string }
  Link:
    type: object
    required: [ id, tid, sid ]
    properties:
      id: { type: string }
      # name: { type: string }
      # sid/tid are the source and target node ids of the edge.
      tid: { type: string }
      sid: { type: string }
View File

@@ -1,84 +0,0 @@
# Swagger 2.0 fragment: CRUD endpoints for user groups.
swagger: "2.0"
info: { version: "", title: "" }
paths:
  /groups:
    get:
      tags: [ "groups" ]
      summary: "List groups"
      operationId: "listGroups"
      responses:
        "200":
          description: "successful operation"
          schema: { type: array, items: { $ref: "#/definitions/Group" } }
      security: [ { roles: [ "group:read" ] } ]
    post:
      tags: [ "groups" ]
      summary: "Create a new group"
      operationId: "createGroup"
      parameters:
        - { name: "group", in: "body", description: "New group", required: true, schema: { $ref: "#/definitions/GroupForm" } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/GroupResponse" }
      security: [ { roles: [ "group:write" ] } ]
  /groups/{id}:
    get:
      tags: [ "groups" ]
      summary: "Get a single group"
      operationId: "getGroup"
      parameters:
        - { name: "id", in: "path", description: "Group ID", required: true, type: string }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/GroupResponse" }
      security: [ { roles: [ "group:read" ] } ]
    put:
      tags: [ "groups" ]
      summary: "Update an existing group"
      operationId: "updateGroup"
      parameters:
        - { name: "id", in: "path", description: "Group ID", required: true, type: string }
        - { name: "group", in: "body", description: "Group object that needs to be added", required: true, schema: { $ref: "#/definitions/Group" } }
      responses:
        "200":
          description: "successful operation"
          schema: { $ref: "#/definitions/Group" }
      security: [ { roles: [ "group:write" ] } ]
    delete:
      tags: [ "groups" ]
      summary: "Delete a group"
      operationId: "deleteGroup"
      parameters:
        - { name: "id", in: "path", description: "Group ID", required: true, type: string }
      responses:
        "204": { description: "successful operation" }
      security: [ { roles: [ "group:write" ] } ]
definitions:
  GroupForm:
    type: object
    required: [ name, users ]
    properties:
      id: { type: string }
      name: { type: string }
      users: { type: array, items: { type: string } }
  Group:
    type: object
    required: [ name, users ]
    properties:
      name: { type: string }
      users: { type: array, items: { type: string } }
  GroupResponse:
    type: object
    required: [ id, name, users ]
    properties:
      id: { type: string }
      name: { type: string }
      users: { type: array, items: { type: string } }
View File

@@ -1,110 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/rules:
get:
tags: [ "rules" ]
summary: "List rules"
operationId: "listRules"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/RuleResponse" } }
examples:
test:
- id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:read" ] } ]
post:
tags: [ "rules" ]
summary: "Create a rule"
operationId: "createRule"
parameters:
- { name: "rule", in: "body", description: "New rule", required: true, schema: { $ref: "#/definitions/RuleForm" }, x-example: { name: "Ignore all Alerts", condition: "type == 'alert'", update: { "status": "closed" } } }
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/RuleResponse" } }
examples:
test:
id: ignore-all-alerts
name: Ignore all Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:write" ] } ]
/rules/{id}:
get:
tags: [ "rules" ]
summary: "Get a single rule"
operationId: "getRule"
parameters:
- { name: "id", in: "path", description: "Rule name", required: true, type: string, x-example: "ignore-alerts" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/RuleResponse" }
examples:
test:
id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:read" ] } ]
put:
tags: [ "rules" ]
summary: "Update an existing rule"
operationId: "updateRule"
parameters:
- { name: "id", in: "path", description: "Rule ID", required: true, type: string, x-example: "ignore-alerts" }
- { name: "rule", in: "body", description: "Updated rule", required: true, schema: { $ref: "#/definitions/RuleForm" }, x-example: { name: "Ignore Alerts", condition: "type == 'alert'", update: { "status": "invalid" } } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/RuleResponse" }
examples:
test:
id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "invalid" }
security: [ { roles: [ "rule:write" ] } ]
delete:
tags: [ "rules" ]
summary: "Delete a rule"
operationId: "deleteRule"
parameters:
- { name: "id", in: "path", description: "Rule name", required: true, type: string, x-example: "ignore-alerts" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "rule:write" ] } ]
definitions:
RuleForm:
type: object
required: [ name, condition, update ]
properties:
id: { type: string }
name: { type: string }
condition: { type: string }
update: { type: object }
Rule:
type: object
required: [ name, condition, update ]
properties:
name: { type: string }
condition: { type: string }
update: { type: object }
RuleResponse:
type: object
required: [ id, name, condition, update ]
properties:
id: { type: string }
name: { type: string }
condition: { type: string }
update: { type: object }

View File

@@ -1,144 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/jobs:
get:
tags: [ "jobs" ]
summary: "List jobs"
operationId: "listJobs"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/JobResponse" } }
examples:
test:
- id: "b81c2366-ea37-43d2-b61b-03afdc21d985"
automation: "hash.sha1"
payload: "test"
status: "created"
security: [ { roles: [ "job:read" ] } ]
post:
tags: [ "jobs" ]
summary: "Start a new job"
operationId: "runJob"
parameters:
- { name: "job", in: "body", description: "New job", required: true, schema: { $ref: "#/definitions/JobForm" }, x-example: { automation: "hash.sha1", payload: "test" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "87390749-2125-4a87-91c5-da7e3f9bebf1", automation: "hash.sha1", payload: "test", status: "created" }
security: [ { roles: [ "job:write" ] } ]
/jobs/{id}:
get:
tags: [ "jobs" ]
summary: "Get a single job"
operationId: "getJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "b81c2366-ea37-43d2-b61b-03afdc21d985" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "b81c2366-ea37-43d2-b61b-03afdc21d985", automation: "hash.sha1", payload: "test", status: "created" }
security: [ { roles: [ "job:read" ] } ]
put:
tags: [ "jobs" ]
summary: "Update an existing job"
operationId: "updateJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "b81c2366-ea37-43d2-b61b-03afdc21d985" }
- { name: "job", in: "body", description: "Job object that needs to be added", required: true, schema: { $ref: "#/definitions/JobUpdate" }, x-example: { status: "failed", running: false } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "b81c2366-ea37-43d2-b61b-03afdc21d985", automation: "hash.sha1", payload: "test", status: "failed" }
security: [ { roles: [ "job:write" ] } ]
definitions:
Message:
type: object
properties:
payload: { }
secrets: { type: object, additionalProperties: { type: string } }
context: { $ref: "#/definitions/Context" }
Context:
type: object
properties:
artifact: { $ref: "#/definitions/Artifact" }
playbook: { $ref: "#/definitions/PlaybookResponse" }
task: { $ref: "#/definitions/TaskResponse" }
ticket: { $ref: "#/definitions/TicketResponse" }
Origin:
type: object
properties:
task_origin: { $ref: "#/definitions/TaskOrigin" }
artifact_origin: { $ref: "#/definitions/ArtifactOrigin" }
TaskOrigin:
type: object
required: [ ticket_id, playbook_id, task_id ]
properties:
ticket_id: { type: integer, format: int64 }
playbook_id: { type: string }
task_id: { type: string }
ArtifactOrigin:
type: object
required: [ ticket_id, artifact ]
properties:
ticket_id: { type: integer, format: int64 }
artifact: { type: string }
JobForm:
type: object
required: [ automation ]
properties:
automation: { type: string }
payload: { }
origin: { $ref: "#/definitions/Origin" }
JobUpdate:
type: object
required: [ running, status ]
properties:
container: { type: string }
running: { type: boolean }
status: { type: string }
log: { type: string }
output: { type: object }
Job:
type: object
required: [ automation, running, status ]
properties:
automation: { type: string }
container: { type: string }
payload: { }
running: { type: boolean }
status: { type: string }
log: { type: string }
output: { type: object }
origin: { $ref: "#/definitions/Origin" }
JobResponse:
type: object
required: [ id, automation, status ]
properties:
id: { type: string }
automation: { type: string }
container: { type: string }
status: { type: string }
payload: { }
log: { type: string }
output: { type: object }
origin: { $ref: "#/definitions/Origin" }

View File

@@ -1,31 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/logs/{reference}:
get:
tags: [ "logs" ]
summary: "Get log entries"
operationId: "getLogs"
parameters:
- { name: "reference", in: "path", description: "Reference", required: true, type: string, x-example: "tickets%2F294511" }
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/LogEntry" } }
examples:
test:
- { type: "manual", created: "2021-12-12T12:12:12.000000012Z", creator: "bob", reference: "tickets/294511", message: "Fail run account resist lend solve incident centre priority temperature. Cause change distribution examine location technique shape partner milk customer. Rail tea plate soil report cook railway interpretation breath action. Exercise dream accept park conclusion addition shoot assistance may answer. Gold writer link stop combine hear power name commitment operation. Determine lifespan support grow degree henry exclude detail set religion. Direct library policy convention chain retain discover ride walk student. Gather proposal select march aspect play noise avoid encourage employ. Assessment preserve transport combine wish influence income guess run stand. Charge limit crime ignore statement foundation study issue stop claim." }
security: [ { roles: [ "log:read" ] } ]
definitions:
LogEntry:
type: object
required: [ type, reference, creator, created, message ]
properties:
type: { type: string }
reference: { type: string }
creator: { type: string }
created: { type: string, format: "date-time" }
message: { type: string }

View File

@@ -1,199 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/playbooks:
get:
tags: [ "playbooks" ]
summary: "List playbooks"
operationId: "listPlaybooks"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/PlaybookTemplateResponse" } }
examples:
test:
- id: phishing
name: Phishing
yaml: "name: Phishing\ntasks:\n board:\n name: Board Involvement?\n description: Is a board member involved?\n type: input\n schema:\n properties:\n boardInvolved:\n default: false\n title: A board member is involved.\n type: boolean\n required:\n - boardInvolved\n title: Board Involvement?\n type: object\n next:\n escalate: \"boardInvolved == true\"\n mail-available: \"boardInvolved == false\"\n\n escalate:\n name: Escalate to CISO\n description: Please escalate the task to the CISO\n type: task\n\n mail-available:\n name: Mail available\n type: input\n schema:\n oneOf:\n - properties:\n mail:\n title: Mail\n type: string\n x-display: textarea\n schemaKey:\n const: 'yes'\n type: string\n required:\n - mail\n title: 'Yes'\n - properties:\n schemaKey:\n const: 'no'\n type: string\n title: 'No'\n title: Mail available\n type: object\n next:\n block-sender: \"schemaKey == 'yes'\"\n extract-iocs: \"schemaKey == 'yes'\"\n search-email-gateway: \"schemaKey == 'no'\"\n\n search-email-gateway:\n name: Search email gateway\n description: Please search email-gateway for the phishing mail.\n type: task\n next:\n extract-iocs:\n\n block-sender:\n name: Block sender\n type: task\n next:\n extract-iocs:\n\n extract-iocs:\n name: Extract IOCs\n description: Please insert the IOCs\n type: input\n schema:\n properties:\n iocs:\n items:\n type: string\n title: IOCs\n type: array\n title: Extract IOCs\n type: object\n next:\n block-iocs:\n\n block-iocs:\n name: Block IOCs\n type: task\n"
- id: simple
name: Simple
yaml: "name: Simple\ntasks:\n input:\n name: Enter something to hash\n type: input\n schema:\n title: Something\n type: object\n properties:\n something:\n type: string\n title: Something\n default: \"\"\n next:\n hash: \"something != ''\"\n\n hash:\n name: Hash the something\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['something']\"\n next:\n comment: \"hash != ''\"\n\n comment:\n name: Comment the hash\n type: automation\n automation: comment\n payload:\n default: \"playbook.tasks['hash'].data['hash']\"\n next:\n done: \"done\"\n\n done:\n name: You can close this case now\n type: task\n"
security: [ { roles: [ "playbook:read" ] } ]
post:
tags: [ "playbooks" ]
summary: "Create a playbook"
operationId: "createPlaybook"
parameters:
- { name: "playbook", in: "body", description: "New playbook", required: true, schema: { $ref: "#/definitions/PlaybookTemplateForm" }, x-example: { yaml: "name: Simple2\ntasks:\n input:\n name: Upload malware if possible\n type: input\n schema:\n title: Malware\n type: object\n properties:\n malware:\n type: string\n title: Select malware\n default: \"\"\n next:\n hash: \"malware != ''\"\n\n hash:\n name: Hash the malware\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['malware']\"\n next:\n escalate:\n\n escalate:\n name: Escalate to malware team\n type: task\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple-2
name: Simple2
yaml: |
name: Simple2
tasks:
input:
name: Upload malware if possible
type: input
schema:
title: Malware
type: object
properties:
malware:
type: string
title: Select malware
default: ""
next:
hash: "malware != ''"
hash:
name: Hash the malware
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['malware']"
next:
escalate:
escalate:
name: Escalate to malware team
type: task
security: [ { roles: [ "playbook:write" ] } ]
/playbooks/{id}:
get:
tags: [ "playbooks" ]
summary: "Get a single playbook"
operationId: "getPlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook name", required: true, type: string, x-example: "simple" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple
name: Simple
yaml: |
name: Simple
tasks:
input:
name: Enter something to hash
type: input
schema:
title: Something
type: object
properties:
something:
type: string
title: Something
default: ""
next:
hash: "something != ''"
hash:
name: Hash the something
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['something']"
next:
comment: "hash != ''"
comment:
name: Comment the hash
type: automation
automation: comment
payload:
default: "playbook.tasks['hash'].data['hash']"
next:
done: "done"
done:
name: You can close this case now
type: task
security: [ { roles: [ "playbook:read" ] } ]
put:
tags: [ "playbooks" ]
summary: "Update an existing playbook"
operationId: "updatePlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook ID", required: true, type: string, x-example: "simple" }
- { name: "playbook", in: "body", description: "Updated playbook", required: true, schema: { $ref: "#/definitions/PlaybookTemplateForm" }, x-example: { yaml: "name: Simple\ntasks:\n input:\n name: Upload malware if possible\n type: input\n schema:\n title: Malware\n type: object\n properties:\n malware:\n type: string\n title: Select malware\n default: \"\"\n next:\n hash: \"malware != ''\"\n\n hash:\n name: Hash the malware\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['malware']\"\n next:\n escalate:\n\n escalate:\n name: Escalate to malware team\n type: task\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple
name: Simple
yaml: |
name: Simple
tasks:
input:
name: Upload malware if possible
type: input
schema:
title: Malware
type: object
properties:
malware:
type: string
title: Select malware
default: ""
next:
hash: "malware != ''"
hash:
name: Hash the malware
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['malware']"
next:
escalate:
escalate:
name: Escalate to malware team
type: task
security: [ { roles: [ "playbook:write" ] } ]
delete:
tags: [ "playbooks" ]
summary: "Delete a playbook"
operationId: "deletePlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook name", required: true, type: string, x-example: "simple" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "playbook:write" ] } ]
definitions:
PlaybookTemplateForm:
type: object
required: [ yaml ]
properties:
id: { type: string }
yaml: { type: string }
PlaybookTemplate:
type: object
required: [ name, yaml ]
properties:
name: { type: string }
yaml: { type: string }
PlaybookTemplateResponse:
type: object
required: [ id, name, yaml ]
properties:
id: { type: string }
name: { type: string }
yaml: { type: string }

View File

@@ -1,89 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/settings:
get:
tags: [ "settings" ]
summary: "Get settings"
operationId: "getSettings"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/SettingsResponse" }
examples:
test:
version: "0.0.0-test"
tier: community
timeformat: "yyyy-MM-dd hh:mm:ss"
ticketTypes:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
artifactKinds:
- { icon: "mdi-server", id: "asset", name: "Asset" }
- { icon: "mdi-bullseye", id: "ioc", name: "IOC" }
artifactStates:
- { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" }
- { icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" }
- { icon: "mdi-check", id: "clean", name: "Clean", color: "success" }
security: [ { roles: [ "settings:read" ] } ]
post:
tags: [ "settings" ]
summary: "Save settings"
operationId: "saveSettings"
parameters:
- { name: "settings", in: "body", description: "Save settings", required: true, schema: { $ref: "#/definitions/Settings" }, x-example: { timeformat: "yyyy-MM-dd hh:mm:ss", artifactKinds: [ { icon: "mdi-server", id: "asset", name: "Asset" }, { icon: "mdi-bullseye", id: "ioc", name: "IOC" } ], artifactStates: [ { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" },{ icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" },{ icon: "mdi-check", id: "clean", name: "Clean", color: "success" } ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/SettingsResponse" }
examples:
test:
version: "0.0.0-test"
tier: community
timeformat: "yyyy-MM-dd hh:mm:ss"
ticketTypes:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
artifactKinds:
- { icon: "mdi-server", id: "asset", name: "Asset" }
- { icon: "mdi-bullseye", id: "ioc", name: "IOC" }
artifactStates:
- { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" }
- { icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" }
- { icon: "mdi-check", id: "clean", name: "Clean", color: "success" }
security: [ { roles: [ "settings:write" ] } ]
definitions:
Settings:
type: object
required: [ timeformat, artifactKinds, artifactStates ]
properties:
timeformat: { title: "Time Format", type: string }
artifactKinds: { title: "Artifact Kinds", type: array, items: { $ref: "#/definitions/Type" } }
artifactStates: { title: "Artifact States", type: array, items: { $ref: "#/definitions/Type" } }
SettingsResponse:
type: object
required: [ version, tier, timeformat, ticketTypes, artifactKinds, artifactStates ]
properties:
version: { title: "Version", type: string }
tier: { title: "Tier", type: string, enum: [ "community", "enterprise" ] }
timeformat: { title: "Time Format", type: string }
ticketTypes: { title: "Ticket Types", type: array, items: { $ref: "#/definitions/TicketTypeResponse" } }
artifactKinds: { title: "Artifact Kinds", type: array, items: { $ref: "#/definitions/Type" } }
artifactStates: { title: "Artifact States", type: array, items: { $ref: "#/definitions/Type" } }
roles: { title: "Roles", type: array, items: { type: string } }
Type:
type: object
required: [ id, name, icon ]
properties:
id: { title: ID, type: string, x-cols: 3, x-class: pr-2 }
name: { title: Name, type: string, x-cols: 3, x-class: pr-2 }
icon: { title: "Icon (https://materialdesignicons.com)", type: string, x-cols: 3, x-class: pr-2 }
color: { title: Color, type: string, x-cols: 3, enum: [ error, info, success, warning ] }

View File

@@ -1,18 +0,0 @@
swagger: "2.0"
info:
version: "0.0.3"
title: "Catalyst"
description: API for the catalyst incident response platform.
host: "."
basePath: "/api"
schemes:
# - "https"
- "http"
consumes: [ "application/json" ]
produces: [ "application/json" ]
paths: {}
definitions: {}

View File

@@ -1,82 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/tasks:
get:
tags: [ "tasks" ]
summary: "List tasks"
operationId: "listTasks"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TaskWithContext" } }
examples:
test: [ ]
security: [ { roles: [ "ticket:read" ] } ]
definitions:
Task:
type: object
required: [ name, type, done, created ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
TaskResponse:
type: object
required: [ name, type, done, created, order, active ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
# helper
order: { type: integer, format: "int64", example: 2 }
active: { type: boolean, example: false }
TaskWithContext:
type: object
required: [ ticket_id, ticket_name, playbook_id, playbook_name, task_id, task ]
properties:
ticket_id: { type: integer, format: "int64" }
ticket_name: { type: string }
playbook_id: { type: string }
playbook_name: { type: string }
task_id: { type: string }
task: { $ref: "#/definitions/TaskResponse" }

View File

@@ -1,104 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/templates:
get:
tags: [ "templates" ]
summary: "List templates"
operationId: "listTemplates"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TicketTemplateResponse" } }
examples:
test:
- id: default
name: Default
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Default\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:read" ] } ]
post:
tags: [ "templates" ]
summary: "Create a new template"
operationId: "createTemplate"
parameters:
- { name: "template", in: "body", description: "New template", required: true, schema: { $ref: "#/definitions/TicketTemplateForm" }, x-example: { name: "My Template", schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: "my-template"
name: "My Template"
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:write" ] } ]
/templates/{id}:
get:
tags: [ "templates" ]
summary: "Get a single template"
operationId: "getTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: default
name: Default
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Default\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:read" ] } ]
put:
tags: [ "templates" ]
summary: "Update an existing template"
operationId: "updateTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
- { name: "template", in: "body", description: "Template object that needs to be added", required: true, schema: { $ref: "#/definitions/TicketTemplateForm" }, x-example: { name: "My Template", schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: default
name: "My Template"
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:write" ] } ]
delete:
tags: [ "templates" ]
summary: "Delete a template"
operationId: "deleteTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "template:write" ] } ]
definitions:
TicketTemplateForm:
type: object
required: [ name, schema ]
properties:
id: { type: string }
name: { type: string }
schema: { type: string }
TicketTemplate:
type: object
required: [ name, schema ]
properties:
name: { type: string }
schema: { type: string }
TicketTemplateResponse:
type: object
required: [ id, name, schema ]
properties:
id: { type: string }
name: { type: string }
schema: { type: string }

File diff suppressed because it is too large Load Diff

View File

@@ -1,121 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/tickettypes:
get:
tags: [ "tickettypes" ]
summary: "List tickettypes"
operationId: "listTicketTypes"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TicketTypeResponse" } }
examples:
test:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
security: [ { roles: [ "tickettype:read" ] } ]
post:
tags: [ "tickettypes" ]
summary: "Create a new tickettype"
operationId: "createTicketType"
parameters:
- { name: "tickettype", in: "body", description: "New tickettype", required: true, schema: { $ref: "#/definitions/TicketTypeForm" }, x-example: { name: "TI Tickets", icon: "mdi-newspaper-variant-outline", default_template: "default", default_playbooks: [ ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
id: "ti-tickets"
name: "TI Tickets"
icon: "mdi-newspaper-variant-outline"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:write" ] } ]
/tickettypes/{id}:
get:
tags: [ "tickettypes" ]
summary: "Get a single tickettype"
operationId: "getTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
icon: "mdi-alert"
id: "alert"
name: "Alerts"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:read" ] } ]
put:
tags: [ "tickettypes" ]
summary: "Update an existing tickettype"
operationId: "updateTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
- { name: "tickettype", in: "body", description: "TicketType object that needs to be added", required: true, schema: { $ref: "#/definitions/TicketTypeForm" }, x-example: { icon: "mdi-bell", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
icon: "mdi-bell"
id: "alert"
name: "Alerts"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:write" ] } ]
delete:
tags: [ "tickettypes" ]
summary: "Delete a tickettype"
operationId: "deleteTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "tickettype:write" ] } ]
definitions:
TicketTypeForm:
type: object
required: [ name, icon, default_template, default_playbooks ]
properties:
id: { type: string }
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }
TicketType:
type: object
required: [ name, icon, default_template, default_playbooks ]
properties:
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }
TicketTypeResponse:
type: object
required: [ id, name, icon, default_template, default_playbooks ]
properties:
id: { type: string }
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }

View File

@@ -1,94 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/currentuserdata:
get:
tags: [ "userdata" ]
summary: "Get current user data"
operationId: "currentUserData"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "currentuserdata:read" ] } ]
put:
tags: [ "userdata" ]
summary: "Update current user data"
operationId: "updateCurrentUserData"
parameters:
- { name: "userdata", in: "body", description: "User data object that needs to be added", required: true, schema: { $ref: "#/definitions/UserData" }, x-example: { name: "Bob Bad", email: "bob@example.org" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "currentuserdata:write" ] } ]
/userdata:
get:
tags: [ "userdata" ]
summary: "List userdata"
operationId: "listUserData"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/UserDataResponse" } }
examples:
test:
- { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "userdata:read" ] } ]
/userdata/{id}:
get:
tags: [ "userdata" ]
summary: "Get a single user data"
operationId: "getUserData"
parameters:
- { name: "id", in: "path", description: "User Data ID", required: true, type: string, x-example: "bob" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test:
id: bob
name: "Bob Bad"
email: "bob@example.org"
security: [ { roles: [ "userdata:read" ] } ]
put:
tags: [ "userdata" ]
summary: "Update an existing user data"
operationId: "updateUserData"
parameters:
- { name: "id", in: "path", description: "User Data ID", required: true, type: string, x-example: "bob" }
- { name: "userdata", in: "body", description: "User data object that needs to be added", required: true, schema: { $ref: "#/definitions/UserData" }, x-example: { name: "Bob Bad", email: "bob@example.org", blocked: false } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "userdata:write" ] } ]
definitions:
UserData:
type: object
properties:
name: { type: string, x-example: "Robert Smith" }
email: { type: string, x-example: "bob@example.org" }
image: { type: string, x-display: "custom-avatar" }
timeformat: { title: "Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)", type: string }
UserDataResponse:
type: object
required: [ id ]
properties:
id: { type: string }
name: { type: string, x-example: "Robert Smith" }
email: { type: string, x-example: "bob@example.org" }
image: { type: string, x-display: "custom-avatar" }
timeformat: { title: "Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)", type: string }

View File

@@ -1,122 +0,0 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/currentuser:
get:
tags: [ "users" ]
summary: "Get current user"
operationId: "currentUser"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: bob, roles: [ "admin" ], blocked: false, apikey: false }
security: [ { roles: [ "currentuser:read" ] } ]
/users:
get:
tags: [ "users" ]
summary: "List users"
operationId: "listUsers"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/UserResponse" } }
examples:
test:
- { id: bob, blocked: false, roles: [ "admin" ], apikey: false }
- { id: script, roles: [ "engineer" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
post:
tags: [ "users" ]
summary: "Create user"
operationId: "createUser"
parameters:
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst" ], blocked: false, apikey: true } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/NewUserResponse" }
examples:
test: { id: "syncscript", roles: [ "analyst" ], secret: "v39bOuobnlEljfWzjAgoKzhmnh1xSMxH", blocked: false }
security: [ { roles: [ "user:write" ] } ]
/users/{id}:
get:
tags: [ "users" ]
summary: "Get a single user"
operationId: "getUser"
parameters:
- { name: "id", in: "path", description: "user ID", required: true, type: string, x-example: "script" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: "script", roles: [ "engineer" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
put:
tags: [ "users" ]
summary: "Update user"
operationId: "updateUser"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "bob" }
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst", "admin" ], blocked: false, apikey: false } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test:
id: bob
roles: [ "analyst", "admin" ]
apikey: false
blocked: false
security: [ { roles: [ "user:write" ] } ]
delete:
tags: [ "users" ]
summary: "Delete user"
operationId: "deleteUser"
parameters:
- { name: "id", in: "path", description: "user ID", required: true, type: string, x-example: "script" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "user:write" ] } ]
definitions:
UserForm:
type: object
required: [ id, blocked, roles, apikey ]
properties:
id: { type: string }
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
User:
type: object
required: [ blocked, apikey, roles ]
properties:
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
sha256: { type: string } # for api keys
UserResponse:
type: object
required: [ id, blocked, roles, apikey ]
properties:
id: { type: string }
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
NewUserResponse:
type: object
required: [ id, blocked, roles ]
properties:
id: { type: string }
blocked: { type: boolean }
roles: { type: array, items: { type: string } }
secret: { type: string }

View File

@@ -1,42 +0,0 @@
---
server:
host: 0.0.0.0
port: 8082
default_redirection_url: "http://localhost/auth/callback"
log:
format: text
authentication_backend:
file:
path: /config/users_database.yml
access_control:
default_policy: one_factor
session:
domain: localhost
storage:
local:
path: /config/db.sqlite3
notifier:
filesystem:
filename: /config/notification.txt
identity_providers:
oidc:
cors:
# allowed_origins_from_client_redirect_uris: true
allowed_origins: [ "*" ]
clients:
- id: "catalyst"
description: API
secret: "secret"
public: false
authorization_policy: one_factor
scopes: [ openid, email, profile ]
redirect_uris:
- "http://localhost/auth/callback"
userinfo_signing_algorithm: none

View File

@@ -1,51 +0,0 @@
-----BEGIN RSA PRIVATE KEY-----
MIIJKQIBAAKCAgEA0OYfHDBvLOMQHjGb2bZGZh6j+nfWihAVWAycCR5ZOGwaRQuW
Z0iFzkDlsj0ENG65H5DkRB1mu93LXi7yzLs7Iu4mNQDtLH2EPN1HzgmwxIevL+Ud
6H6wCZQrZxp9bah/BKjChfURDK7gmUzY0F/sbi1upI4uVuwUrMsYroKBS4R4crHm
mqH1ACY8pih/d/4tpsGTs0ruLkFd5dGh/Vopcx7U4iPBxTL8SvkcUQ3TYqTjcKaV
Zb8DoiKaGmVRUFEqYtiBPykC0MNRDK8DZTuIbwZPQM0c3OWCGm2JPR6qQfgCacyY
JirsuN9gCMxXVCjLcaxCgWiftvhnr9YpdOYkX4ROY9qo4JNGN6Pd8Q5qX5GvSwa9
fdtMn0shCyv2MRgSq2SfZ5+DT3eUUrDQkNQoHOxxR9VPdYcPFDNLgAbydwnrlqwZ
X9rnaKuoRaJ6N4rOgJoaUR3LVXs8QrIGcJx9VRplm2NuElsDMm7fkAEEs2y/yAeW
qjkVnwITCKB9qoMSjpj8KpjHYdXWZqdTJWNLdKLK3iH/2lpp71utobSNLjdojifN
wmdQNzdVwAg56R7+YDDyCPEBsPO89s8b2E8SuYF2A9KRTD5Usd3k5aaMkaLrf4bu
igpW7MBWwE9HQjMSgX8HxHsBDl/TGhK/c/GsTBN9UlUD1MFi9yn5PmtL+lcCAwEA
AQKCAgBfm+NDOJu5lJbs6tGk3DruRDnSygRkHiZWrTbBKaW2ICwJy9rjnJq5IpS+
dhS1lrOd0efXkQlTFJkemyg/MbZIL21HNwxWH4BlhvV1RJQl7t/scNW2cj1JtQoE
X2hmxwaTcFXPj7Fk96kOINDe1fhVGPAZ2oU0/UodJ7s9K7fXg2LJQ8kCnvuOPPHn
LoPgvHHSb6iVF8dZXkuGguK8HQb5jHPsqGboRYSZH9io9EX6UPNTvLqF3d3g2Ctl
x1dnBi77HJJTAQcr2yj+QB+ytoOkJOQjCX2bUGOyis2ULrPIC6W3HI/KcDz2BC5k
+HlDqIQsm6W4qtTmnlx+auuAeFDOnr2lpPt4hVlyq3jkkSUqrcYs9NBsPdXz+80K
+NzRnv9EytHsliZJd4hUxwyh6y0BEo9QqgrpjlTFxYmRTGzZUu9qtcSCcVg9saZy
vvJxFuzs5zDBqL44pMbNF/NB6fDYgkDEUOdAO6QVl6lPc3sYJcyQ0JNR2cWwlCq7
EaamUJwTcCzUIT1ych/lYzVXqFF12urPavrqsaMbFHOQIRC9mjMgnm/eHZXB744e
nhe7YFL94fxvMD+Ekr+3tVFskVfYYUwuQnUOCwxprnZXQxSAiABhbANBg4LOzBqP
0yWSK7yrvjC0ktnt3q9NRtY8zRrZypric++zbhLddGdxydVCGQKCAQEA83f2WnZd
UdbP2XhDoY/hYoVm48mgn81neElxxwalom39BJ4meHGAeDq6XDOZtRn2tiiAh+HJ
JHU7twtHlHttkSjqP0a7zVCHbIostJZRLZRa1bAlu9hjynuHmMu3/7AcNJhYV1gH
cJTFo4w4EkFUT3zcuMrJpkWbJ+syDNa+x22Vx/YR0Wk7PPVE32lXzOYx4KYk8R8g
B7PvePJW3wPaWSC5sgPzmbpnbHTEP8pRN9N173IFF0S00wwV5Pea3ltOB5R2ALkZ
pkTo/ck5xmcEwEk8DZsybe/uE/gyBoSRhOEqCbgxb/qkq/guYaHo7lvjGRjhgF16
3orwBStzjZCjNQKCAQEA26anyawLGKgIix2eQAXO7GxPmQToWdp5JGZr9u7bnGrp
Q5qJBe4gx26WgDa17zonAD9YgE7Fv5WV/zjqiI06wNSDostz6OayUsn5tttFWlVv
QlBWBspQu8alhCq4OgxMfhxXEGQtWrqc3TlJMebEsiTlrqP7bnvpGwADVGuhUyQ7
t+L9oQ8SBgcJ+gGOc0P3GDPGni36itqxYNO/e1edkQAsdQh5TEsTsT1uNxPXOvv9
PMK0QhP/jECBjjQ8MAuMnYalQl0y8WqqQPgGKUHlx20Aydy9IBUTWpG2t0Gxcike
WncXomBJSEXppp8uNiz5gqKqyq4ODFZa14FlbZ5s2wKCAQEA6zmdxGzTYHxgOEXf
Ybq3EQ6+B5oIHBzBuQ+MY7PiV3pYmBuMI5XVf1OONgKMoNJC8F9VPvM/+H9jgEff
km9lvnd/Sj82tvj0vkMJSjhomdbZo9cZvdElKL4Mle1NCjXGKnJ993VPStAR2x9g
FRMVN+70+XzDMmfRrdGoe4sGq3sO2TC+qko3N3/oWMlYUNiem+MpkeR2d7q6xWmt
0K3SSYY6ouj6dC4KOljeOptnuL4PFZZdoMt6wOTOSneHIwmn697d23j6dQ/i4z+F
GFDz4CthX+vv3xOOO7Dx9CYkyfMZQGa7LOtGKfgQJ7fcal5QnTDSvciTK2uk1fnT
HJT/eQKCAQABbl2Lf9Z2q6malm/QhPkrKy19lr2Y0EaXyR8M9dNyenPYn+oiosGN
6xeJ8FFRJLTaWI9QDrNVIzld91X328u1M+1Do0W3D8G7rls1KMqT4xidev6Efs71
2j571PdsUWYyMCcVEUIGZE7fVh829wTzEDB66dCakK4dIevjoevkKclF0nHKmdmJ
NoSHH3l5IMk8XCIAJ9aJDxG2ysplmFWLe0/O12ZK6/ZN4lOUgkmVtcyZl78q6wxw
Mn9lmMuHmJuq7xSmkQri7cn8MGGB5U8E9J3bstd8nQaKQYbKPOBWGRR8jGgYA76W
fPYfwsu/SJ27jynDtkybIfjnjI1HoI+fAoIBAQCMrcRGmksweAgFs/fq24pYxV/K
oF98txTINrV7MJQamDfpzqlCML9Vo8WFk56BRGj9NqOU+kgvi3aTfRhN5kSeE6y/
Kb/u89dVYhKpzr2zAy9/msup8yPKD9sT/c7S3DJRlNp5DXy48MSntz0+k710SaYe
1GXOwfch8WwkkVpgWY/08WRuNbRbD7Jmkqjz/PtcirkewljpWn+05kUSqAgEyBfy
kCahABRtdH0FAMQjzqb5kS/g0K4BEje9ie81wvtd3r2b89WSgBDFCno/Oq8hKEC0
DP828OmbLWSiRvnYCcKxVaWnDvsgFTALySB89dQpTGEWFoHC1XbNJgnwg/9l
-----END RSA PRIVATE KEY-----

View File

@@ -1,13 +0,0 @@
users:
alice@example.com:
displayname: Alice
password: "$argon2id$v=19$m=65536,t=3,p=4$S3hTSS90U1QycjNEWURZTw$aJP1fI/byC/3A7NCz5lyrXR7NS+l+1YMnqj5qFopZRk"
email: alice@example.com
bob@example.com:
displayname: "Bob"
password: "$argon2id$v=19$m=65536,t=3,p=4$amxRcURFVUk4TlhPOXFmWg$sPRsvGg9rrqefRp0fFA7wQG3O8OcMnQhj4IckHYPEz8"
email: bob@example.com
admin@example.com:
displayname: "Admin"
password: "$argon2id$v=19$m=65536,t=3,p=4$SFBXa1BXblNZKytoZ1ZLYQ$JruWROu9opYmcPNw1cIiHms4k4466DqrKIPvJe94nfA"
email: admin@example.com

View File

@@ -1,40 +0,0 @@
version: '2.4'
services:
nginx:
image: nginx:1.25
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
ports: [ "80:80", "8529:8529", "9000:9000", "8082:8082", "9003:9003" ]
networks: [ catalyst ]
arangodb:
image: arangodb/arangodb:3.8.1
environment:
ARANGO_ROOT_PASSWORD: foobar
networks: [ catalyst ]
minio:
image: minio/minio:RELEASE.2021-12-10T23-03-39Z
environment:
MINIO_ROOT_USER: minio
MINIO_ROOT_PASSWORD: minio123
command: server /data -console-address ":9003"
networks: [ catalyst ]
authelia:
image: authelia/authelia:4
environment:
AUTHELIA_JWT_SECRET: "AUTHELIA_JWT_SECRET"
AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE: "/config/private.pem"
AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET: "AUTHELIA_HMAC_SECRET"
AUTHELIA_STORAGE_ENCRYPTION_KEY: "AUTHELIA_STORAGE_ENCRYPTION_KEY"
AUTHELIA_SESSION_SECRET: "AUTHELIA_SESSION_SECRET"
volumes:
- ./authelia/configuration.yml:/config/configuration.yml
- ./authelia/users_database.yml:/config/users_database.yml
- ./authelia/private.pem:/config/private.pem
networks: [ catalyst ]
networks:
catalyst:
name: catalyst

View File

@@ -1,117 +0,0 @@
user www-data;
worker_processes 5;
error_log /var/log/nginx/error.log;
events {
worker_connections 4096;
}
http {
include mime.types;
index index.html index.htm;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
server {
listen 80 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
}
location /wss {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
}
server {
listen 8529 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_arangodb arangodb;
proxy_pass http://$upstream_arangodb:8529;
}
}
server {
listen 9000 default_server;
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9000;
}
}
server {
listen 8082 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_authelia authelia;
proxy_pass http://$upstream_authelia:8082;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_set_header X-Forwarded-Uri $request_uri;
proxy_set_header X-Forwarded-Ssl on;
proxy_http_version 1.1;
proxy_set_header Connection "";
proxy_cache_bypass $cookie_session;
proxy_no_cache $cookie_session;
proxy_buffers 64 256k;
}
}
server {
listen 9003 default_server;
server_name _;
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
# Default is HTTP/1, keepalive is only enabled in HTTP/1.1
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9003;
}
}
}

View File

@@ -1,24 +0,0 @@
#!/bin/bash
set -e
export SECRET=4ef5b29539b70233dd40c02a1799d25079595565e05a193b09da2c3e60ada1cd
export SIMPLE_AUTH_ENABLE=false
export OIDC_ENABLE=true
export OIDC_ISSUER=http://localhost:8082
export OIDC_CLIENT_SECRET=secret
export ARANGO_DB_HOST=http://localhost:8529
export ARANGO_DB_PASSWORD=foobar
export S3_HOST=http://localhost:9000
export S3_REGION=eu-central-1
export S3_PASSWORD=minio123
export AUTH_BLOCK_NEW=false
export AUTH_DEFAULT_ROLES=analyst,admin
export EXTERNAL_ADDRESS=http://localhost
export CATALYST_ADDRESS=http://host.docker.internal
export INITIAL_API_KEY=d0169af94c40981eb4452a42fae536b6caa9be3a
go run ../cmd/catalyst-dev/*.go

Binary file not shown.

Before

Width:  |  Height:  |  Size: 248 KiB

Some files were not shown because too many files have changed in this diff Show More