Release catalyst

This commit is contained in:
Jonas Plum
2021-12-13 00:39:15 +01:00
commit 15cf0ebd49
339 changed files with 111677 additions and 0 deletions

93
.github/workflows/ci.yml vendored Normal file
View File

@@ -0,0 +1,93 @@
# CI pipeline: test the Go backend, build the Vue UI, then build the binary
# and push the Docker image to the GitHub container registry.
name: CI
on:
  push: { branches: [ main ] }
  pull_request:
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}
jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    env: { GIN_MODE: test }
    steps:
      - uses: actions/setup-go@v2
        with: { go-version: '1.17' }
      - uses: actions/setup-node@v2
        with: { node-version: '14' }
      - uses: actions/checkout@v2
      # The backend embeds ui/dist; create placeholder assets so the Go tests
      # can run without a full UI build.
      - run: |
          mkdir -p ui/dist/img
          touch ui/dist/index.html ui/dist/favicon.ico ui/dist/manifest.json ui/dist/img/fake.png
      # Start the backing services defined in dev/ for the tests.
      - run: docker-compose up -d
        working-directory: dev
      # The arangodump/arangorestore client tools are needed on the runner.
      - name: Install ArangoDB
        run: |
          curl -OL https://download.arangodb.com/arangodb34/DEBIAN/Release.key
          sudo apt-key add Release.key
          sudo apt-add-repository 'deb https://download.arangodb.com/arangodb34/DEBIAN/ /'
          sudo apt-get update -y && sudo apt-get -y install arangodb3
      - run: go test -coverprofile=cover.out -coverpkg=./... ./...
      - run: go tool cover -func=cover.out
  build-npm:
    name: Build npm
    runs-on: ubuntu-latest
    steps:
      - uses: actions/setup-node@v2
        with: { node-version: '14' }
      - uses: actions/checkout@v2
      - run: yarn install && yarn build
        working-directory: ui
      # Hand the built UI over to the "build" job; short retention keeps the
      # artifact storage cheap.
      - uses: actions/upload-artifact@v2
        with: { name: ui, path: ui/dist, retention-days: 1 }
  build:
    name: Build
    runs-on: ubuntu-latest
    needs: [ build-npm, test ]
    steps:
      - uses: actions/setup-go@v2
        with: { go-version: '1.17' }
      - uses: actions/checkout@v2
      - uses: actions/download-artifact@v2
        with: { name: ui, path: ui/dist }
      - run: go build -o catalyst ./cmd/catalyst/.
      - uses: docker/login-action@v1
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      # NOTE(review): github.ref is never empty on push/pull_request events,
      # so this step always runs — confirm whether a tag-only condition
      # (e.g. startsWith(github.ref, 'refs/tags/')) was intended.
      - name: Version
        if: ${{ github.ref != '' }}
        run: |
          echo ${{ github.ref }}
          echo ${{ github.ref }} > VERSION
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v3
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      - uses: docker/build-push-action@v2
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
# Deployment to the demo host is currently disabled.
#  deploy:
#    name: Deploy
#    runs-on: self-hosted
#    needs: [ build ]
#    steps:
#      - uses: actions/checkout@v2
#      - uses: docker/login-action@v1
#        with:
#          registry: ${{ env.REGISTRY }}
#          username: ${{ github.actor }}
#          password: ${{ secrets.GITHUB_TOKEN }}
#      - run: docker-compose -f docker-compose.yml -f docker-compose.demo.yml pull
#      - run: docker-compose -f docker-compose.yml -f docker-compose.demo.yml up -d --remove-orphans --force-recreate
#      - run: docker-compose -f docker-compose.yml -f docker-compose.demo.yml restart

86
.gitignore vendored Normal file
View File

@@ -0,0 +1,86 @@
.idea
.antlr
.DS_Store
uploads
gen
*.bleve
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover
.hypothesis/
venv/
.venv/
.python-version
.pytest_cache
# Translations
*.mo
*.pot
# Django stuff:
*.log
# Sphinx documentation
docs/_build/
# PyBuilder
target/
#Ipython Notebook
.ipynb_checkpoints
# npm
wwwroot/*.js
typings
dist
node_modules
profile.cov
generated/caql/parser/*.interp
generated/caql/parser/*.tokens

12
Dockerfile Normal file
View File

@@ -0,0 +1,12 @@
# Catalyst runtime image: Ubuntu with a local ArangoDB installation plus the
# pre-built catalyst binary (the binary is built in CI, not in this image).
FROM ubuntu:18.04
# Tools required to add the ArangoDB apt repository.
RUN apt-get update -y && apt-get -y install curl gnupg2 software-properties-common
RUN curl -OL https://download.arangodb.com/arangodb34/DEBIAN/Release.key
RUN apt-key add Release.key
RUN apt-add-repository 'deb https://download.arangodb.com/arangodb34/DEBIAN/ /'
RUN apt-get update -y && apt-get -y install arangodb3
COPY catalyst /app/catalyst
# NOTE(review): shell-form CMD — the binary runs as a child of /bin/sh and
# does not receive signals directly; confirm whether the exec form
# CMD ["/app/catalyst"] is wanted.
CMD /app/catalyst
EXPOSE 8000

104
LICENSE.md Normal file
View File

@@ -0,0 +1,104 @@
Copyright (c) 2021-present Jonas Plum
Portions of this software are licensed as follows:
* All third party components incorporated into Catalyst are licensed under the
original license provided by the owner of the applicable component. Those
files contain a license notice on top of the file and are listed in the
[NOTICE](NOTICE) file.
* Content outside of the above-mentioned files is
available under the "Elastic License 2.0" license as defined below.
# Elastic License 2.0
URL: https://www.elastic.co/licensing/elastic-license
## Acceptance
By using the software, you agree to all of the terms and conditions below.
## Copyright License
The licensor grants you a non-exclusive, royalty-free, worldwide,
non-sublicensable, non-transferable license to use, copy, distribute, make
available, and prepare derivative works of the software, in each case subject to
the limitations and conditions below.
## Limitations
You may not provide the software to third parties as a hosted or managed
service, where the service provides users with access to any substantial set of
the features or functionality of the software.
You may not move, change, disable, or circumvent the license key functionality
in the software, and you may not remove or obscure any functionality in the
software that is protected by the license key.
You may not alter, remove, or obscure any licensing, copyright, or other notices
of the licensor in the software. Any use of the licensor's trademarks is subject
to applicable law.
## Patents
The licensor grants you a license, under any patent claims the licensor can
license, or becomes able to license, to make, have made, use, sell, offer for
sale, import and have imported the software, in each case subject to the
limitations and conditions in this license. This license does not cover any
patent claims that you cause to be infringed by modifications or additions to
the software. If you or your company make any written claim that the software
infringes or contributes to infringement of any patent, your patent license for
the software granted under these terms ends immediately. If your company makes
such a claim, your patent license ends immediately for work on behalf of your
company.
## Notices
You must ensure that anyone who gets a copy of any part of the software from you
also gets a copy of these terms.
If you modify the software, you must include in any modified copies of the
software prominent notices stating that you have modified the software.
## No Other Rights
These terms do not imply any licenses other than those expressly granted in
these terms.
## Termination
If you use the software in violation of these terms, such use is not licensed,
and your licenses will automatically terminate. If the licensor provides you
with a notice of your violation, and you cease all violation of this license no
later than 30 days after you receive that notice, your licenses will be
reinstated retroactively. However, if you violate these terms after such
reinstatement, any additional violation of these terms will cause your licenses
to terminate automatically and permanently.
## No Liability
*As far as the law allows, the software comes as is, without any warranty or
condition, and the licensor will not be liable to you for any damages arising
out of these terms or the use or nature of the software, under any kind of
legal claim.*
## Definitions
The **licensor** is the entity offering these terms, and the **software** is the
software the licensor makes available under these terms, including any portion
of it.
**you** refers to the individual or entity agreeing to these terms.
**your company** is any legal entity, sole proprietorship, or other kind of
organization that you work for, plus all organizations that have control over,
are under the control of, or are under common control with that
organization. **control** means ownership of substantially all the assets of an
entity, or the power to direct its management and policies by vote, contract, or
otherwise. Control can be direct or indirect.
**your licenses** are all the licenses granted to you for the software under
these terms.
**use** means anything you do with the software requiring one of your licenses.
**trademark** means trademarks, service marks, and similar rights.

16
NOTICE Normal file
View File

@@ -0,0 +1,16 @@
The following components are included in this product:
Badgerodon Collections
https://github.com/badgerodon/collections
Copyright (c) 2012 Caleb Doxsey
Licensed under the MIT License
go-toposort
https://github.com/philopon/go-toposort
Copyright (c) 2017 Hirotomo Moriwaki
Licensed under the MIT License
The Go programming language
https://go.dev/
Copyright (c) 2009 The Go Authors
See https://go.dev/LICENSE for license details.

78
README.md Normal file
View File

@@ -0,0 +1,78 @@
<h1 align="center">
<img width="30" alt="Screenshot of the playbook part of a ticket" src="ui/public/flask_white.svg" />
Catalyst</h1>
<h3 align="center">Speed up your reactions</h3>
<h4 align="center">
<a href="https://catalyst-soar.com">Website</a>
-
<a href="https://catalyst-soar.com/docs/category/catalyst-handbook">The Catalyst Handbook (Documentation)</a>
-
<a href="https://try.catalyst-soar.com">Try online</a> (user: bob, password: bob)
</h4>
Catalyst is an incident response platform or SOAR (Security Orchestration, Automation and Response) system. It can help
you to automate your alert handling and incident response procedures.
## Features
### Ticket (Alert & Incident) Management
![Screenshot of a ticket](docs/screenshots/ticket.png)
Tickets are the core of Catalyst. They represent alerts, incidents, forensics
investigations, threat hunts or any other event you want to handle in your
organisation.
### Ticket Templates
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/details.png" />
</center>
Templates define the custom information for tickets. The core information for
tickets like title, creation date or closing status is kept quite minimal and other
information like criticality, description or MITRE ATT&CK information can be
added individually.
### Conditional Custom Fields
<center>
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/conditional_custom_field_a.png" />
<img width="400" alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/conditional_custom_field_b.png" />
</center>
Custom Fields can be dependent on each other. So if you, for example, choose
"malware" as an incident type, a custom field asks you to define it further as
ransomware, worm, etc., while a "phishing" incident would ask for the number
of received mails in that campaign.
### Playbooks
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/phishing_playbook.png" />
</center>
Playbooks represent processes that can be attached to tickets. Playbooks can
contain manual and automated tasks. Complex workflows with different workflow
branches, parallel tasks and task dependencies can be modeled.
### Automations
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/script.png" />
</center>
Automations are scripts that automate tasks or enrich artifacts. Automations are
run in their own Docker containers. This enables them to be created in different
scripting languages and run securely in their own environment.
### Users
<center>
<img alt="Screenshot of the playbook part of a ticket" src="docs/screenshots/roles.png" />
</center>
Catalyst has two different types of users, normal users accessing the platform
via OIDC authentication and API keys for external scripts. A
fine-grained access model is available for both types and allows you to define
the possible actions for each user.

1
VERSION Normal file
View File

@@ -0,0 +1 @@
0.0.0-dev

403
auth.go Normal file
View File

@@ -0,0 +1,403 @@
package catalyst
import (
	"context"
	"crypto/rand"
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"log"
	"net/http"
	"strings"
	"time"

	"github.com/coreos/go-oidc/v3/oidc"
	"github.com/gin-contrib/sessions"
	"github.com/gin-gonic/gin"
	"golang.org/x/oauth2"

	"github.com/SecurityBrewery/catalyst/database"
	"github.com/SecurityBrewery/catalyst/database/busdb"
	"github.com/SecurityBrewery/catalyst/generated/models"
	"github.com/SecurityBrewery/catalyst/hooks"
	"github.com/SecurityBrewery/catalyst/role"
)
// AuthConfig bundles the OIDC/OAuth2 settings used to authenticate users and
// to map identity-provider claims onto Catalyst users.
type AuthConfig struct {
	OIDCIssuer string         // issuer URL, used for OIDC provider discovery
	OAuth2     *oauth2.Config // OAuth2 client config; Endpoint is filled in by Load

	// Claim keys used to read the username, email and display name.
	OIDCClaimUsername string
	OIDCClaimEmail    string
	// OIDCClaimGroups string
	OIDCClaimName string

	// AuthBlockNew blocks newly created users until an admin unblocks them.
	AuthBlockNew bool
	// AuthDefaultRoles are the roles assigned to newly created users.
	AuthDefaultRoles []role.Role

	provider *oidc.Provider // lazily initialized by Load
}

// Verifier returns an ID-token verifier for the configured issuer, lazily
// discovering the provider on first use. SkipClientIDCheck is enabled, so
// tokens issued to other clients of the same issuer are accepted too.
func (c *AuthConfig) Verifier(ctx context.Context) (*oidc.IDTokenVerifier, error) {
	if c.provider == nil {
		err := c.Load(ctx)
		if err != nil {
			return nil, err
		}
	}
	return c.provider.Verifier(&oidc.Config{SkipClientIDCheck: true}), nil
}

// Load discovers the OIDC provider from OIDCIssuer and copies its endpoint
// into the OAuth2 config.
func (c *AuthConfig) Load(ctx context.Context) error {
	provider, err := oidc.NewProvider(ctx, c.OIDCIssuer)
	if err != nil {
		return err
	}
	c.provider = provider
	c.OAuth2.Endpoint = provider.Endpoint()
	return nil
}
const (
	// SessionName is the cookie name of the Catalyst session.
	SessionName = "catalyst-session"
	// stateSession is the session key holding the OAuth2 anti-CSRF state.
	stateSession = "state"
	// userSession is the session key holding the OIDC claims of the user.
	userSession = "user"
)
// Authenticate returns a middleware that authenticates the request by, in
// order of precedence: a PRIVATE-TOKEN API-key header, a bearer token in the
// "User" header, or the session cookie (redirecting to the OIDC login when
// no session exists).
func Authenticate(db *database.Database, config *AuthConfig) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		iss := config.OIDCIssuer
		keyHeader := ctx.Request.Header.Get("PRIVATE-TOKEN")
		if keyHeader != "" {
			keyAuth(db, keyHeader)(ctx)
			return
		}
		// NOTE(review): bearer tokens are read from a custom "User" header
		// rather than the standard "Authorization" header — confirm intended.
		authHeader := ctx.Request.Header.Get("User")
		if authHeader != "" {
			bearerAuth(db, authHeader, iss, config)(ctx)
			return
		}
		sessionAuth(db, config)(ctx)
	}
}
// oidcCtx derives a context with a one-minute timeout for calls to the OIDC
// provider. The caller must invoke the returned cancel function.
func oidcCtx(ctx *gin.Context) (context.Context, context.CancelFunc) {
	// Disabled custom-TLS variant kept for reference:
	/*
		if config.TLSCertFile != "" && config.TLSKeyFile != "" {
			cert, err := tls.LoadX509KeyPair(config.TLSCertFile, config.TLSKeyFile)
			if err != nil {
				return nil, err
			}
			rootCAs, _ := x509.SystemCertPool()
			if rootCAs == nil {
				rootCAs = x509.NewCertPool()
			}
			for _, c := range cert.Certificate {
				rootCAs.AppendCertsFromPEM(c)
			}
			return oidc.ClientContext(ctx, &http.Client{
				Transport: &http.Transport{
					TLSClientConfig: &tls.Config{
						RootCAs:            rootCAs,
						InsecureSkipVerify: true,
					},
				},
			}), nil
		}
	*/
	cctx, cancel := context.WithTimeout(ctx, time.Minute)
	return cctx, cancel
}
// bearerAuth verifies the "Bearer <token>" value against the OIDC provider,
// stores the token claims in the session, and loads the matching Catalyst
// user into the request context.
func bearerAuth(db *database.Database, authHeader string, iss string, config *AuthConfig) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		if !strings.HasPrefix(authHeader, "Bearer ") {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "no bearer token"})
			return
		}
		oidcCtx, cancel := oidcCtx(ctx)
		defer cancel()
		verifier, err := config.Verifier(oidcCtx)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "could not verify: " + err.Error()})
			return
		}
		// Strip the "Bearer " prefix (7 characters) and verify the token.
		authToken, err := verifier.Verify(oidcCtx, authHeader[7:])
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not verify bearer token: %v", err)})
			return
		}
		var claims map[string]interface{}
		if err := authToken.Claims(&claims); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("failed to parse claims: %v", err)})
			return
		}
		// Issuer check currently disabled:
		// if claims.Iss != iss {
		// 	ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "wrong issuer"})
		// 	return
		// }
		// Persist the claims so subsequent requests can authenticate via the
		// session cookie alone.
		session := sessions.Default(ctx)
		session.Set(userSession, claims)
		if err = session.Save(); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, fmt.Sprintf("could not set session: %v", err))
			return
		}
		if err = setContextClaims(ctx, db, claims, config); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not load user: %s", err)})
			return
		}
		ctx.Next()
	}
}
// keyAuth authenticates a request by the SHA-256 hash of the PRIVATE-TOKEN
// header, looked up against the stored API keys.
func keyAuth(db *database.Database, keyHeader string) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		// Only the hex SHA-256 hash of the API key is stored server-side.
		h := fmt.Sprintf("%x", sha256.Sum256([]byte(keyHeader)))
		key, err := db.UserByHash(ctx, h)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not verify private token: %v", err)})
			return
		}
		setContextUser(ctx, key, db.Hooks)
		ctx.Next()
	}
}
// sessionAuth authenticates a request from the claims stored in the session
// and redirects to the OIDC login flow when no session exists yet.
func sessionAuth(db *database.Database, config *AuthConfig) func(ctx *gin.Context) {
	return func(ctx *gin.Context) {
		session := sessions.Default(ctx)
		user := session.Get(userSession)
		if user == nil {
			redirectToLogin(ctx, session, config.OAuth2)
			return
		}
		claims, ok := user.(map[string]interface{})
		if !ok {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "claims not in session"})
			return
		}
		if err := setContextClaims(ctx, db, claims, config); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not load user: %s", err)})
			return
		}
		ctx.Next()
	}
}
// setContextClaims maps OIDC claims to a Catalyst user (creating the user and
// its user data on first login) and stores the user in the request context.
func setContextClaims(ctx *gin.Context, db *database.Database, claims map[string]interface{}, config *AuthConfig) error {
	newUser, newSetting, err := mapUserAndSettings(claims, config)
	if err != nil {
		return err
	}
	// The get-or-create calls below need an acting user in the context; fall
	// back to an internal "auth" admin identity if none is set yet.
	if _, ok := busdb.UserFromContext(ctx); !ok {
		busdb.SetContext(ctx, &models.UserResponse{ID: "auth", Roles: []string{role.Admin}, Apikey: false, Blocked: false})
	}
	user, err := db.UserGetOrCreate(ctx, newUser)
	if err != nil {
		return err
	}
	_, err = db.UserDataGetOrCreate(ctx, newUser.ID, newSetting)
	if err != nil {
		return err
	}
	setContextUser(ctx, user, db.Hooks)
	return nil
}
// setContextUser stores the user — and, when the groups hook succeeds, the
// user's groups — in the request context. Hook errors are silently ignored.
func setContextUser(ctx *gin.Context, user *models.UserResponse, hooks *hooks.Hooks) {
	groups, err := hooks.GetGroups(ctx, user.ID)
	if err == nil {
		busdb.SetGroupContext(ctx, groups)
	}
	busdb.SetContext(ctx, user)
}
// mapUserAndSettings converts OIDC claims into a user form and user data
// using the claim keys configured in config. The username claim is required;
// missing email or name claims fall back to empty strings.
func mapUserAndSettings(claims map[string]interface{}, config *AuthConfig) (*models.UserForm, *models.UserData, error) {
	// handle Bearer tokens
	// if typ, ok := claims["typ"]; ok && typ == "Bearer" {
	// 	return &models.User{
	// 		Username: "bot",
	// 		Blocked:  false,
	// 		Email:    pointer.String("bot@example.org"),
	// 		Roles:    []string{"user:read", "settings:read", "ticket", "backup:read", "backup:restore"},
	// 		Name:     pointer.String("Bot"),
	// 	}, nil
	// }
	username, err := getString(claims, config.OIDCClaimUsername)
	if err != nil {
		return nil, nil, err
	}
	email, err := getString(claims, config.OIDCClaimEmail)
	if err != nil {
		email = ""
	}
	name, err := getString(claims, config.OIDCClaimName)
	if err != nil {
		name = ""
	}
	return &models.UserForm{
		ID:      username,
		Blocked: config.AuthBlockNew,
		Roles:   role.Strings(config.AuthDefaultRoles),
	}, &models.UserData{
		Email: &email,
		Name:  &name,
	}, nil
}
// getString extracts the string stored under key from the claim map m. It
// returns an error when the key is absent or holds a non-string value.
func getString(m map[string]interface{}, key string) (string, error) {
	value, found := m[key]
	if !found {
		return "", fmt.Errorf("mapping of %s failed, missing value", key)
	}
	text, isString := value.(string)
	if !isString {
		return "", fmt.Errorf("mapping of %s failed, wrong type (%T)", key, value)
	}
	return text, nil
}
// redirectToLogin stores a fresh anti-CSRF state value in the session and
// redirects the client to the OAuth2 authorization endpoint.
func redirectToLogin(ctx *gin.Context, session sessions.Session, oauth2Config *oauth2.Config) {
	state, err := state()
	if err != nil {
		ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "generating state failed"})
		return
	}
	session.Set(stateSession, state)
	err = session.Save()
	if err != nil {
		// Best effort: the redirect still happens, but the callback will then
		// fail its state check.
		log.Println(err)
	}
	ctx.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
	log.Println("abort", ctx.Request.URL.String())
	ctx.Abort()
}
// AuthorizeBlockedUser is a middleware that rejects requests from blocked
// users with 403. It requires an authenticated user in the context.
func AuthorizeBlockedUser(ctx *gin.Context) {
	user, ok := busdb.UserFromContext(ctx)
	if !ok {
		ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "no user in context"})
		return
	}
	if user.Blocked {
		ctx.AbortWithStatusJSON(http.StatusForbidden, gin.H{"error": "user is blocked"})
		return
	}
	ctx.Next()
}
// AuthorizeRole returns a middleware that rejects users lacking the given
// roles with 403. It requires an authenticated user in the context.
func AuthorizeRole(roles []role.Role) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		user, ok := busdb.UserFromContext(ctx)
		if !ok {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "no user in context"})
			return
		}
		if !role.UserHasRoles(user, roles) {
			ctx.AbortWithStatusJSON(http.StatusForbidden, gin.H{"error": fmt.Sprintf("missing role %s has %s", roles, user.Roles)})
			return
		}
		ctx.Next()
	}
}
// callback returns the OAuth2 redirect handler: it validates the anti-CSRF
// state, exchanges the authorization code for tokens, verifies the ID token,
// stores its claims in the session and redirects to the application root.
func callback(config *AuthConfig) gin.HandlerFunc {
	return func(ctx *gin.Context) {
		session := sessions.Default(ctx)
		state := session.Get(stateSession)
		if state == "" {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "state missing"})
			return
		}
		// The state returned by the provider must match the one generated in
		// redirectToLogin.
		if state != ctx.Query("state") {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "state mismatch"})
			return
		}
		oauth2Token, err := config.OAuth2.Exchange(ctx, ctx.Query("code"))
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": gin.H{"error": fmt.Sprintf("oauth2 exchange failed: %s", err)}})
			return
		}
		// Extract the ID Token from OAuth2 token.
		rawIDToken, ok := oauth2Token.Extra("id_token").(string)
		if !ok {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "missing id token"})
			return
		}
		oidcCtx, cancel := oidcCtx(ctx)
		defer cancel()
		verifier, err := config.Verifier(oidcCtx)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"error": "could not verify: " + err.Error()})
			return
		}
		// Parse and verify ID Token payload.
		idToken, err := verifier.Verify(oidcCtx, rawIDToken)
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "token verification failed: " + err.Error()})
			return
		}
		// Extract custom claims
		var claims map[string]interface{}
		if err := idToken.Claims(&claims); err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": "claim extraction failed"})
			return
		}
		session.Set(userSession, claims)
		err = session.Save()
		if err != nil {
			ctx.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": fmt.Sprintf("could not save session %s", err)})
			return
		}
		ctx.Redirect(http.StatusFound, "/")
	}
}
// state returns a fresh, URL-safe random string used as the OAuth2 "state"
// parameter, protecting the login flow against CSRF.
//
// The state value must be unpredictable, so the rand package imported at the
// top of this file must be crypto/rand (the original import was math/rand,
// whose output is predictable and unsuitable for security tokens).
func state() (string, error) {
	rnd := make([]byte, 32)
	if _, err := rand.Read(rnd); err != nil {
		return "", err
	}
	return base64.URLEncoding.EncodeToString(rnd), nil
}

26
automation/automation.go Normal file
View File

@@ -0,0 +1,26 @@
package automation
import (
"context"
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/role"
)
// New wires the automation engine to the message bus: it subscribes a
// handler that runs automation jobs in Docker containers and a handler that
// feeds job results back into tickets and artifacts.
//
// Errors from either subscription are returned to the caller.
func New(apiurl, apikey string, bus *bus.Bus, db *database.Database) error {
	if err := jobAutomation(jobContext(), apiurl, apikey, bus, db); err != nil {
		// Log and return instead of log.Fatal: a library function must not
		// terminate the whole process; the caller decides how to react.
		log.Println(err)
		return err
	}
	return resultAutomation(bus, db)
}
// jobContext returns a background context carrying an internal "bot" admin
// user, used for database writes performed by the automation handlers.
func jobContext() context.Context {
	// TODO: change roles?
	bot := &models.UserResponse{ID: "bot", Roles: []string{role.Admin}}
	return busdb.UserContext(context.Background(), bot)
}

186
automation/docker.go Normal file
View File

@@ -0,0 +1,186 @@
package automation
import (
"archive/tar"
"bufio"
"bytes"
"context"
"fmt"
"io"
"log"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/client"
"github.com/docker/docker/pkg/stdcopy"
"github.com/SecurityBrewery/catalyst/database"
)
// createContainer pulls image, creates a container that executes /script
// with data as its single argument, and copies the script into the
// container. It returns the container ID and the image-pull log.
func createContainer(ctx context.Context, image, script, data string) (string, string, error) {
	cli, err := client.NewClientWithOpts(client.FromEnv)
	if err != nil {
		return "", "", err
	}
	logs, err := pullImage(ctx, cli, image)
	if err != nil {
		return "", logs, err
	}
	config := &container.Config{
		Image: image, Cmd: []string{"/script", data}, WorkingDir: "/home",
		AttachStderr: true, AttachStdout: true,
	}
	resp, err := cli.ContainerCreate(ctx, config, nil, nil, "")
	if err != nil {
		return "", logs, err
	}
	if err := copyFile(ctx, cli, "/script", script, resp.ID); err != nil {
		return "", logs, err
	}
	return resp.ID, logs, nil
}
// pullImage pulls image via the Docker API and returns the pull output.
// The response body must be read to completion for the pull to finish.
func pullImage(ctx context.Context, cli *client.Client, image string) (string, error) {
	reader, err := cli.ImagePull(ctx, image, types.ImagePullOptions{})
	if err != nil {
		return "", err
	}
	defer reader.Close()
	buf := &bytes.Buffer{}
	_, err = io.Copy(buf, reader)
	return buf.String(), err
}
// copyFile writes contentString as an executable (mode 0755) file at path
// inside the container with the given id, using a single-entry tar stream as
// required by the Docker copy API.
func copyFile(ctx context.Context, cli *client.Client, path string, contentString string, id string) error {
	tarBuf := &bytes.Buffer{}
	tw := tar.NewWriter(tarBuf)
	if err := tw.WriteHeader(&tar.Header{Name: path, Mode: 0755, Size: int64(len(contentString))}); err != nil {
		return err
	}
	if _, err := tw.Write([]byte(contentString)); err != nil {
		return err
	}
	if err := tw.Close(); err != nil {
		return err
	}
	if err := cli.CopyToContainer(ctx, id, "/", tarBuf, types.CopyToContainerOptions{}); err != nil {
		return err
	}
	return nil
}
// runDocker starts the prepared container, streams its stderr into the job
// log, waits for completion and returns the container's stdout and stderr.
// The container is force-removed when the function returns.
func runDocker(ctx context.Context, jobID, containerID string, db *database.Database) (stdout []byte, stderr []byte, err error) {
	cli, err := client.NewClientWithOpts(client.FromEnv)
	if err != nil {
		return nil, nil, err
	}
	defer cli.ContainerRemove(ctx, containerID, types.ContainerRemoveOptions{Force: true})
	if err := cli.ContainerStart(ctx, containerID, types.ContainerStartOptions{}); err != nil {
		return nil, nil, err
	}
	stderrBuf, err := streamStdErr(ctx, cli, jobID, containerID, db)
	if err != nil {
		return nil, nil, err
	}
	if err := waitForContainer(ctx, cli, containerID, stderrBuf); err != nil {
		return nil, nil, err
	}
	output, err := getStdOut(ctx, cli, containerID)
	if err != nil {
		// Guard the error path: the original code continued and called
		// output.Bytes() on a nil *bytes.Buffer, which panics.
		log.Println(err)
		return nil, stderrBuf.Bytes(), nil
	}
	return output.Bytes(), stderrBuf.Bytes(), nil
}
// streamStdErr follows the container's stderr in a background goroutine,
// mirroring each line into the returned buffer and appending it to the job
// log in the database.
func streamStdErr(ctx context.Context, cli *client.Client, jobID, containerID string, db *database.Database) (*bytes.Buffer, error) {
	stderrBuf := &bytes.Buffer{}
	containerLogs, err := cli.ContainerLogs(ctx, containerID, types.ContainerLogsOptions{ShowStderr: true, Follow: true})
	if err != nil {
		return nil, err
	}
	go func() {
		err := scanLines(ctx, jobID, containerLogs, stderrBuf, db)
		if err != nil {
			log.Println(err)
			return
		}
		if err := containerLogs.Close(); err != nil {
			log.Println(err)
			return
		}
	}()
	return stderrBuf, nil
}
// scanLines demultiplexes the Docker log stream from input and processes it
// line by line: each line is written to output (errors from output.Write are
// ignored — best-effort mirror) and appended to the job log in the database.
func scanLines(ctx context.Context, jobID string, input io.ReadCloser, output io.Writer, db *database.Database) error {
	// stdcopy strips the Docker stream multiplexing headers; both stdout and
	// stderr frames of this stream are funneled into the same pipe.
	r, w := io.Pipe()
	go func() {
		_, err := stdcopy.StdCopy(w, w, input)
		if err != nil {
			log.Println(err)
			return
		}
		if err := w.Close(); err != nil {
			log.Println(err)
			return
		}
	}()
	s := bufio.NewScanner(r)
	for s.Scan() {
		b := s.Bytes()
		output.Write(b)
		output.Write([]byte("\n"))
		if err := db.JobLogAppend(ctx, jobID, string(b)+"\n"); err != nil {
			log.Println(err)
			continue
		}
	}
	return s.Err()
}
// waitForContainer blocks until the container stops. It returns an error if
// waiting fails or the container exits with a non-zero status code; in the
// latter case the collected stderr is included in the error message.
func waitForContainer(ctx context.Context, cli *client.Client, containerID string, stderrBuf *bytes.Buffer) error {
	statusCh, errCh := cli.ContainerWait(ctx, containerID, container.WaitConditionNotRunning)
	select {
	case err := <-errCh:
		if err != nil {
			return err
		}
	case exitStatus := <-statusCh:
		if exitStatus.StatusCode != 0 {
			return fmt.Errorf("container returned status code %d: stderr: %s", exitStatus.StatusCode, stderrBuf.String())
		}
	}
	return nil
}
// getStdOut fetches the container's collected stdout after it has finished,
// demultiplexing the Docker log stream into a single buffer.
func getStdOut(ctx context.Context, cli *client.Client, containerID string) (*bytes.Buffer, error) {
	output := &bytes.Buffer{}
	containerLogs, err := cli.ContainerLogs(ctx, containerID, types.ContainerLogsOptions{ShowStdout: true, Follow: true})
	if err != nil {
		return nil, err
	}
	defer containerLogs.Close()
	_, err = stdcopy.StdCopy(output, output, containerLogs)
	if err != nil {
		return nil, err
	}
	return output, nil
}

116
automation/job.go Normal file
View File

@@ -0,0 +1,116 @@
package automation
import (
"encoding/json"
"fmt"
"log"
"golang.org/x/net/context"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// jobAutomation subscribes a handler to job messages on the bus. For each
// message it records the job, loads the automation's script, injects the
// Catalyst API credentials as secrets, runs the script in a Docker container
// and publishes the parsed result (or an error payload) back onto the bus.
func jobAutomation(ctx context.Context, apiurl, apikey string, catalystBus *bus.Bus, db *database.Database) error {
	return catalystBus.SubscribeJob(func(automationMsg *bus.JobMsg) {
		job, err := db.JobCreate(ctx, automationMsg.ID, &models.JobForm{
			Automation: automationMsg.Automation,
			Payload:    automationMsg.Message.Payload,
			Origin:     automationMsg.Origin,
		})
		if err != nil {
			log.Println(err)
			return
		}
		automation, err := db.AutomationGet(ctx, automationMsg.Automation)
		if err != nil {
			log.Println(err)
			return
		}
		if automation.Script == "" {
			log.Println("automation is empty")
			return
		}
		// Inject the Catalyst API credentials so scripts can call back into
		// the Catalyst API.
		if automationMsg.Message.Secrets == nil {
			automationMsg.Message.Secrets = map[string]string{}
		}
		automationMsg.Message.Secrets["catalyst_apikey"] = apikey
		automationMsg.Message.Secrets["catalyst_apiurl"] = apiurl
		// NOTE(review): the json.Marshal error is discarded — confirm the
		// message is always serializable.
		scriptMessage, _ := json.Marshal(automationMsg.Message)
		containerID, logs, err := createContainer(ctx, automation.Image, automation.Script, string(scriptMessage))
		if err != nil {
			log.Println(err)
			return
		}
		// Record the container ID and the image-pull log on the job.
		if _, err := db.JobUpdate(ctx, automationMsg.ID, &models.Job{
			Automation: job.Automation,
			Container:  &containerID,
			Origin:     job.Origin,
			Output:     job.Output,
			Log:        &logs,
			Payload:    job.Payload,
			Status:     job.Status,
		}); err != nil {
			log.Println(err)
			return
		}
		// Run the container; a JSON object on stdout becomes the result,
		// anything else is wrapped into an "error" payload.
		var result map[string]interface{}
		stdout, _, err := runDocker(ctx, automationMsg.ID, containerID, db)
		if err != nil {
			result = map[string]interface{}{"error": fmt.Sprintf("error running script %s %s", err, string(stdout))}
		} else {
			var data map[string]interface{}
			if err := json.Unmarshal(stdout, &data); err != nil {
				result = map[string]interface{}{"error": string(stdout)}
			} else {
				result = data
			}
		}
		if err := catalystBus.PublishResult(automationMsg.Automation, result, automationMsg.Origin); err != nil {
			log.Println(err)
		}
		if err := db.JobComplete(ctx, automationMsg.ID, result); err != nil {
			log.Println(err)
			return
		}
	})
}
/*
func getAutomation(automationID string, config *Config) (*models.AutomationResponse, error) {
req, err := http.NewRequest(http.MethodGet, config.CatalystAPIUrl+"/automations/"+automationID, nil)
if err != nil {
return nil, err
}
req.Header.Set("PRIVATE-TOKEN", config.CatalystAPIKey)
resp, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
b, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var automation models.AutomationResponse
if err := json.Unmarshal(b, &automation); err != nil {
return nil, err
}
return &automation, nil
}
*/

38
automation/result.go Normal file
View File

@@ -0,0 +1,38 @@
package automation
import (
"log"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// resultAutomation subscribes a handler for automation results: depending on
// the message origin it either completes the originating playbook task or
// attaches the result to the originating artifact as an enrichment. Errors
// are logged but do not stop the subscription.
func resultAutomation(catalystBus *bus.Bus, db *database.Database) error {
	return catalystBus.SubscribeResult(func(resultMsg *bus.ResultMsg) {
		if resultMsg.Target != nil {
			// Writes run as the internal "bot" user.
			ctx := jobContext()
			switch {
			case resultMsg.Target.TaskOrigin != nil:
				if _, err := db.TaskComplete(
					ctx,
					resultMsg.Target.TaskOrigin.TicketId,
					resultMsg.Target.TaskOrigin.PlaybookId,
					resultMsg.Target.TaskOrigin.TaskId,
					resultMsg.Data,
				); err != nil {
					log.Println(err)
				}
			case resultMsg.Target.ArtifactOrigin != nil:
				enrichment := &models.EnrichmentForm{
					Data: resultMsg.Data,
					Name: resultMsg.Automation,
				}
				_, err := db.EnrichArtifact(ctx, resultMsg.Target.ArtifactOrigin.TicketId, resultMsg.Target.ArtifactOrigin.Artifact, enrichment)
				if err != nil {
					log.Println(err)
				}
			}
		}
	})
}

149
backup.go Normal file
View File

@@ -0,0 +1,149 @@
package catalyst
import (
"archive/zip"
"bytes"
"io"
"io/fs"
"log"
"net/http"
"os"
"os/exec"
"path"
"strings"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/storage"
)
// BackupHandler streams a full backup (S3 buckets plus ArangoDB dump) to the
// client as a zip download.
//
// NOTE(review): the headers are written before Backup runs, so a mid-stream
// failure yields a truncated download with a JSON error appended — confirm
// whether buffering the archive first is wanted.
func BackupHandler(catalystStorage *storage.Storage, c *database.Config) gin.HandlerFunc {
	return func(context *gin.Context) {
		context.Header("Content-Disposition", "attachment; filename=backup.zip")
		context.Header("Content-Type", "application/zip")
		err := Backup(catalystStorage, c, context.Writer)
		if err != nil {
			log.Println(err)
			context.AbortWithStatusJSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
		}
	}
}
// WriterAtBuffer adapts a bytes.Buffer to the io.WriterAt interface expected
// by the S3 downloader.
//
// NOTE(review): WriteAt ignores the offset and simply appends, so it is only
// correct when parts arrive strictly in order (downloader concurrency of 1
// or single-part objects) — confirm the download configuration.
type WriterAtBuffer struct {
	bytes.Buffer
}

// WriteAt appends p to the buffer. The receiver must be a pointer: the
// original value receiver wrote into a copy of the buffer, silently
// discarding all downloaded data.
func (fw *WriterAtBuffer) WriteAt(p []byte, offset int64) (n int, err error) {
	return fw.Write(p)
}
// Backup writes a complete Catalyst backup to writer as a zip archive:
// all S3 objects under "minio/" and the ArangoDB dump under "arango/".
// The zip comment records the Catalyst version (see GetVersion).
func Backup(catalystStorage *storage.Storage, c *database.Config, writer io.Writer) error {
	archive := zip.NewWriter(writer)

	if err := archive.SetComment(GetVersion()); err != nil {
		archive.Close()
		return err
	}

	// S3
	if err := backupS3(catalystStorage, archive); err != nil {
		archive.Close()
		return err
	}

	// Arango
	if err := backupArango(c, archive); err != nil {
		archive.Close()
		return err
	}

	// Close flushes the zip central directory. Its error must be returned:
	// the previous deferred Close dropped it, so a truncated archive could be
	// reported as a successful backup.
	return archive.Close()
}
// backupS3 copies every object of every S3 bucket into the archive under
// "minio/<bucket>/<key>".
func backupS3(catalystStorage *storage.Storage, archive *zip.Writer) error {
	buckets, err := catalystStorage.S3().ListBuckets(nil)
	if err != nil {
		return err
	}
	for _, bucket := range buckets.Buckets {
		// NOTE(review): only a single ListObjectsV2 page is read per bucket
		// and no continuation token is followed — confirm buckets stay below
		// the page limit or add pagination.
		objects, err := catalystStorage.S3().ListObjectsV2(&s3.ListObjectsV2Input{
			Bucket: bucket.Name,
		})
		if err != nil {
			return err
		}
		for _, content := range objects.Contents {
			// Download the whole object into memory, then copy it into the
			// archive entry.
			rbuf := &WriterAtBuffer{}
			_, err := catalystStorage.Downloader().Download(rbuf, &s3.GetObjectInput{
				Bucket: bucket.Name,
				Key:    content.Key,
			})
			if err != nil {
				return err
			}
			a, err := archive.Create(path.Join("minio", *bucket.Name, *content.Key))
			if err != nil {
				return err
			}
			if _, err := io.Copy(a, rbuf); err != nil {
				return err
			}
		}
	}
	return nil
}
// backupArango dumps the ArangoDB database with the external arangodump tool
// into a temporary directory and stores the dump files in the archive under
// "arango/". The temporary directory is removed afterwards.
func backupArango(c *database.Config, archive *zip.Writer) error {
	tmpDir, err := os.MkdirTemp("", "catalyst-backup")
	if err != nil {
		return err
	}
	defer os.RemoveAll(tmpDir)

	if dumpErr := arangodump(tmpDir, c); dumpErr != nil {
		return dumpErr
	}

	return zipDump(tmpDir, archive)
}
func zipDump(dir string, archive *zip.Writer) error {
fsys := os.DirFS(dir)
return fs.WalkDir(fsys, ".", func(p string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() {
return nil
}
a, err := archive.Create(path.Join("arango", p))
if err != nil {
return err
}
f, err := fsys.Open(p)
if err != nil {
return err
}
if _, err := io.Copy(a, f); err != nil {
return err
}
return nil
})
}
// arangodump runs the external arangodump binary and writes the dump of the
// configured database into dir. It falls back to the default database name
// when the config leaves it empty.
func arangodump(dir string, config *database.Config) error {
	// arangodump expects a tcp:// (tcps:// for TLS) endpoint while the config
	// stores an http:// / https:// URL; rewrite the first occurrence of the
	// scheme prefix.
	host := strings.Replace(config.Host, "http", "tcp", 1)
	name := config.Name
	if config.Name == "" {
		name = database.Name
	}
	args := []string{
		"--output-directory", dir, "--server.endpoint", host,
		"--server.username", config.User, "--server.password", config.Password,
		"--server.database", name,
	}
	// NOTE(review): stdout/stderr are discarded, so a failing dump surfaces
	// only as "exit status N" — consider capturing output for diagnostics.
	cmd := exec.Command("arangodump", args...)
	return cmd.Run()
}

139
bus/bus.go Normal file
View File

@@ -0,0 +1,139 @@
package bus
import (
"encoding/json"
"log"
"github.com/arangodb/go-driver"
emitter "github.com/emitter-io/go/v2"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// Names of the emitter bus channels used by Catalyst.
const (
	channelUpdate = "data"   // document change notifications
	channelJob    = "job"    // automation job requests
	channelResult = "result" // automation job results
)
// Bus is a client for the emitter message bus over which Catalyst exchanges
// update, job and result messages.
type Bus struct {
	config *Config
	client *emitter.Client
}

// Config holds the bus connection settings. The unexported per-channel keys
// are derived from Key by New.
type Config struct {
	Host         string // emitter broker address
	Key          string // master key; channel keys are generated from it
	resultBusKey string
	jobBusKey    string
	dataBusKey   string
	APIUrl       string
}

// JobMsg is the message published on the job channel to start an automation.
type JobMsg struct {
	ID         string          `json:"id"`
	Automation string          `json:"automation"`
	Origin     *models.Origin  `json:"origin"`
	Message    *models.Message `json:"message"`
}

// ResultMsg is the message published on the result channel with the output of
// an automation. Target, when set, names the ticket/task or artifact the
// result should be written back to.
type ResultMsg struct {
	Automation string                 `json:"automation"`
	Data       map[string]interface{} `json:"data,omitempty"`
	Target     *models.Origin         `json:"target"`
}
// New connects to the emitter broker configured in c, derives the channel
// keys for the update, job and result channels from the master key, and
// returns a ready-to-use Bus.
func New(c *Config) (*Bus, error) {
	client, err := emitter.Connect(c.Host, func(_ *emitter.Client, msg emitter.Message) {
		log.Printf("received: '%s' topic: '%s'\n", msg.Payload(), msg.Topic())
	})
	if err != nil {
		return nil, err
	}

	// Derive one key per channel, in the same order as before: data, job,
	// result.
	channelKeys := []struct {
		channel string
		target  *string
	}{
		{channelUpdate, &c.dataBusKey},
		{channelJob, &c.jobBusKey},
		{channelResult, &c.resultBusKey},
	}
	for _, ck := range channelKeys {
		key, keyErr := client.GenerateKey(c.Key, ck.channel+"/", "rwls", 0)
		if keyErr != nil {
			return nil, keyErr
		}
		*ck.target = key
	}

	return &Bus{config: c, client: client}, nil
}
// PublishUpdate broadcasts the IDs of changed documents on the update channel.
func (b *Bus) PublishUpdate(ids []driver.DocumentID) error {
	return b.jsonPublish(ids, channelUpdate, b.config.dataBusKey)
}
// PublishJob puts a new automation job on the job channel. origin, when set,
// records where the job was started from so the result can be written back.
func (b *Bus) PublishJob(id, automation string, payload interface{}, context *models.Context, origin *models.Origin) error {
	msg := &JobMsg{
		ID:         id,
		Automation: automation,
		Origin:     origin,
		Message: &models.Message{
			Context: context,
			Payload: payload,
		},
	}

	return b.jsonPublish(msg, channelJob, b.config.jobBusKey)
}
// PublishResult publishes the output of an automation on the result channel,
// addressed to the given target origin.
func (b *Bus) PublishResult(automation string, data map[string]interface{}, target *models.Origin) error {
	return b.jsonPublish(&ResultMsg{Automation: automation, Data: data, Target: target}, channelResult, b.config.resultBusKey)
}
// jsonPublish marshals msg to JSON and publishes it on the given channel
// using that channel's key.
func (b *Bus) jsonPublish(msg interface{}, channel, key string) error {
	payload, marshalErr := json.Marshal(msg)
	if marshalErr != nil {
		return marshalErr
	}

	return b.client.Publish(key, channel, payload)
}
// SubscribeUpdate registers f to be called (on its own goroutine) with the
// document IDs of every update message published on the bus. Messages that
// fail to unmarshal are logged and dropped.
func (b *Bus) SubscribeUpdate(f func(ids []driver.DocumentID)) error {
	handle := func(_ *emitter.Client, m emitter.Message) {
		var ids []driver.DocumentID
		if err := json.Unmarshal(m.Payload(), &ids); err != nil {
			log.Println(err)

			return
		}

		go f(ids)
	}

	return b.safeSubscribe(b.config.dataBusKey, channelUpdate, handle)
}
// SubscribeJob registers f to be called (on its own goroutine) with every job
// message published on the bus. Messages that fail to unmarshal are logged
// and dropped.
func (b *Bus) SubscribeJob(f func(msg *JobMsg)) error {
	handle := func(_ *emitter.Client, m emitter.Message) {
		jobMsg := &JobMsg{}
		if err := json.Unmarshal(m.Payload(), jobMsg); err != nil {
			log.Println(err)

			return
		}

		go f(jobMsg)
	}

	return b.safeSubscribe(b.config.jobBusKey, channelJob, handle)
}
// SubscribeResult registers f to be called (on its own goroutine) with every
// result message published on the bus. Messages that fail to unmarshal are
// logged and dropped.
func (b *Bus) SubscribeResult(f func(msg *ResultMsg)) error {
	handle := func(_ *emitter.Client, m emitter.Message) {
		resultMsg := &ResultMsg{}
		if err := json.Unmarshal(m.Payload(), resultMsg); err != nil {
			log.Println(err)

			return
		}

		go f(resultMsg)
	}

	return b.safeSubscribe(b.config.resultBusKey, channelResult, handle)
}
// safeSubscribe subscribes handler to the given channel and shields the
// process from panics: panics during the Subscribe call and panics raised by
// the handler itself are recovered and logged instead of crashing Catalyst.
func (b *Bus) safeSubscribe(key, channel string, handler func(c *emitter.Client, m emitter.Message)) error {
	// The original recover only guarded the Subscribe call below; the handler
	// runs later, on the emitter client's own goroutine, where a panic would
	// still have crashed the process. Wrap the handler so those are recovered
	// too.
	guarded := func(c *emitter.Client, m emitter.Message) {
		defer func() {
			if r := recover(); r != nil {
				log.Printf("Recovered %s in channel %s\n", r, channel)
			}
		}()

		handler(c, m)
	}

	// Keep the original guard around the subscription itself.
	defer func() {
		if r := recover(); r != nil {
			log.Printf("Recovered %s in channel %s\n", r, channel)
		}
	}()

	return b.client.Subscribe(key, channel, guarded)
}

182
caql/blevebuilder.go Normal file
View File

@@ -0,0 +1,182 @@
package caql
import (
"errors"
"fmt"
"strconv"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// TooComplexError is returned when a query uses CAQL features that cannot be
// expressed as a bleve index query; callers should fall back to advanced
// search.
var TooComplexError = errors.New("unsupported features for index queries, use advanced search instead")

// bleveBuilder is a parse-tree listener that translates a CAQL expression
// into a bleve query string. Intermediate results are kept on stack; the
// first untranslatable construct is recorded in err.
type bleveBuilder struct {
	*parser.BaseCAQLParserListener

	stack []string
	err   error
}
// push is a helper function for pushing a new node onto the listener stack.
func (s *bleveBuilder) push(i string) {
	s.stack = append(s.stack, i)
}
// pop is a helper function for popping a node from the listener stack.
// It panics with ErrStack when the stack is empty.
func (s *bleveBuilder) pop() (n string) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		panic(ErrStack)
	}
	// Pop the last value from the stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]
	return
}
// binaryPop pops the two topmost operands and returns them in left-to-right
// order (the right operand was pushed last).
func (s *bleveBuilder) binaryPop() (interface{}, interface{}) {
	right, left := s.pop(), s.pop()
	return left, right
}
// ExitExpression is called when production expression is exited. It rewrites
// the operands on the stack into bleve query syntax; constructs that cannot
// be expressed as an index query set s.err (TooComplexError or a more
// specific message) and leave the stack untouched.
func (s *bleveBuilder) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// pass
	case ctx.Reference() != nil:
		// pass
	case ctx.Operator_unary() != nil:
		s.err = TooComplexError
		return
	// Arithmetic has no bleve equivalent.
	case ctx.T_PLUS() != nil:
		fallthrough
	case ctx.T_MINUS() != nil:
		fallthrough
	case ctx.T_TIMES() != nil:
		fallthrough
	case ctx.T_DIV() != nil:
		fallthrough
	case ctx.T_MOD() != nil:
		s.err = TooComplexError
		return
	case ctx.T_RANGE() != nil:
		s.err = TooComplexError
		return
	// Comparisons map to bleve range/field syntax, e.g. "field:<value".
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:<%s", left, right))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:>%s", left, right))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:<=%s", left, right))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:>=%s", left, right))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		s.err = TooComplexError
		return
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s:%s", left, right))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("-%s:%s", left, right))
	// Array quantifiers (ALL/ANY/NONE) are unsupported in index queries.
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		fallthrough
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		fallthrough
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		s.err = TooComplexError
		return
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		fallthrough
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		fallthrough
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		s.err = TooComplexError
		return
	case ctx.T_LIKE() != nil:
		s.err = errors.New("index queries are like queries by default")
		return
	case ctx.T_REGEX_MATCH() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.err = TooComplexError
			return
		} else {
			s.push(fmt.Sprintf("%s:/%s/", left, right))
		}
	case ctx.T_REGEX_NON_MATCH() != nil:
		s.err = errors.New("index query cannot contain regex non matches, use advanced search instead")
		return
	// AND becomes bleve's implicit conjunction (space-separated terms).
	case ctx.T_AND() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s %s", left, right))
	case ctx.T_OR() != nil:
		s.err = errors.New("index query cannot contain OR, use advanced search instead")
		return
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
		return
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		s.err = errors.New("index query cannot contain ternary operations, use advanced search instead")
		return
	default:
		panic("unknown expression")
	}
}
// ExitReference is called when production reference is exited. Plain and
// dotted attribute references become bleve field paths; all other reference
// forms (compound values, function calls, parentheses, subscripts) cannot be
// used in index queries and set s.err.
func (s *bleveBuilder) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		reference := s.pop()
		s.push(fmt.Sprintf("%s.%s", reference, ctx.T_STRING().GetText()))
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.Compound_value() != nil:
		s.err = TooComplexError
		return
	case ctx.Function_call() != nil:
		s.err = TooComplexError
		return
	case ctx.T_OPEN() != nil:
		s.err = TooComplexError
		return
	case ctx.T_ARRAY_OPEN() != nil:
		s.err = TooComplexError
		return
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitValue_literal is called when production value_literal is exited.
// Quoted strings are normalized (unquoted, then re-quoted with Go double
// quoting); other literals are pushed verbatim.
func (s *bleveBuilder) ExitValue_literal(ctx *parser.Value_literalContext) {
	if ctx.T_QUOTED_STRING() != nil {
		st, err := unquote(ctx.GetText())
		if err != nil {
			panic(err)
		}
		s.push(strconv.Quote(st))
	} else {
		s.push(ctx.GetText())
	}
}

50
caql/blevebuilder_test.go Normal file
View File

@@ -0,0 +1,50 @@
package caql
import (
"testing"
)
// TestBleveBuilder checks the translation of CAQL queries into bleve index
// query strings, including queries that must be rejected as too complex.
func TestBleveBuilder(t *testing.T) {
	tests := []struct {
		name           string
		saql           string
		wantBleve      string
		wantParseErr   bool
		wantRebuildErr bool
	}{
		{name: "Search 1", saql: `"Bob"`, wantBleve: `"Bob"`},
		{name: "Search 2", saql: `"Bob" AND title == 'Name'`, wantBleve: `"Bob" title:"Name"`},
		{name: "Search 3", saql: `"Bob" OR title == 'Name'`, wantRebuildErr: true},
		{name: "Search 4", saql: `title == 'malware' AND 'wannacry'`, wantBleve: `title:"malware" "wannacry"`},
	}
	for _, tt := range tests {
		parser := &Parser{}
		t.Run(tt.name, func(t *testing.T) {
			expr, err := parser.Parse(tt.saql)
			if (err != nil) != tt.wantParseErr {
				t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
				if expr != nil {
					t.Error(expr.String())
				}
				return
			}
			if err != nil {
				return
			}

			got, err := expr.BleveString()
			if (err != nil) != tt.wantRebuildErr {
				t.Error(expr.String())
				// was: "String() error = %v, wantErr %v" with tt.wantParseErr —
				// it reported the wrong method name and the wrong expectation.
				t.Errorf("BleveString() error = %v, wantErr %v", err, tt.wantRebuildErr)
				return
			}
			if err != nil {
				return
			}
			if got != tt.wantBleve {
				t.Errorf("BleveString() got = %v, want %v", got, tt.wantBleve)
			}
		})
	}
}

317
caql/builder.go Normal file
View File

@@ -0,0 +1,317 @@
package caql
import (
"fmt"
"strconv"
"strings"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// Searcher resolves a full-text search term to the IDs of matching documents.
type Searcher interface {
	Search(term string) (ids []string, err error)
}

// aqlBuilder is a parse-tree listener that rebuilds a CAQL expression as an
// AQL filter string. Intermediate results are kept on stack; prefix, when
// non-empty, is prepended to attribute references that do not already carry
// it.
type aqlBuilder struct {
	*parser.BaseCAQLParserListener

	searcher Searcher
	stack    []string
	prefix   string
}
// push is a helper function for pushing a new node onto the listener stack.
func (s *aqlBuilder) push(i string) {
	s.stack = append(s.stack, i)
}
// pop is a helper function for popping a node from the listener stack.
// It panics with ErrStack when the stack is empty.
func (s *aqlBuilder) pop() (n string) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		panic(ErrStack)
	}
	// Pop the last value from the stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]
	return
}
// binaryPop pops the two topmost operands and returns them in left-to-right
// order (the right operand was pushed last).
func (s *aqlBuilder) binaryPop() (string, string) {
	right, left := s.pop(), s.pop()
	return left, right
}
// ExitExpression is called when production expression is exited. It pops the
// operands of the just-finished expression from the stack and pushes the
// rebuilt AQL text. Bare string literals in boolean position are turned into
// full-text key filters via toBoolString.
func (s *aqlBuilder) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// Only a top-level literal (no grandparent) is treated as a
		// stand-alone boolean search term.
		if ctx.GetParent().GetParent() == nil {
			s.push(s.toBoolString(s.pop()))
		}
	case ctx.Reference() != nil:
		// Rewrite the public "id" attribute to Arango's internal "_key".
		ref := s.pop()
		if ref == "d.id" {
			s.push("d._key")
		} else {
			s.push(ref)
		}
		// pass
	case ctx.Operator_unary() != nil:
		s.push(s.toBoolString(s.pop()))
	case ctx.T_PLUS() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s + %s", left, right))
	case ctx.T_MINUS() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s - %s", left, right))
	case ctx.T_TIMES() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s * %s", left, right))
	case ctx.T_DIV() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s / %s", left, right))
	case ctx.T_MOD() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s %% %s", left, right))
	case ctx.T_RANGE() != nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s..%s", left, right))
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s < %s", left, right))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s > %s", left, right))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s <= %s", left, right))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s >= %s", left, right))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT IN %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s IN %s", left, right))
		}
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s == %s", left, right))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		left, right := s.binaryPop()
		s.push(fmt.Sprintf("%s != %s", left, right))
	// Array quantifiers with an explicit comparison operator.
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ALL %s %s", left, ctx.GetEq_op().GetText(), right))
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ANY %s %s", left, ctx.GetEq_op().GetText(), right))
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s NONE %s %s", left, ctx.GetEq_op().GetText(), right))
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ALL IN %s", left, right))
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ANY IN %s", left, right))
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s NONE IN %s", left, right))
	case ctx.T_LIKE() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT LIKE %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s LIKE %s", left, right))
		}
	case ctx.T_REGEX_MATCH() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT =~ %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s =~ %s", left, right))
		}
	case ctx.T_REGEX_NON_MATCH() != nil:
		left, right := s.binaryPop()
		if ctx.T_NOT() != nil {
			s.push(fmt.Sprintf("%s NOT !~ %s", left, right))
		} else {
			s.push(fmt.Sprintf("%s !~ %s", left, right))
		}
	// Boolean connectives: operands that are bare strings become full-text
	// filters first.
	case ctx.T_AND() != nil:
		left, right := s.binaryPop()
		left = s.toBoolString(left)
		right = s.toBoolString(right)
		s.push(fmt.Sprintf("%s AND %s", left, right))
	case ctx.T_OR() != nil:
		left, right := s.binaryPop()
		left = s.toBoolString(left)
		right = s.toBoolString(right)
		s.push(fmt.Sprintf("%s OR %s", left, right))
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		right, middle, left := s.pop(), s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ? %s : %s", left, middle, right))
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		right, left := s.pop(), s.pop()
		s.push(fmt.Sprintf("%s ? : %s", left, right))
	default:
		panic("unknown expression")
	}
}
// toBoolString turns a quoted string at boolean position into a full-text
// filter: the term is looked up via the configured Searcher and replaced by a
// `d._key IN [...]` clause over the matching document keys. Values that are
// not quoted strings are returned unchanged; a failing search panics.
//
// NOTE(review): the returned ids are spliced into the AQL string verbatim —
// verify the Searcher never returns values containing `"` or this clause can
// be malformed.
func (s *aqlBuilder) toBoolString(v string) string {
	_, err := unquote(v)
	if err == nil {
		ids, err := s.searcher.Search(v)
		if err != nil {
			panic("invalid search " + err.Error())
		}
		return fmt.Sprintf(`d._key IN ["%s"]`, strings.Join(ids, `","`))
	}
	return v
}
// ExitOperator_unary is called when production operator_unary is exited.
// It replaces the operand on top of the stack with the operand prefixed by
// the unary operator.
func (s *aqlBuilder) ExitOperator_unary(ctx *parser.Operator_unaryContext) {
	operand := s.pop()

	if ctx.T_PLUS() != nil {
		// Unary plus is a no-op.
		s.push(operand)

		return
	}

	if ctx.T_MINUS() != nil {
		s.push("-" + operand)

		return
	}

	if ctx.T_NOT() != nil {
		s.push("NOT " + operand)

		return
	}

	panic(fmt.Sprintf("unexpected operation: %s", ctx.GetText()))
}
// ExitReference is called when production reference is exited. Attribute
// references get the builder's prefix prepended (unless already present);
// parenthesized and subscripted references are rebuilt around the popped
// operands.
func (s *aqlBuilder) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		reference := s.pop()
		if s.prefix != "" && !strings.HasPrefix(reference, s.prefix) {
			reference = s.prefix + reference
		}
		s.push(fmt.Sprintf("%s.%s", reference, ctx.T_STRING().GetText()))
	case ctx.T_STRING() != nil:
		reference := ctx.T_STRING().GetText()
		if s.prefix != "" && !strings.HasPrefix(reference, s.prefix) {
			reference = s.prefix + reference
		}
		s.push(reference)
	case ctx.Compound_value() != nil:
		// pass
	case ctx.Function_call() != nil:
		// pass
	case ctx.T_OPEN() != nil:
		s.push(fmt.Sprintf("(%s)", s.pop()))
	case ctx.T_ARRAY_OPEN() != nil:
		// Subscript: the index expression was pushed after the reference.
		key := s.pop()
		reference := s.pop()
		s.push(fmt.Sprintf("%s[%s]", reference, key))
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitCompound_value is called when production compound_value is exited.
// Arrays and objects already pushed their rebuilt text, so nothing to do.
func (s *aqlBuilder) ExitCompound_value(ctx *parser.Compound_valueContext) {
	// pass
}
// ExitFunction_call is called when production function_call is exited. It
// pops the argument expressions (restoring source order by prepending),
// validates the upper-cased function name against the known AQL function
// names, and pushes the rebuilt call. Unknown functions panic.
func (s *aqlBuilder) ExitFunction_call(ctx *parser.Function_callContext) {
	var array []string
	for range ctx.AllExpression() {
		// prepend element
		array = append([]string{s.pop()}, array...)
	}
	parameter := strings.Join(array, ", ")
	if !stringSliceContains(functionNames, strings.ToUpper(ctx.T_STRING().GetText())) {
		panic("unknown function")
	}
	s.push(fmt.Sprintf("%s(%s)", strings.ToUpper(ctx.T_STRING().GetText()), parameter))
}
// ExitValue_literal is called when production value_literal is exited.
// Quoted strings are normalized (unquoted, then re-quoted with Go double
// quoting); other literals are pushed verbatim.
func (s *aqlBuilder) ExitValue_literal(ctx *parser.Value_literalContext) {
	if ctx.T_QUOTED_STRING() != nil {
		st, err := unquote(ctx.GetText())
		if err != nil {
			panic(err)
		}
		s.push(strconv.Quote(st))
	} else {
		s.push(ctx.GetText())
	}
}
// ExitArray is called when production array is exited. Elements were pushed
// in source order, so popping with prepend restores that order before the
// array literal is rebuilt.
func (s *aqlBuilder) ExitArray(ctx *parser.ArrayContext) {
	var elements []string
	for range ctx.AllExpression() {
		// elements = append(elements, s.pop())
		elements = append([]string{s.pop()}, elements...)
	}
	s.push("[" + strings.Join(elements, ", ") + "]")
}
// ExitObject is called when production object is exited. For each element it
// pops the key (left by ExitObject_element on top of the stack) and its
// value, and rebuilds the object literal in source order by prepending.
func (s *aqlBuilder) ExitObject(ctx *parser.ObjectContext) {
	var elements []string
	for range ctx.AllObject_element() {
		key, value := s.pop(), s.pop()
		elements = append([]string{fmt.Sprintf("%s: %v", key, value)}, elements...)
	}
	// s.push(object)
	s.push("{" + strings.Join(elements, ", ") + "}")
}
// ExitObject_element is called when production object_element is exited. It
// arranges the stack so the element's key ends up on top, above its value,
// which is the layout ExitObject pops.
func (s *aqlBuilder) ExitObject_element(ctx *parser.Object_elementContext) {
	switch {
	case ctx.T_STRING() != nil:
		// Shorthand element: the bare name doubles as both key and value.
		s.push(ctx.GetText())
		s.push(ctx.GetText())
	case ctx.Object_element_name() != nil, ctx.T_ARRAY_OPEN() != nil:
		// NOTE(review): the local names appear swapped — the name was
		// presumably pushed before the value expression, so `key` actually
		// holds the value here. The re-push reverses the pair, which is what
		// makes ExitObject pop the real key first; confirm against the
		// grammar before renaming.
		key, value := s.pop(), s.pop()
		s.push(key)
		s.push(value)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitObject_element_name is called when production object_element_name is
// exited. It pushes the element name — bare or quoted — verbatim.
func (s *aqlBuilder) ExitObject_element_name(ctx *parser.Object_element_nameContext) {
	switch {
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.T_QUOTED_STRING() != nil:
		s.push(ctx.T_QUOTED_STRING().GetText())
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}

8
caql/errors.go Normal file
View File

@@ -0,0 +1,8 @@
package caql
import "errors"
var (
	// ErrStack signals that the expression stack was in an unexpected state
	// while walking the parse tree.
	ErrStack = errors.New("unexpected operator stack")
	// ErrUndefined signals a reference to a variable that is not defined.
	ErrUndefined = errors.New("variable not defined")
)

750
caql/function.go Normal file
View File

@@ -0,0 +1,750 @@
package caql
import (
"errors"
"fmt"
"math"
"math/rand"
"sort"
"strings"
"unicode/utf8"
"github.com/imdario/mergo"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
func (s *aqlInterpreter) function(ctx *parser.Function_callContext) {
switch strings.ToUpper(ctx.T_STRING().GetText()) {
default:
s.appendErrors(errors.New("unknown function"))
// Array https://www.arangodb.com/docs/stable/aql/functions-array.html
case "APPEND":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
seen := map[interface{}]bool{}
values, anyArray := s.pop().([]interface{}), s.pop().([]interface{})
if u {
for _, e := range anyArray {
seen[e] = true
}
}
for _, e := range values {
_, ok := seen[e]
if !ok || !u {
seen[e] = true
anyArray = append(anyArray, e)
}
}
s.push(anyArray)
case "COUNT_DISTINCT", "COUNT_UNIQUE":
count := 0
seen := map[interface{}]bool{}
array := s.pop().([]interface{})
for _, e := range array {
_, ok := seen[e]
if !ok {
seen[e] = true
count += 1
}
}
s.push(float64(count))
case "FIRST":
array := s.pop().([]interface{})
if len(array) == 0 {
s.push(nil)
} else {
s.push(array[0])
}
// case "FLATTEN":
// case "INTERLEAVE":
case "INTERSECTION":
iset := New(s.pop().([]interface{})...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Intersection(New(s.pop().([]interface{})...))
}
s.push(iset.Values())
// case "JACCARD":
case "LAST":
array := s.pop().([]interface{})
if len(array) == 0 {
s.push(nil)
} else {
s.push(array[len(array)-1])
}
case "COUNT", "LENGTH":
switch v := s.pop().(type) {
case nil:
s.push(float64(0))
case bool:
if v {
s.push(float64(1))
} else {
s.push(float64(0))
}
case float64:
s.push(float64(len(fmt.Sprint(v))))
case string:
s.push(float64(utf8.RuneCountInString(v)))
case []interface{}:
s.push(float64(len(v)))
case map[string]interface{}:
s.push(float64(len(v)))
default:
panic("unknown type")
}
case "MINUS":
var sets []*Set
for i := 0; i < len(ctx.AllExpression()); i++ {
sets = append(sets, New(s.pop().([]interface{})...))
}
iset := sets[len(sets)-1]
// for i := len(sets)-1; i > 0; i-- {
for i := 0; i < len(sets)-1; i++ {
iset = iset.Minus(sets[i])
}
s.push(iset.Values())
case "NTH":
pos := s.pop().(float64)
array := s.pop().([]interface{})
if int(pos) >= len(array) || pos < 0 {
s.push(nil)
} else {
s.push(array[int64(pos)])
}
// case "OUTERSECTION":
// array := s.pop().([]interface{})
// union := New(array...)
// intersection := New(s.pop().([]interface{})...)
// for i := 1; i < len(ctx.AllExpression()); i++ {
// array = s.pop().([]interface{})
// union = union.Union(New(array...))
// intersection = intersection.Intersection(New(array...))
// }
// s.push(union.Minus(intersection).Values())
case "POP":
array := s.pop().([]interface{})
s.push(array[:len(array)-1])
case "POSITION", "CONTAINS_ARRAY":
returnIndex := false
if len(ctx.AllExpression()) == 3 {
returnIndex = s.pop().(bool)
}
search := s.pop()
array := s.pop().([]interface{})
for idx, e := range array {
if e == search {
if returnIndex {
s.push(float64(idx))
} else {
s.push(true)
}
}
}
if returnIndex {
s.push(float64(-1))
} else {
s.push(false)
}
case "PUSH":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]interface{})
if u && contains(array, element) {
s.push(array)
} else {
s.push(append(array, element))
}
case "REMOVE_NTH":
position := s.pop().(float64)
anyArray := s.pop().([]interface{})
if position < 0 {
position = float64(len(anyArray) + int(position))
}
result := []interface{}{}
for idx, e := range anyArray {
if idx != int(position) {
result = append(result, e)
}
}
s.push(result)
case "REPLACE_NTH":
defaultPaddingValue := ""
if len(ctx.AllExpression()) == 4 {
defaultPaddingValue = s.pop().(string)
}
replaceValue := s.pop().(string)
position := s.pop().(float64)
anyArray := s.pop().([]interface{})
if position < 0 {
position = float64(len(anyArray) + int(position))
if position < 0 {
position = 0
}
}
switch {
case int(position) < len(anyArray):
anyArray[int(position)] = replaceValue
case int(position) == len(anyArray):
anyArray = append(anyArray, replaceValue)
default:
if defaultPaddingValue == "" {
panic("missing defaultPaddingValue")
}
for len(anyArray) < int(position) {
anyArray = append(anyArray, defaultPaddingValue)
}
anyArray = append(anyArray, replaceValue)
}
s.push(anyArray)
case "REMOVE_VALUE":
limit := math.Inf(1)
if len(ctx.AllExpression()) == 3 {
limit = s.pop().(float64)
}
value := s.pop()
array := s.pop().([]interface{})
result := []interface{}{}
for idx, e := range array {
if e != value || float64(idx) > limit {
result = append(result, e)
}
}
s.push(result)
case "REMOVE_VALUES":
values := s.pop().([]interface{})
array := s.pop().([]interface{})
result := []interface{}{}
for _, e := range array {
if !contains(values, e) {
result = append(result, e)
}
}
s.push(result)
case "REVERSE":
array := s.pop().([]interface{})
var reverse []interface{}
for _, e := range array {
reverse = append([]interface{}{e}, reverse...)
}
s.push(reverse)
case "SHIFT":
s.push(s.pop().([]interface{})[1:])
case "SLICE":
length := float64(-1)
full := true
if len(ctx.AllExpression()) == 3 {
length = s.pop().(float64)
full = false
}
start := int64(s.pop().(float64))
array := s.pop().([]interface{})
if start < 0 {
start = int64(len(array)) + start
}
if full {
length = float64(int64(len(array)) - start)
}
end := int64(0)
if length < 0 {
end = int64(len(array)) + int64(length)
} else {
end = start + int64(length)
}
s.push(array[start:end])
case "SORTED":
array := s.pop().([]interface{})
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "SORTED_UNIQUE":
array := s.pop().([]interface{})
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(unique(array))
case "UNION":
array := s.pop().([]interface{})
for i := 1; i < len(ctx.AllExpression()); i++ {
array = append(array, s.pop().([]interface{})...)
}
sort.Slice(array, func(i, j int) bool { return lt(array[i], array[j]) })
s.push(array)
case "UNION_DISTINCT":
iset := New(s.pop().([]interface{})...)
for i := 1; i < len(ctx.AllExpression()); i++ {
iset = iset.Union(New(s.pop().([]interface{})...))
}
s.push(unique(iset.Values()))
case "UNIQUE":
s.push(unique(s.pop().([]interface{})))
case "UNSHIFT":
u := false
if len(ctx.AllExpression()) == 3 {
u = s.pop().(bool)
}
element := s.pop()
array := s.pop().([]interface{})
if u && contains(array, element) {
s.push(array)
} else {
s.push(append([]interface{}{element}, array...))
}
// Bit https://www.arangodb.com/docs/stable/aql/functions-bit.html
// case "BIT_AND":
// case "BIT_CONSTRUCT":
// case "BIT_DECONSTRUCT":
// case "BIT_FROM_STRING":
// case "BIT_NEGATE":
// case "BIT_OR":
// case "BIT_POPCOUNT":
// case "BIT_SHIFT_LEFT":
// case "BIT_SHIFT_RIGHT":
// case "BIT_TEST":
// case "BIT_TO_STRING":
// case "BIT_XOR":
// Date https://www.arangodb.com/docs/stable/aql/functions-date.html
// case "DATE_NOW":
// case "DATE_ISO8601":
// case "DATE_TIMESTAMP":
// case "IS_DATESTRING":
// case "DATE_DAYOFWEEK":
// case "DATE_YEAR":
// case "DATE_MONTH":
// case "DATE_DAY":
// case "DATE_HOUR":
// case "DATE_MINUTE":
// case "DATE_SECOND":
// case "DATE_MILLISECOND":
// case "DATE_DAYOFYEAR":
// case "DATE_ISOWEEK":
// case "DATE_LEAPYEAR":
// case "DATE_QUARTER":
// case "DATE_DAYS_IN_MONTH":
// case "DATE_TRUNC":
// case "DATE_ROUND":
// case "DATE_FORMAT":
// case "DATE_ADD":
// case "DATE_SUBTRACT":
// case "DATE_DIFF":
// case "DATE_COMPARE":
// Document https://www.arangodb.com/docs/stable/aql/functions-document.html
case "ATTRIBUTES":
if len(ctx.AllExpression()) == 3 {
s.pop() // always sort
}
removeInternal := false
if len(ctx.AllExpression()) >= 2 {
removeInternal = s.pop().(bool)
}
var keys []interface{}
for k := range s.pop().(map[string]interface{}) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
keys = append(keys, k)
}
}
sort.Slice(keys, func(i, j int) bool { return lt(keys[i], keys[j]) })
s.push(keys)
// case "COUNT":
case "HAS":
right, left := s.pop(), s.pop()
_, ok := left.(map[string]interface{})[right.(string)]
s.push(ok)
// case "KEEP":
// case "LENGTH":
// case "MATCHES":
case "MERGE":
var docs []map[string]interface{}
if len(ctx.AllExpression()) == 1 {
for _, doc := range s.pop().([]interface{}) {
docs = append([]map[string]interface{}{doc.(map[string]interface{})}, docs...)
}
} else {
for i := 0; i < len(ctx.AllExpression()); i++ {
docs = append(docs, s.pop().(map[string]interface{}))
}
}
doc := docs[len(docs)-1]
for i := len(docs) - 2; i >= 0; i-- {
for k, v := range docs[i] {
doc[k] = v
}
}
s.push(doc)
case "MERGE_RECURSIVE":
var doc map[string]interface{}
for i := 0; i < len(ctx.AllExpression()); i++ {
err := mergo.Merge(&doc, s.pop().(map[string]interface{}))
if err != nil {
panic(err)
}
}
s.push(doc)
// case "PARSE_IDENTIFIER":
// case "TRANSLATE":
// case "UNSET":
// case "UNSET_RECURSIVE":
case "VALUES":
removeInternal := false
if len(ctx.AllExpression()) == 2 {
removeInternal = s.pop().(bool)
}
var values []interface{}
for k, v := range s.pop().(map[string]interface{}) {
isInternalKey := strings.HasPrefix(k, "_")
if !removeInternal || !isInternalKey {
values = append(values, v)
}
}
sort.Slice(values, func(i, j int) bool { return lt(values[i], values[j]) })
s.push(values)
// case "ZIP":
// Numeric https://www.arangodb.com/docs/stable/aql/functions-numeric.html
case "ABS":
s.push(math.Abs(s.pop().(float64)))
case "ACOS":
v := s.pop().(float64)
asin := math.Acos(v)
if v > 1 || v < -1 {
s.push(nil)
} else {
s.push(asin)
}
case "ASIN":
v := s.pop().(float64)
asin := math.Asin(v)
if v > 1 || v < -1 {
s.push(nil)
} else {
s.push(asin)
}
case "ATAN":
s.push(math.Atan(s.pop().(float64)))
case "ATAN2":
s.push(math.Atan2(s.pop().(float64), s.pop().(float64)))
case "AVERAGE", "AVG":
count := 0
sum := float64(0)
array := s.pop().([]interface{})
for _, element := range array {
if element != nil {
count += 1
sum += toNumber(element)
}
}
if count == 0 {
s.push(nil)
} else {
s.push(sum / float64(count))
}
case "CEIL":
s.push(math.Ceil(s.pop().(float64)))
case "COS":
s.push(math.Cos(s.pop().(float64)))
case "DEGREES":
s.push(s.pop().(float64) * 180 / math.Pi)
case "EXP":
s.push(math.Exp(s.pop().(float64)))
case "EXP2":
s.push(math.Exp2(s.pop().(float64)))
case "FLOOR":
s.push(math.Floor(s.pop().(float64)))
case "LOG":
l := math.Log(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "LOG2":
l := math.Log2(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "LOG10":
l := math.Log10(s.pop().(float64))
if l <= 0 {
s.push(nil)
} else {
s.push(l)
}
case "MAX":
var set bool
var max float64
array := s.pop().([]interface{})
for _, element := range array {
if element != nil {
if !set || toNumber(element) > max {
max = toNumber(element)
set = true
}
}
}
if set {
s.push(max)
} else {
s.push(nil)
}
case "MEDIAN":
array := s.pop().([]interface{})
var numbers []float64
for _, element := range array {
if f, ok := element.(float64); ok {
numbers = append(numbers, f)
}
}
sort.Float64s(numbers) // sort the numbers
middlePos := len(numbers) / 2
switch {
case len(numbers) == 0:
s.push(nil)
case len(numbers)%2 == 1:
s.push(numbers[middlePos])
default:
s.push((numbers[middlePos-1] + numbers[middlePos]) / 2)
}
case "MIN":
var set bool
var min float64
array := s.pop().([]interface{})
for _, element := range array {
if element != nil {
if !set || toNumber(element) < min {
min = toNumber(element)
set = true
}
}
}
if set {
s.push(min)
} else {
s.push(nil)
}
// case "PERCENTILE":
case "PI":
s.push(math.Pi)
case "POW":
right, left := s.pop(), s.pop()
s.push(math.Pow(left.(float64), right.(float64)))
case "PRODUCT":
product := float64(1)
array := s.pop().([]interface{})
for _, element := range array {
if element != nil {
product *= toNumber(element)
}
}
s.push(product)
case "RADIANS":
s.push(s.pop().(float64) * math.Pi / 180)
case "RAND":
s.push(rand.Float64())
case "RANGE":
var array []interface{}
var start, end, step float64
if len(ctx.AllExpression()) == 2 {
right, left := s.pop(), s.pop()
start = math.Trunc(left.(float64))
end = math.Trunc(right.(float64))
step = 1
} else {
middle, right, left := s.pop(), s.pop(), s.pop()
start = left.(float64)
end = right.(float64)
step = middle.(float64)
}
for i := start; i <= end; i += step {
array = append(array, i)
}
s.push(array)
case "ROUND":
x := s.pop().(float64)
t := math.Trunc(x)
if math.Abs(x-t) == 0.5 {
s.push(x + 0.5)
} else {
s.push(math.Round(x))
}
case "SIN":
s.push(math.Sin(s.pop().(float64)))
case "SQRT":
s.push(math.Sqrt(s.pop().(float64)))
// case "STDDEV_POPULATION":
// case "STDDEV_SAMPLE":
// case "STDDEV":
case "SUM":
sum := float64(0)
array := s.pop().([]interface{})
for _, element := range array {
sum += toNumber(element)
}
s.push(sum)
case "TAN":
s.push(math.Tan(s.pop().(float64)))
// case "VARIANCE_POPULATION", "VARIANCE":
// case "VARIANCE_SAMPLE":
// String https://www.arangodb.com/docs/stable/aql/functions-string.html
// case "CHAR_LENGTH":
// case "CONCAT":
// case "CONCAT_SEPARATOR":
// case "CONTAINS":
// case "CRC32":
// case "ENCODE_URI_COMPONENT":
// case "FIND_FIRST":
// case "FIND_LAST":
// case "FNV64":
// case "IPV4_FROM_NUMBER":
// case "IPV4_TO_NUMBER":
// case "IS_IPV4":
// case "JSON_PARSE":
// case "JSON_STRINGIFY":
// case "LEFT":
// case "LENGTH":
// case "LEVENSHTEIN_DISTANCE":
// case "LIKE":
case "LOWER":
s.push(strings.ToLower(s.pop().(string)))
// case "LTRIM":
// case "MD5":
// case "NGRAM_POSITIONAL_SIMILARITY":
// case "NGRAM_SIMILARITY":
// case "RANDOM_TOKEN":
// case "REGEX_MATCHES":
// case "REGEX_SPLIT":
// case "REGEX_TEST":
// case "REGEX_REPLACE":
// case "REVERSE":
// case "RIGHT":
// case "RTRIM":
// case "SHA1":
// case "SHA512":
// case "SOUNDEX":
// case "SPLIT":
// case "STARTS_WITH":
// case "SUBSTITUTE":
// case "SUBSTRING":
// case "TOKENS":
// case "TO_BASE64":
// case "TO_HEX":
// case "TRIM":
case "UPPER":
s.push(strings.ToUpper(s.pop().(string)))
// case "UUID":
// Type cast https://www.arangodb.com/docs/stable/aql/functions-type-cast.html
case "TO_BOOL":
s.push(toBool(s.pop()))
case "TO_NUMBER":
s.push(toNumber(s.pop()))
// case "TO_STRING":
// case "TO_ARRAY":
// case "TO_LIST":
// case "IS_NULL":
// case "IS_BOOL":
// case "IS_NUMBER":
// case "IS_STRING":
// case "IS_ARRAY":
// case "IS_LIST":
// case "IS_OBJECT":
// case "IS_DOCUMENT":
// case "IS_DATESTRING":
// case "IS_IPV4":
// case "IS_KEY":
// case "TYPENAME":
}
}
// unique returns the elements of array in their original order with
// duplicate values removed. Equality is map-key equality, so every
// element must be a hashable type (comparing slices or maps would panic,
// as in any Go map lookup).
func unique(array []interface{}) []interface{} {
	visited := make(map[interface{}]bool, len(array))
	var result []interface{}
	for _, element := range array {
		if visited[element] {
			continue
		}
		visited[element] = true
		result = append(result, element)
	}
	return result
}
// contains reports whether e compares equal (==) to any element of values.
// A nil or empty slice always yields false.
func contains(values []interface{}, e interface{}) bool {
	for i := range values {
		if values[i] == e {
			return true
		}
	}
	return false
}
// stringSliceContains reports whether the string e occurs in values.
// A nil or empty slice always yields false.
func stringSliceContains(values []string, e string) bool {
	for i := range values {
		if e == values[i] {
			return true
		}
	}
	return false
}
// functionNames lists every AQL function name recognized by this package,
// grouped roughly by the ArangoDB documentation categories (array, bit,
// date, document, numeric, string, type cast). The list previously held
// duplicate entries (COUNT, LENGTH, REVERSE, IS_DATESTRING, IS_IPV4);
// those are removed, keeping each name's first occurrence, since the list
// is only meaningful for membership checks.
var functionNames = []string{
	"APPEND", "COUNT_DISTINCT", "COUNT_UNIQUE", "FIRST", "FLATTEN", "INTERLEAVE", "INTERSECTION", "JACCARD", "LAST",
	"COUNT", "LENGTH", "MINUS", "NTH", "OUTERSECTION", "POP", "POSITION", "CONTAINS_ARRAY", "PUSH", "REMOVE_NTH",
	"REPLACE_NTH", "REMOVE_VALUE", "REMOVE_VALUES", "REVERSE", "SHIFT", "SLICE", "SORTED", "SORTED_UNIQUE", "UNION",
	"UNION_DISTINCT", "UNIQUE", "UNSHIFT", "BIT_AND", "BIT_CONSTRUCT", "BIT_DECONSTRUCT", "BIT_FROM_STRING",
	"BIT_NEGATE", "BIT_OR", "BIT_POPCOUNT", "BIT_SHIFT_LEFT", "BIT_SHIFT_RIGHT", "BIT_TEST", "BIT_TO_STRING",
	"BIT_XOR", "DATE_NOW", "DATE_ISO8601", "DATE_TIMESTAMP", "IS_DATESTRING", "DATE_DAYOFWEEK", "DATE_YEAR",
	"DATE_MONTH", "DATE_DAY", "DATE_HOUR", "DATE_MINUTE", "DATE_SECOND", "DATE_MILLISECOND", "DATE_DAYOFYEAR",
	"DATE_ISOWEEK", "DATE_LEAPYEAR", "DATE_QUARTER", "DATE_DAYS_IN_MONTH", "DATE_TRUNC", "DATE_ROUND", "DATE_FORMAT",
	"DATE_ADD", "DATE_SUBTRACT", "DATE_DIFF", "DATE_COMPARE", "ATTRIBUTES", "HAS", "KEEP",
	"MATCHES", "MERGE", "MERGE_RECURSIVE", "PARSE_IDENTIFIER", "TRANSLATE", "UNSET", "UNSET_RECURSIVE", "VALUES",
	"ZIP", "ABS", "ACOS", "ASIN", "ATAN", "ATAN2", "AVERAGE", "AVG", "CEIL", "COS", "DEGREES", "EXP", "EXP2", "FLOOR",
	"LOG", "LOG2", "LOG10", "MAX", "MEDIAN", "MIN", "PERCENTILE", "PI", "POW", "PRODUCT", "RADIANS", "RAND", "RANGE",
	"ROUND", "SIN", "SQRT", "STDDEV_POPULATION", "STDDEV_SAMPLE", "STDDEV", "SUM", "TAN", "VARIANCE_POPULATION",
	"VARIANCE", "VARIANCE_SAMPLE", "CHAR_LENGTH", "CONCAT", "CONCAT_SEPARATOR", "CONTAINS", "CRC32",
	"ENCODE_URI_COMPONENT", "FIND_FIRST", "FIND_LAST", "FNV64", "IPV4_FROM_NUMBER", "IPV4_TO_NUMBER", "IS_IPV4",
	"JSON_PARSE", "JSON_STRINGIFY", "LEFT", "LEVENSHTEIN_DISTANCE", "LIKE", "LOWER", "LTRIM", "MD5",
	"NGRAM_POSITIONAL_SIMILARITY", "NGRAM_SIMILARITY", "RANDOM_TOKEN", "REGEX_MATCHES", "REGEX_SPLIT", "REGEX_TEST",
	"REGEX_REPLACE", "RIGHT", "RTRIM", "SHA1", "SHA512", "SOUNDEX", "SPLIT", "STARTS_WITH", "SUBSTITUTE",
	"SUBSTRING", "TOKENS", "TO_BASE64", "TO_HEX", "TRIM", "UPPER", "UUID", "TO_BOOL", "TO_NUMBER", "TO_STRING",
	"TO_ARRAY", "TO_LIST", "IS_NULL", "IS_BOOL", "IS_NUMBER", "IS_STRING", "IS_ARRAY", "IS_LIST", "IS_OBJECT",
	"IS_DOCUMENT", "IS_KEY", "TYPENAME"}

380
caql/function_test.go Normal file
View File

@@ -0,0 +1,380 @@
package caql
import (
"encoding/json"
"math"
"reflect"
"testing"
)
func TestFunctions(t *testing.T) {
tests := []struct {
name string
saql string
wantRebuild string
wantValue interface{}
wantParseErr bool
wantRebuildErr bool
wantEvalErr bool
values string
}{
// https://www.arangodb.com/docs/3.7/aql/functions-array.html
{name: "APPEND", saql: `APPEND([1, 2, 3], [5, 6, 9])`, wantRebuild: `APPEND([1, 2, 3], [5, 6, 9])`, wantValue: jsonParse(`[1, 2, 3, 5, 6, 9]`)},
{name: "APPEND", saql: `APPEND([1, 2, 3], [3, 4, 5, 2, 9], true)`, wantRebuild: `APPEND([1, 2, 3], [3, 4, 5, 2, 9], true)`, wantValue: jsonParse(`[1, 2, 3, 4, 5, 9]`)},
{name: "COUNT_DISTINCT", saql: `COUNT_DISTINCT([1, 2, 3])`, wantRebuild: `COUNT_DISTINCT([1, 2, 3])`, wantValue: 3},
{name: "COUNT_DISTINCT", saql: `COUNT_DISTINCT(["yes", "no", "yes", "sauron", "no", "yes"])`, wantRebuild: `COUNT_DISTINCT(["yes", "no", "yes", "sauron", "no", "yes"])`, wantValue: 3},
{name: "FIRST", saql: `FIRST([1, 2, 3])`, wantRebuild: `FIRST([1, 2, 3])`, wantValue: 1},
{name: "FIRST", saql: `FIRST([])`, wantRebuild: `FIRST([])`, wantValue: nil},
// {name: "FLATTEN", saql: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]])`, wantRebuild: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]])`, wantValue:},
// {name: "FLATTEN", saql: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]], 2)`, wantRebuild: `FLATTEN([1, 2, [3, 4], 5, [6, 7], [8, [9, 10]]], 2)`, wantValue:},
// {name: "INTERLEAVE", saql: `INTERLEAVE([1, 1, 1], [2, 2, 2], [3, 3, 3])`, wantRebuild: `INTERLEAVE([1, 1, 1], [2, 2, 2], [3, 3, 3])`, wantValue:},
// {name: "INTERLEAVE", saql: `INTERLEAVE([1], [2, 2], [3, 3, 3])`, wantRebuild: `INTERLEAVE([1], [2, 2], [3, 3, 3])`, wantValue:},
{name: "INTERSECTION", saql: `INTERSECTION([1,2,3,4,5], [2,3,4,5,6], [3,4,5,6,7])`, wantRebuild: `INTERSECTION([1, 2, 3, 4, 5], [2, 3, 4, 5, 6], [3, 4, 5, 6, 7])`, wantValue: jsonParse(`[3, 4, 5]`)},
{name: "INTERSECTION", saql: `INTERSECTION([2,4,6], [8,10,12], [14,16,18])`, wantRebuild: `INTERSECTION([2, 4, 6], [8, 10, 12], [14, 16, 18])`, wantValue: jsonParse(`[]`)},
// {name: "JACCARD", saql: `JACCARD([1,2,3,4], [3,4,5,6])`, wantRebuild: `JACCARD([1,2,3,4], [3,4,5,6])`, wantValue: 0.3333333333333333},
// {name: "JACCARD", saql: `JACCARD([1,1,2,2,2,3], [2,2,3,4])`, wantRebuild: `JACCARD([1,1,2,2,2,3], [2,2,3,4])`, wantValue: 0.5},
// {name: "JACCARD", saql: `JACCARD([1,2,3], [])`, wantRebuild: `JACCARD([1, 2, 3], [])`, wantValue: 0},
// {name: "JACCARD", saql: `JACCARD([], [])`, wantRebuild: `JACCARD([], [])`, wantValue: 1},
{name: "LAST", saql: `LAST([1,2,3,4,5])`, wantRebuild: `LAST([1, 2, 3, 4, 5])`, wantValue: 5},
{name: "LENGTH", saql: `LENGTH("🥑")`, wantRebuild: `LENGTH("🥑")`, wantValue: 1},
{name: "LENGTH", saql: `LENGTH(1234)`, wantRebuild: `LENGTH(1234)`, wantValue: 4},
{name: "LENGTH", saql: `LENGTH([1,2,3,4,5,6,7])`, wantRebuild: `LENGTH([1, 2, 3, 4, 5, 6, 7])`, wantValue: 7},
{name: "LENGTH", saql: `LENGTH(false)`, wantRebuild: `LENGTH(false)`, wantValue: 0},
{name: "LENGTH", saql: `LENGTH({a:1, b:2, c:3, d:4, e:{f:5,g:6}})`, wantRebuild: `LENGTH({a: 1, b: 2, c: 3, d: 4, e: {f: 5, g: 6}})`, wantValue: 5},
{name: "MINUS", saql: `MINUS([1,2,3,4], [3,4,5,6], [5,6,7,8])`, wantRebuild: `MINUS([1, 2, 3, 4], [3, 4, 5, 6], [5, 6, 7, 8])`, wantValue: jsonParse(`[1, 2]`)},
{name: "NTH", saql: `NTH(["foo", "bar", "baz"], 2)`, wantRebuild: `NTH(["foo", "bar", "baz"], 2)`, wantValue: "baz"},
{name: "NTH", saql: `NTH(["foo", "bar", "baz"], 3)`, wantRebuild: `NTH(["foo", "bar", "baz"], 3)`, wantValue: nil},
{name: "NTH", saql: `NTH(["foo", "bar", "baz"], -1)`, wantRebuild: `NTH(["foo", "bar", "baz"], -1)`, wantValue: nil},
// {name: "OUTERSECTION", saql: `OUTERSECTION([1, 2, 3], [2, 3, 4], [3, 4, 5])`, wantRebuild: `OUTERSECTION([1, 2, 3], [2, 3, 4], [3, 4, 5])`, wantValue: jsonParse(`[1, 5]`)},
{name: "POP", saql: `POP([1, 2, 3, 4])`, wantRebuild: `POP([1, 2, 3, 4])`, wantValue: jsonParse(`[1, 2, 3]`)},
{name: "POP", saql: `POP([1])`, wantRebuild: `POP([1])`, wantValue: jsonParse(`[]`)},
{name: "POSITION", saql: `POSITION([2,4,6,8], 4)`, wantRebuild: `POSITION([2, 4, 6, 8], 4)`, wantValue: true},
{name: "POSITION", saql: `POSITION([2,4,6,8], 4, true)`, wantRebuild: `POSITION([2, 4, 6, 8], 4, true)`, wantValue: 1},
{name: "PUSH", saql: `PUSH([1, 2, 3], 4)`, wantRebuild: `PUSH([1, 2, 3], 4)`, wantValue: jsonParse(`[1, 2, 3, 4]`)},
{name: "PUSH", saql: `PUSH([1, 2, 2, 3], 2, true)`, wantRebuild: `PUSH([1, 2, 2, 3], 2, true)`, wantValue: jsonParse(`[1, 2, 2, 3]`)},
{name: "REMOVE_NTH", saql: `REMOVE_NTH(["a", "b", "c", "d", "e"], 1)`, wantRebuild: `REMOVE_NTH(["a", "b", "c", "d", "e"], 1)`, wantValue: jsonParse(`["a", "c", "d", "e"]`)},
{name: "REMOVE_NTH", saql: `REMOVE_NTH(["a", "b", "c", "d", "e"], -2)`, wantRebuild: `REMOVE_NTH(["a", "b", "c", "d", "e"], -2)`, wantValue: jsonParse(`["a", "b", "c", "e"]`)},
{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 1 , "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 1, "z")`, wantValue: jsonParse(`["a", "z", "c"]`)},
{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 3 , "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 3, "z")`, wantValue: jsonParse(`["a", "b", "c", "z"]`)},
{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], 6, "z", "y")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], 6, "z", "y")`, wantValue: jsonParse(`["a", "b", "c", "y", "y", "y", "z"]`)},
{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], -1, "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], -1, "z")`, wantValue: jsonParse(`["a", "b", "z"]`)},
{name: "REPLACE_NTH", saql: `REPLACE_NTH(["a", "b", "c"], -9, "z")`, wantRebuild: `REPLACE_NTH(["a", "b", "c"], -9, "z")`, wantValue: jsonParse(`["z", "b", "c"]`)},
{name: "REMOVE_VALUE", saql: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a")`, wantRebuild: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a")`, wantValue: jsonParse(`["b", "b", "c"]`)},
{name: "REMOVE_VALUE", saql: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a", 1)`, wantRebuild: `REMOVE_VALUE(["a", "b", "b", "a", "c"], "a", 1)`, wantValue: jsonParse(`["b", "b", "a", "c"]`)},
{name: "REMOVE_VALUES", saql: `REMOVE_VALUES(["a", "a", "b", "c", "d", "e", "f"], ["a", "f", "d"])`, wantRebuild: `REMOVE_VALUES(["a", "a", "b", "c", "d", "e", "f"], ["a", "f", "d"])`, wantValue: jsonParse(`["b", "c", "e"]`)},
{name: "REVERSE", saql: `REVERSE ([2,4,6,8,10])`, wantRebuild: `REVERSE([2, 4, 6, 8, 10])`, wantValue: jsonParse(`[10, 8, 6, 4, 2]`)},
{name: "SHIFT", saql: `SHIFT([1, 2, 3, 4])`, wantRebuild: `SHIFT([1, 2, 3, 4])`, wantValue: jsonParse(`[2, 3, 4]`)},
{name: "SHIFT", saql: `SHIFT([1])`, wantRebuild: `SHIFT([1])`, wantValue: jsonParse(`[]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 0, 1)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 0, 1)`, wantValue: jsonParse(`[1]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 1, 2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 1, 2)`, wantValue: jsonParse(`[2, 3]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 3)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 3)`, wantValue: jsonParse(`[4, 5]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 1, -1)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 1, -1)`, wantValue: jsonParse(`[2, 3, 4]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], 0, -2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], 0, -2)`, wantValue: jsonParse(`[1, 2, 3]`)},
{name: "SLICE", saql: `SLICE([1, 2, 3, 4, 5], -3, 2)`, wantRebuild: `SLICE([1, 2, 3, 4, 5], -3, 2)`, wantValue: jsonParse(`[3, 4]`)},
{name: "SORTED", saql: `SORTED([8,4,2,10,6])`, wantRebuild: `SORTED([8, 4, 2, 10, 6])`, wantValue: jsonParse(`[2, 4, 6, 8, 10]`)},
{name: "SORTED_UNIQUE", saql: `SORTED_UNIQUE([8,4,2,10,6,2,8,6,4])`, wantRebuild: `SORTED_UNIQUE([8, 4, 2, 10, 6, 2, 8, 6, 4])`, wantValue: jsonParse(`[2, 4, 6, 8, 10]`)},
{name: "UNION", saql: `UNION([1, 2, 3], [1, 2])`, wantRebuild: `UNION([1, 2, 3], [1, 2])`, wantValue: jsonParse(`[1, 1, 2, 2, 3]`)},
{name: "UNION_DISTINCT", saql: `UNION_DISTINCT([1, 2, 3], [1, 2])`, wantRebuild: `UNION_DISTINCT([1, 2, 3], [1, 2])`, wantValue: jsonParse(`[1, 2, 3]`)},
{name: "UNIQUE", saql: `UNIQUE([1,2,2,3,3,3,4,4,4,4,5,5,5,5,5])`, wantRebuild: `UNIQUE([1, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 5])`, wantValue: jsonParse(`[1, 2, 3, 4, 5]`)},
{name: "UNSHIFT", saql: `UNSHIFT([1, 2, 3], 4)`, wantRebuild: `UNSHIFT([1, 2, 3], 4)`, wantValue: jsonParse(`[4, 1, 2, 3]`)},
{name: "UNSHIFT", saql: `UNSHIFT([1, 2, 3], 2, true)`, wantRebuild: `UNSHIFT([1, 2, 3], 2, true)`, wantValue: jsonParse(`[1, 2, 3]`)},
// https://www.arangodb.com/docs/3.7/aql/functions-bit.html
// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([1, 2, 3])`, wantRebuild: `BIT_CONSTRUCT([1, 2, 3])`, wantValue: 14},
// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([0, 4, 8])`, wantRebuild: `BIT_CONSTRUCT([0, 4, 8])`, wantValue: 273},
// {name: "BIT_CONSTRUCT", saql: `BIT_CONSTRUCT([0, 1, 10, 31])`, wantRebuild: `BIT_CONSTRUCT([0, 1, 10, 31])`, wantValue: 2147484675},
// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(14)`, wantRebuild: `BIT_DECONSTRUCT(14) `, wantValue: []interface{}{1, 2, 3}},
// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(273)`, wantRebuild: `BIT_DECONSTRUCT(273)`, wantValue: []interface{}{0, 4, 8}},
// {name: "BIT_DECONSTRUCT", saql: `BIT_DECONSTRUCT(2147484675)`, wantRebuild: `BIT_DECONSTRUCT(2147484675)`, wantValue: []interface{}{0, 1, 10, 31}},
// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("0111")`, wantRebuild: `BIT_FROM_STRING("0111")`, wantValue: 7},
// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("000000000000010")`, wantRebuild: `BIT_FROM_STRING("000000000000010")`, wantValue: 2},
// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("11010111011101")`, wantRebuild: `BIT_FROM_STRING("11010111011101")`, wantValue: 13789},
// {name: "BIT_FROM_STRING", saql: `BIT_FROM_STRING("100000000000000000000")`, wantRebuild: `BIT_FROM_STRING("100000000000000000000")`, wantValue: 1048756},
// {name: "BIT_NEGATE", saql: `BIT_NEGATE(0, 8)`, wantRebuild: `BIT_NEGATE(0, 8)`, wantValue: 255},
// {name: "BIT_NEGATE", saql: `BIT_NEGATE(0, 10)`, wantRebuild: `BIT_NEGATE(0, 10)`, wantValue: 1023},
// {name: "BIT_NEGATE", saql: `BIT_NEGATE(3, 4)`, wantRebuild: `BIT_NEGATE(3, 4)`, wantValue: 12},
// {name: "BIT_NEGATE", saql: `BIT_NEGATE(446359921, 32)`, wantRebuild: `BIT_NEGATE(446359921, 32)`, wantValue: 3848607374},
// {name: "BIT_OR", saql: `BIT_OR([1, 4, 8, 16])`, wantRebuild: `BIT_OR([1, 4, 8, 16])`, wantValue: 29},
// {name: "BIT_OR", saql: `BIT_OR([3, 7, 63])`, wantRebuild: `BIT_OR([3, 7, 63])`, wantValue: 63},
// {name: "BIT_OR", saql: `BIT_OR([255, 127, null, 63])`, wantRebuild: `BIT_OR([255, 127, null, 63])`, wantValue: 255},
// {name: "BIT_OR", saql: `BIT_OR(255, 127)`, wantRebuild: `BIT_OR(255, 127)`, wantValue: 255},
// {name: "BIT_OR", saql: `BIT_OR("foo")`, wantRebuild: `BIT_OR("foo")`, wantValue: nil},
// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(0)`, wantRebuild: `BIT_POPCOUNT(0)`, wantValue: 0},
// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(255)`, wantRebuild: `BIT_POPCOUNT(255)`, wantValue: 8},
// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT(69399252)`, wantRebuild: `BIT_POPCOUNT(69399252)`, wantValue: 12},
// {name: "BIT_POPCOUNT", saql: `BIT_POPCOUNT("foo")`, wantRebuild: `BIT_POPCOUNT("foo")`, wantValue: nil},
// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(0, 1, 8)`, wantRebuild: `BIT_SHIFT_LEFT(0, 1, 8)`, wantValue: 0},
// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(7, 1, 16)`, wantRebuild: `BIT_SHIFT_LEFT(7, 1, 16)`, wantValue: 14},
// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(2, 10, 16)`, wantRebuild: `BIT_SHIFT_LEFT(2, 10, 16)`, wantValue: 2048},
// {name: "BIT_SHIFT_LEFT", saql: `BIT_SHIFT_LEFT(878836, 16, 32)`, wantRebuild: `BIT_SHIFT_LEFT(878836, 16, 32)`, wantValue: 1760821248},
// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(0, 1, 8)`, wantRebuild: `BIT_SHIFT_RIGHT(0, 1, 8)`, wantValue: 0},
// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(33, 1, 16)`, wantRebuild: `BIT_SHIFT_RIGHT(33, 1, 16)`, wantValue: 16},
// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(65536, 13, 16)`, wantRebuild: `BIT_SHIFT_RIGHT(65536, 13, 16)`, wantValue: 8},
// {name: "BIT_SHIFT_RIGHT", saql: `BIT_SHIFT_RIGHT(878836, 4, 32)`, wantRebuild: `BIT_SHIFT_RIGHT(878836, 4, 32)`, wantValue: 54927},
// {name: "BIT_TEST", saql: `BIT_TEST(0, 3)`, wantRebuild: `BIT_TEST(0, 3)`, wantValue: false},
// {name: "BIT_TEST", saql: `BIT_TEST(255, 0)`, wantRebuild: `BIT_TEST(255, 0)`, wantValue: true},
// {name: "BIT_TEST", saql: `BIT_TEST(7, 2)`, wantRebuild: `BIT_TEST(7, 2)`, wantValue: true},
// {name: "BIT_TEST", saql: `BIT_TEST(255, 8)`, wantRebuild: `BIT_TEST(255, 8)`, wantValue: false},
// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(7, 4)`, wantRebuild: `BIT_TO_STRING(7, 4)`, wantValue: "0111"},
// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(255, 8)`, wantRebuild: `BIT_TO_STRING(255, 8)`, wantValue: "11111111"},
// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(60, 8)`, wantRebuild: `BIT_TO_STRING(60, 8)`, wantValue: "00011110"},
// {name: "BIT_TO_STRING", saql: `BIT_TO_STRING(1048576, 32)`, wantRebuild: `BIT_TO_STRING(1048576, 32)`, wantValue: "00000000000100000000000000000000"},
// {name: "BIT_XOR", saql: `BIT_XOR([1, 4, 8, 16])`, wantRebuild: `BIT_XOR([1, 4, 8, 16])`, wantValue: 29},
// {name: "BIT_XOR", saql: `BIT_XOR([3, 7, 63])`, wantRebuild: `BIT_XOR([3, 7, 63])`, wantValue: 59},
// {name: "BIT_XOR", saql: `BIT_XOR([255, 127, null, 63])`, wantRebuild: `BIT_XOR([255, 127, null, 63])`, wantValue: 191},
// {name: "BIT_XOR", saql: `BIT_XOR(255, 257)`, wantRebuild: `BIT_XOR(255, 257)`, wantValue: 510},
// {name: "BIT_XOR", saql: `BIT_XOR("foo")`, wantRebuild: `BIT_XOR("foo")`, wantValue: nil},
// https://www.arangodb.com/docs/3.7/aql/functions-date.html
// DATE_TIMESTAMP("2014-05-07T14:19:09.522")
// DATE_TIMESTAMP("2014-05-07T14:19:09.522Z")
// DATE_TIMESTAMP("2014-05-07 14:19:09.522")
// DATE_TIMESTAMP("2014-05-07 14:19:09.522Z")
// DATE_TIMESTAMP(2014, 5, 7, 14, 19, 9, 522)
// DATE_TIMESTAMP(1399472349522)
// DATE_ISO8601("2014-05-07T14:19:09.522Z")
// DATE_ISO8601("2014-05-07 14:19:09.522Z")
// DATE_ISO8601(2014, 5, 7, 14, 19, 9, 522)
// DATE_ISO8601(1399472349522)
// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(2016, 12, -1)`, wantRebuild: `DATE_TIMESTAMP(2016, 12, -1)`, wantValue: nil},
// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(2016, 2, 32)`, wantRebuild: `DATE_TIMESTAMP(2016, 2, 32)`, wantValue: 1456963200000},
// {name: "DATE_TIMESTAMP", saql: `DATE_TIMESTAMP(1970, 1, 1, 26)`, wantRebuild: `DATE_TIMESTAMP(1970, 1, 1, 26)`, wantValue: 93600000},
// {name: "DATE_TRUNC", saql: `DATE_TRUNC('2017-02-03', 'month')`, wantRebuild: `DATE_TRUNC('2017-02-03', 'month')`, wantValue: "2017-02-01T00:00:00.000Z"},
// {name: "DATE_TRUNC", saql: `DATE_TRUNC('2017-02-03 04:05:06', 'hours')`, wantRebuild: `DATE_TRUNC('2017-02-03 04:05:06', 'hours')`, wantValue: "2017-02-03 04:00:00.000Z"},
// {name: "DATE_ROUND", saql: `DATE_ROUND('2000-04-28T11:11:11.111Z', 1, 'day')`, wantRebuild: `DATE_ROUND('2000-04-28T11:11:11.111Z', 1, 'day')`, wantValue: "2000-04-28T00:00:00.000Z"},
// {name: "DATE_ROUND", saql: `DATE_ROUND('2000-04-10T11:39:29Z', 15, 'minutes')`, wantRebuild: `DATE_ROUND('2000-04-10T11:39:29Z', 15, 'minutes')`, wantValue: "2000-04-10T11:30:00.000Z"},
// {name: "DATE_FORMAT", saql: `DATE_FORMAT(DATE_NOW(), "%q/%yyyy")`, wantRebuild: `DATE_FORMAT(DATE_NOW(), "%q/%yyyy")`},
// {name: "DATE_FORMAT", saql: `DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss,%fff")`, wantRebuild: `DATE_FORMAT(DATE_NOW(), "%dd.%mm.%yyyy %hh:%ii:%ss,%fff")`, wantValue: "18.09.2015 15:30:49,374"},
// {name: "DATE_FORMAT", saql: `DATE_FORMAT("1969", "Summer of '%yy")`, wantRebuild: `DATE_FORMAT("1969", "Summer of '%yy")`, wantValue: "Summer of '69"},
// {name: "DATE_FORMAT", saql: `DATE_FORMAT("2016", "%%l = %l")`, wantRebuild: `DATE_FORMAT("2016", "%%l = %l")`, wantValue: "%l = 1"},
// {name: "DATE_FORMAT", saql: `DATE_FORMAT("2016-03-01", "%xxx%")`, wantRebuild: `DATE_FORMAT("2016-03-01", "%xxx%")`, wantValue: "063, trailing % ignored"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), -1, "day")`, wantRebuild: `DATE_ADD(DATE_NOW(), -1, "day")`, wantValue: "yesterday; also see DATE_SUBTRACT()"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), 3, "months")`, wantRebuild: `DATE_ADD(DATE_NOW(), 3, "months")`, wantValue: "in three months"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_ADD("2015-04-01", 5, "years"), 1, "month")`, wantRebuild: `DATE_ADD(DATE_ADD("2015-04-01", 5, "years"), 1, "month")`, wantValue: "May 1st 2020"},
// {name: "DATE_ADD", saql: `DATE_ADD("2015-04-01", 12*5 + 1, "months")`, wantRebuild: `DATE_ADD("2015-04-01", 12*5 + 1, "months")`, wantValue: "also May 1st 2020"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), -4, "years")`, wantRebuild: `DATE_ADD(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), -4, "years")`, wantValue: "Christmas four years ago"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_ADD("2016-02", "month", 1), -1, "day")`, wantRebuild: `DATE_ADD(DATE_ADD("2016-02", "month", 1), -1, "day")`, wantValue: "last day of February (29th, because 2016 is a leap year!)"},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P1Y")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P1Y")`},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P3M2W")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P3M2W")`},
// {name: "DATE_ADD", saql: `DATE_ADD(DATE_NOW(), "P5DT26H")`, wantRebuild: `DATE_ADD(DATE_NOW(), "P5DT26H")`},
// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "PT4H")`, wantRebuild: `DATE_ADD("2000-01-01", "PT4H")`},
// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "PT30M44.4S"`, wantRebuild: `DATE_ADD("2000-01-01", "PT30M44.4S"`},
// {name: "DATE_ADD", saql: `DATE_ADD("2000-01-01", "P1Y2M3W4DT5H6M7.89S"`, wantRebuild: `DATE_ADD("2000-01-01", "P1Y2M3W4DT5H6M7.89S"`},
// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), 1, "day")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), 1, "day")`},
// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), 4, "years")`, wantRebuild: `DATE_SUBTRACT(DATE_TIMESTAMP(DATE_YEAR(DATE_NOW()), 12, 24), 4, "years")`},
// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_ADD("2016-02", "month", 1), 1, "day")`, wantRebuild: `DATE_SUBTRACT(DATE_ADD("2016-02", "month", 1), 1, "day")`},
// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), "P4D")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), "P4D")`},
// {name: "DATE_SUBTRACT", saql: `DATE_SUBTRACT(DATE_NOW(), "PT1H3M")`, wantRebuild: `DATE_SUBTRACT(DATE_NOW(), "PT1H3M")`},
// DATE_COMPARE("1985-04-04", DATE_NOW(), "months", "days")
// DATE_COMPARE("1984-02-29", DATE_NOW(), "months", "days")
// DATE_COMPARE("2001-01-01T15:30:45.678Z", "2001-01-01T08:08:08.008Z", "years", "days")
// https://www.arangodb.com/docs/3.7/aql/functions-document.html
{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"})`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"})`, wantValue: jsonParse(`["_custom", "_key", "foo"]`)},
{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, true)`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, true)`, wantValue: jsonParse(`["foo"]`)},
{name: "ATTRIBUTES", saql: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, false, true)`, wantRebuild: `ATTRIBUTES({"foo": "bar", "_key": "123", "_custom": "yes"}, false, true)`, wantValue: jsonParse(`["_custom", "_key", "foo"]`)},
{name: "HAS", saql: `HAS({name: "Jane"}, "name")`, wantRebuild: `HAS({name: "Jane"}, "name")`, wantValue: true},
{name: "HAS", saql: `HAS({name: "Jane"}, "age")`, wantRebuild: `HAS({name: "Jane"}, "age")`, wantValue: false},
{name: "HAS", saql: `HAS({name: null}, "name")`, wantRebuild: `HAS({name: null}, "name")`, wantValue: true},
// KEEP(doc, "firstname", "name", "likes")
// KEEP(doc, ["firstname", "name", "likes"])
// MATCHES({name: "jane", age: 27, active: true}, {age: 27, active: true})
// MATCHES({"test": 1}, [{"test": 1, "foo": "bar"}, {"foo": 1}, {"test": 1}], true)
{name: "MERGE", saql: `MERGE({"user1": {"name": "Jane"}}, {"user2": {"name": "Tom"}})`, wantRebuild: `MERGE({"user1": {"name": "Jane"}}, {"user2": {"name": "Tom"}})`, wantValue: jsonParse(`{"user1": {"name": "Jane"}, "user2": {"name": "Tom"}}`)},
{name: "MERGE", saql: `MERGE({"users": {"name": "Jane"}}, {"users": {"name": "Tom"}})`, wantRebuild: `MERGE({"users": {"name": "Jane"}}, {"users": {"name": "Tom"}})`, wantValue: jsonParse(`{"users": {"name": "Tom"}}`)},
{name: "MERGE", saql: `MERGE([{foo: "bar"}, {quux: "quetzalcoatl", ruled: true}, {bar: "baz", foo: "done"}])`, wantRebuild: `MERGE([{foo: "bar"}, {quux: "quetzalcoatl", ruled: true}, {bar: "baz", foo: "done"}])`, wantValue: jsonParse(`{"foo": "done", "quux": "quetzalcoatl", "ruled": true, "bar": "baz"}`)},
{name: "MERGE_RECURSIVE", saql: `MERGE_RECURSIVE({"user-1": {"name": "Jane", "livesIn": {"city": "LA"}}}, {"user-1": {"age": 42, "livesIn": {"state": "CA"}}})`, wantRebuild: `MERGE_RECURSIVE({"user-1": {"name": "Jane", "livesIn": {"city": "LA"}}}, {"user-1": {"age": 42, "livesIn": {"state": "CA"}}})`, wantValue: jsonParse(`{"user-1": {"name": "Jane", "livesIn": {"city": "LA", "state": "CA"}, "age": 42}}`)},
// {name: "TRANSLATE", saql: `TRANSLATE("FR", {US: "United States", UK: "United Kingdom", FR: "France"})`, wantRebuild: `TRANSLATE("FR", {US: "United States", UK: "United Kingdom", FR: "France"})`, wantValue: "France"},
// {name: "TRANSLATE", saql: `TRANSLATE(42, {foo: "bar", bar: "baz"})`, wantRebuild: `TRANSLATE(42, {foo: "bar", bar: "baz"})`, wantValue: 42},
// {name: "TRANSLATE", saql: `TRANSLATE(42, {foo: "bar", bar: "baz"}, "not found!")`, wantRebuild: `TRANSLATE(42, {foo: "bar", bar: "baz"}, "not found!")`, wantValue: "not found!"},
// UNSET(doc, "_id", "_key", "foo", "bar")
// UNSET(doc, ["_id", "_key", "foo", "bar"])
// UNSET_RECURSIVE(doc, "_id", "_key", "foo", "bar")
// UNSET_RECURSIVE(doc, ["_id", "_key", "foo", "bar"])
{name: "VALUES", saql: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35})`, wantRebuild: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35})`, wantValue: jsonParse(`[35, "Jane", "users/jane"]`)},
{name: "VALUES", saql: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35}, true)`, wantRebuild: `VALUES({"_key": "users/jane", "name": "Jane", "age": 35}, true)`, wantValue: jsonParse(`[35, "Jane"]`)},
// {name: "ZIP", saql: `ZIP(["name", "active", "hobbies"], ["some user", true, ["swimming", "riding"]])`, wantRebuild: `ZIP(["name", "active", "hobbies"], ["some user", true, ["swimming", "riding"]])`, wantValue: jsonParse(`{"name": "some user", "active": true, "hobbies": ["swimming", "riding"]}`)},
// https://www.arangodb.com/docs/3.7/aql/functions-numeric.html
{name: "ABS", saql: `ABS(-5)`, wantRebuild: `ABS(-5)`, wantValue: 5},
{name: "ABS", saql: `ABS(+5)`, wantRebuild: `ABS(5)`, wantValue: 5},
{name: "ABS", saql: `ABS(3.5)`, wantRebuild: `ABS(3.5)`, wantValue: 3.5},
{name: "ACOS", saql: `ACOS(-1)`, wantRebuild: `ACOS(-1)`, wantValue: 3.141592653589793},
{name: "ACOS", saql: `ACOS(0)`, wantRebuild: `ACOS(0)`, wantValue: 1.5707963267948966},
{name: "ACOS", saql: `ACOS(1)`, wantRebuild: `ACOS(1)`, wantValue: 0},
{name: "ACOS", saql: `ACOS(2)`, wantRebuild: `ACOS(2)`, wantValue: nil},
{name: "ASIN", saql: `ASIN(1)`, wantRebuild: `ASIN(1)`, wantValue: 1.5707963267948966},
{name: "ASIN", saql: `ASIN(0)`, wantRebuild: `ASIN(0)`, wantValue: 0},
{name: "ASIN", saql: `ASIN(-1)`, wantRebuild: `ASIN(-1)`, wantValue: -1.5707963267948966},
{name: "ASIN", saql: `ASIN(2)`, wantRebuild: `ASIN(2)`, wantValue: nil},
{name: "ATAN", saql: `ATAN(-1)`, wantRebuild: `ATAN(-1)`, wantValue: -0.7853981633974483},
{name: "ATAN", saql: `ATAN(0)`, wantRebuild: `ATAN(0)`, wantValue: 0},
{name: "ATAN", saql: `ATAN(10)`, wantRebuild: `ATAN(10)`, wantValue: 1.4711276743037347},
{name: "AVERAGE", saql: `AVERAGE([5, 2, 9, 2])`, wantRebuild: `AVERAGE([5, 2, 9, 2])`, wantValue: 4.5},
{name: "AVERAGE", saql: `AVERAGE([-3, -5, 2])`, wantRebuild: `AVERAGE([-3, -5, 2])`, wantValue: -2},
{name: "AVERAGE", saql: `AVERAGE([999, 80, 4, 4, 4, 3, 3, 3])`, wantRebuild: `AVERAGE([999, 80, 4, 4, 4, 3, 3, 3])`, wantValue: 137.5},
{name: "CEIL", saql: `CEIL(2.49)`, wantRebuild: `CEIL(2.49)`, wantValue: 3},
{name: "CEIL", saql: `CEIL(2.50)`, wantRebuild: `CEIL(2.50)`, wantValue: 3},
{name: "CEIL", saql: `CEIL(-2.50)`, wantRebuild: `CEIL(-2.50)`, wantValue: -2},
{name: "CEIL", saql: `CEIL(-2.51)`, wantRebuild: `CEIL(-2.51)`, wantValue: -2},
{name: "COS", saql: `COS(1)`, wantRebuild: `COS(1)`, wantValue: 0.5403023058681398},
{name: "COS", saql: `COS(0)`, wantRebuild: `COS(0)`, wantValue: 1},
{name: "COS", saql: `COS(-3.141592653589783)`, wantRebuild: `COS(-3.141592653589783)`, wantValue: -1},
{name: "COS", saql: `COS(RADIANS(45))`, wantRebuild: `COS(RADIANS(45))`, wantValue: 0.7071067811865476},
{name: "DEGREES", saql: `DEGREES(0.7853981633974483)`, wantRebuild: `DEGREES(0.7853981633974483)`, wantValue: 45},
{name: "DEGREES", saql: `DEGREES(0)`, wantRebuild: `DEGREES(0)`, wantValue: 0},
{name: "DEGREES", saql: `DEGREES(3.141592653589793)`, wantRebuild: `DEGREES(3.141592653589793)`, wantValue: 180},
{name: "EXP", saql: `EXP(1)`, wantRebuild: `EXP(1)`, wantValue: 2.718281828459045},
{name: "EXP", saql: `EXP(10)`, wantRebuild: `EXP(10)`, wantValue: 22026.46579480671},
{name: "EXP", saql: `EXP(0)`, wantRebuild: `EXP(0)`, wantValue: 1},
{name: "EXP2", saql: `EXP2(16)`, wantRebuild: `EXP2(16)`, wantValue: 65536},
{name: "EXP2", saql: `EXP2(1)`, wantRebuild: `EXP2(1)`, wantValue: 2},
{name: "EXP2", saql: `EXP2(0)`, wantRebuild: `EXP2(0)`, wantValue: 1},
{name: "FLOOR", saql: `FLOOR(2.49)`, wantRebuild: `FLOOR(2.49)`, wantValue: 2},
{name: "FLOOR", saql: `FLOOR(2.50)`, wantRebuild: `FLOOR(2.50)`, wantValue: 2},
{name: "FLOOR", saql: `FLOOR(-2.50)`, wantRebuild: `FLOOR(-2.50)`, wantValue: -3},
{name: "FLOOR", saql: `FLOOR(-2.51)`, wantRebuild: `FLOOR(-2.51)`, wantValue: -3},
{name: "LOG", saql: `LOG(2.718281828459045)`, wantRebuild: `LOG(2.718281828459045)`, wantValue: 1},
{name: "LOG", saql: `LOG(10)`, wantRebuild: `LOG(10)`, wantValue: 2.302585092994046},
{name: "LOG", saql: `LOG(0)`, wantRebuild: `LOG(0)`, wantValue: nil},
{name: "LOG2", saql: `LOG2(1024)`, wantRebuild: `LOG2(1024)`, wantValue: 10},
{name: "LOG2", saql: `LOG2(8)`, wantRebuild: `LOG2(8)`, wantValue: 3},
{name: "LOG2", saql: `LOG2(0)`, wantRebuild: `LOG2(0)`, wantValue: nil},
{name: "LOG10", saql: `LOG10(10000)`, wantRebuild: `LOG10(10000)`, wantValue: 4},
{name: "LOG10", saql: `LOG10(10)`, wantRebuild: `LOG10(10)`, wantValue: 1},
{name: "LOG10", saql: `LOG10(0)`, wantRebuild: `LOG10(0)`, wantValue: nil},
{name: "MAX", saql: `MAX([5, 9, -2, null, 1])`, wantRebuild: `MAX([5, 9, -2, null, 1])`, wantValue: 9},
{name: "MAX", saql: `MAX([null, null])`, wantRebuild: `MAX([null, null])`, wantValue: nil},
{name: "MEDIAN", saql: `MEDIAN([1, 2, 3])`, wantRebuild: `MEDIAN([1, 2, 3])`, wantValue: 2},
{name: "MEDIAN", saql: `MEDIAN([1, 2, 3, 4])`, wantRebuild: `MEDIAN([1, 2, 3, 4])`, wantValue: 2.5},
{name: "MEDIAN", saql: `MEDIAN([4, 2, 3, 1])`, wantRebuild: `MEDIAN([4, 2, 3, 1])`, wantValue: 2.5},
{name: "MEDIAN", saql: `MEDIAN([999, 80, 4, 4, 4, 3, 3, 3])`, wantRebuild: `MEDIAN([999, 80, 4, 4, 4, 3, 3, 3])`, wantValue: 4},
{name: "MIN", saql: `MIN([5, 9, -2, null, 1])`, wantRebuild: `MIN([5, 9, -2, null, 1])`, wantValue: -2},
{name: "MIN", saql: `MIN([null, null])`, wantRebuild: `MIN([null, null])`, wantValue: nil},
// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50)`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50)`, wantValue: 2},
// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50, "rank")`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50, "rank")`, wantValue: 2},
// {name: "PERCENTILE", saql: `PERCENTILE([1, 2, 3, 4], 50, "interpolation")`, wantRebuild: `PERCENTILE([1, 2, 3, 4], 50, "interpolation")`, wantValue: 2.5},
{name: "PI", saql: `PI()`, wantRebuild: `PI()`, wantValue: 3.141592653589793},
{name: "POW", saql: `POW(2, 4)`, wantRebuild: `POW(2, 4)`, wantValue: 16},
{name: "POW", saql: `POW(5, -1)`, wantRebuild: `POW(5, -1)`, wantValue: 0.2},
{name: "POW", saql: `POW(5, 0)`, wantRebuild: `POW(5, 0)`, wantValue: 1},
{name: "PRODUCT", saql: `PRODUCT([1, 2, 3, 4])`, wantRebuild: `PRODUCT([1, 2, 3, 4])`, wantValue: 24},
{name: "PRODUCT", saql: `PRODUCT([null, -5, 6])`, wantRebuild: `PRODUCT([null, -5, 6])`, wantValue: -30},
{name: "PRODUCT", saql: `PRODUCT([])`, wantRebuild: `PRODUCT([])`, wantValue: 1},
{name: "RADIANS", saql: `RADIANS(180)`, wantRebuild: `RADIANS(180)`, wantValue: 3.141592653589793},
{name: "RADIANS", saql: `RADIANS(90)`, wantRebuild: `RADIANS(90)`, wantValue: 1.5707963267948966},
{name: "RADIANS", saql: `RADIANS(0)`, wantRebuild: `RADIANS(0)`, wantValue: 0},
// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.3503170117504508},
// {name: "RAND", saql: `RAND()`, wantRebuild: `RAND()`, wantValue: 0.6138226173882478},
{name: "RANGE", saql: `RANGE(1, 4)`, wantRebuild: `RANGE(1, 4)`, wantValue: []interface{}{float64(1), float64(2), float64(3), float64(4)}},
{name: "RANGE", saql: `RANGE(1, 4, 2)`, wantRebuild: `RANGE(1, 4, 2)`, wantValue: []interface{}{float64(1), float64(3)}},
{name: "RANGE", saql: `RANGE(1, 4, 3)`, wantRebuild: `RANGE(1, 4, 3)`, wantValue: []interface{}{float64(1), float64(4)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5)`, wantRebuild: `RANGE(1.5, 2.5)`, wantValue: []interface{}{float64(1), float64(2)}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 1)`, wantRebuild: `RANGE(1.5, 2.5, 1)`, wantValue: []interface{}{1.5, 2.5}},
{name: "RANGE", saql: `RANGE(1.5, 2.5, 0.5)`, wantRebuild: `RANGE(1.5, 2.5, 0.5)`, wantValue: []interface{}{1.5, 2.0, 2.5}},
{name: "RANGE", saql: `RANGE(-0.75, 1.1, 0.5)`, wantRebuild: `RANGE(-0.75, 1.1, 0.5)`, wantValue: []interface{}{-0.75, -0.25, 0.25, 0.75}},
{name: "ROUND", saql: `ROUND(2.49)`, wantRebuild: `ROUND(2.49)`, wantValue: 2},
{name: "ROUND", saql: `ROUND(2.50)`, wantRebuild: `ROUND(2.50)`, wantValue: 3},
{name: "ROUND", saql: `ROUND(-2.50)`, wantRebuild: `ROUND(-2.50)`, wantValue: -2},
{name: "ROUND", saql: `ROUND(-2.51)`, wantRebuild: `ROUND(-2.51)`, wantValue: -3},
{name: "SQRT", saql: `SQRT(9)`, wantRebuild: `SQRT(9)`, wantValue: 3},
{name: "SQRT", saql: `SQRT(2)`, wantRebuild: `SQRT(2)`, wantValue: 1.4142135623730951},
{name: "POW", saql: `POW(4096, 1/4)`, wantRebuild: `POW(4096, 1 / 4)`, wantValue: 8},
{name: "POW", saql: `POW(27, 1/3)`, wantRebuild: `POW(27, 1 / 3)`, wantValue: 3},
{name: "POW", saql: `POW(9, 1/2)`, wantRebuild: `POW(9, 1 / 2)`, wantValue: 3},
// {name: "STDDEV_POPULATION", saql: `STDDEV_POPULATION([1, 3, 6, 5, 2])`, wantRebuild: `STDDEV_POPULATION([1, 3, 6, 5, 2])`, wantValue: 1.854723699099141},
// {name: "STDDEV_SAMPLE", saql: `STDDEV_SAMPLE([1, 3, 6, 5, 2])`, wantRebuild: `STDDEV_SAMPLE([1, 3, 6, 5, 2])`, wantValue: 2.0736441353327724},
{name: "SUM", saql: `SUM([1, 2, 3, 4])`, wantRebuild: `SUM([1, 2, 3, 4])`, wantValue: 10},
{name: "SUM", saql: `SUM([null, -5, 6])`, wantRebuild: `SUM([null, -5, 6])`, wantValue: 1},
{name: "SUM", saql: `SUM([])`, wantRebuild: `SUM([])`, wantValue: 0},
{name: "TAN", saql: `TAN(10)`, wantRebuild: `TAN(10)`, wantValue: 0.6483608274590866},
{name: "TAN", saql: `TAN(5)`, wantRebuild: `TAN(5)`, wantValue: -3.380515006246586},
{name: "TAN", saql: `TAN(0)`, wantRebuild: `TAN(0)`, wantValue: 0},
// {name: "VARIANCE_POPULATION", saql: `VARIANCE_POPULATION([1, 3, 6, 5, 2])`, wantRebuild: `VARIANCE_POPULATION([1, 3, 6, 5, 2])`, wantValue: 3.4400000000000004},
// {name: "VARIANCE_SAMPLE", saql: `VARIANCE_SAMPLE([1, 3, 6, 5, 2])`, wantRebuild: `VARIANCE_SAMPLE([1, 3, 6, 5, 2])`, wantValue: 4.300000000000001},
// Errors
{name: "Function Error 1", saql: "UNKNOWN(value)", wantRebuild: "UNKNOWN(value)", wantRebuildErr: true, wantEvalErr: true, values: `{"value": true}`},
{name: "Function Error 2", saql: "ABS(value, value2)", wantRebuild: "ABS(value, value2)", wantEvalErr: true, values: `{"value": true, "value2": false}`},
{name: "Function Error 3", saql: `ABS("abs")`, wantRebuild: `ABS("abs")`, wantEvalErr: true},
}
for _, tt := range tests {
parser := &Parser{}
t.Run(tt.name, func(t *testing.T) {
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
return
}
got, err := expr.String()
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
if got != tt.wantRebuild {
t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
}
var myJson map[string]interface{}
if tt.values != "" {
err = json.Unmarshal([]byte(tt.values), &myJson)
if err != nil {
t.Fatal(err)
}
}
value, err := expr.Eval(myJson)
if (err != nil) != tt.wantEvalErr {
t.Error(expr.String())
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
wantValue := tt.wantValue
if i, ok := wantValue.(int); ok {
wantValue = float64(i)
}
valueFloat, ok := value.(float64)
wantValueFloat, ok2 := wantValue.(float64)
if ok && ok2 {
if math.Abs(valueFloat-wantValueFloat) > 0.0001 {
t.Error(expr.String())
t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
}
} else {
if !reflect.DeepEqual(value, wantValue) {
t.Error(expr.String())
t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
}
}
})
}
}
// jsonParse decodes the JSON document s into a generic interface{} value.
// The empty string yields nil. Invalid JSON panics (test helper only).
func jsonParse(s string) interface{} {
	if len(s) == 0 {
		return nil
	}
	var decoded interface{}
	if err := json.Unmarshal([]byte(s), &decoded); err != nil {
		panic(s + err.Error())
	}
	return decoded
}

355
caql/interpreter.go Normal file
View File

@@ -0,0 +1,355 @@
package caql
import (
"fmt"
"strconv"
"strings"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// aqlInterpreter is a parse-tree listener that evaluates a CAQL expression
// bottom-up using a value stack: each Exit* callback pops its operands and
// pushes its result.
type aqlInterpreter struct {
	*parser.BaseCAQLParserListener

	values map[string]interface{} // variable bindings used to resolve references
	stack  []interface{}          // operand/result stack
	errs   []error                // errors collected during the walk; first one wins
}
// push is a helper function for pushing a new node onto the listener stack.
func (s *aqlInterpreter) push(i interface{}) {
	s.stack = append(s.stack, i)
}
// pop is a helper function for popping a node from the listener stack.
// On underflow it records ErrStack and returns nil instead of panicking.
func (s *aqlInterpreter) pop() (n interface{}) {
	// Check that we have nodes in the stack.
	size := len(s.stack)
	if size < 1 {
		s.appendErrors(ErrStack)
		return
	}

	// Pop the last value from the stack.
	n, s.stack = s.stack[size-1], s.stack[:size-1]

	return
}
// binaryPop pops the two operands of a binary operator and returns them as
// (left, right). The right operand sits on top of the stack, so it is popped
// first.
func (s *aqlInterpreter) binaryPop() (interface{}, interface{}) {
	right, left := s.pop(), s.pop()
	return left, right
}
// ExitExpression is called when production expression is exited. It pops the
// operand values of the expression from the stack, applies the operator and
// pushes the result back. Cases marked "pass" leave the stack untouched
// because a child callback already pushed the value.
func (s *aqlInterpreter) ExitExpression(ctx *parser.ExpressionContext) {
	switch {
	case ctx.Value_literal() != nil:
		// pass
	case ctx.Reference() != nil:
		// pass
	case ctx.Operator_unary() != nil:
		// pass
	case ctx.T_PLUS() != nil:
		s.push(plus(s.binaryPop()))
	case ctx.T_MINUS() != nil:
		s.push(minus(s.binaryPop()))
	case ctx.T_TIMES() != nil:
		s.push(times(s.binaryPop()))
	case ctx.T_DIV() != nil:
		s.push(div(s.binaryPop()))
	case ctx.T_MOD() != nil:
		s.push(mod(s.binaryPop()))
	case ctx.T_RANGE() != nil:
		s.push(aqlrange(s.binaryPop()))
	// Plain comparisons only apply when no ALL/ANY/NONE quantifier (eq_op) is set.
	case ctx.T_LT() != nil && ctx.GetEq_op() == nil:
		s.push(lt(s.binaryPop()))
	case ctx.T_GT() != nil && ctx.GetEq_op() == nil:
		s.push(gt(s.binaryPop()))
	case ctx.T_LE() != nil && ctx.GetEq_op() == nil:
		s.push(le(s.binaryPop()))
	case ctx.T_GE() != nil && ctx.GetEq_op() == nil:
		s.push(ge(s.binaryPop()))
	case ctx.T_IN() != nil && ctx.GetEq_op() == nil:
		s.push(maybeNot(ctx, in(s.binaryPop())))
	case ctx.T_EQ() != nil && ctx.GetEq_op() == nil:
		s.push(eq(s.binaryPop()))
	case ctx.T_NE() != nil && ctx.GetEq_op() == nil:
		s.push(ne(s.binaryPop()))
	// Array quantifiers with an explicit comparison operator, e.g. `x ALL == y`.
	case ctx.T_ALL() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(all(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
	case ctx.T_ANY() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(any(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
	case ctx.T_NONE() != nil && ctx.GetEq_op() != nil:
		right, left := s.pop(), s.pop()
		s.push(none(left.([]interface{}), getOp(ctx.GetEq_op().GetTokenType()), right))
	// Array quantifiers with NOT IN.
	case ctx.T_ALL() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(all(left.([]interface{}), in, right))
	case ctx.T_ANY() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(any(left.([]interface{}), in, right))
	case ctx.T_NONE() != nil && ctx.T_NOT() != nil && ctx.T_IN() != nil:
		right, left := s.pop(), s.pop()
		s.push(none(left.([]interface{}), in, right))
	case ctx.T_LIKE() != nil:
		m, err := like(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_REGEX_MATCH() != nil:
		m, err := regexMatch(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_REGEX_NON_MATCH() != nil:
		m, err := regexNonMatch(s.binaryPop())
		s.appendErrors(err)
		s.push(maybeNot(ctx, m))
	case ctx.T_AND() != nil:
		s.push(and(s.binaryPop()))
	case ctx.T_OR() != nil:
		s.push(or(s.binaryPop()))
	// Ternary: `a ? b : c` has three sub-expressions, the shorthand `a ? : c`
	// only two (the middle operand is represented as nil).
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 3:
		right, middle, left := s.pop(), s.pop(), s.pop()
		s.push(ternary(left, middle, right))
	case ctx.T_QUESTION() != nil && len(ctx.AllExpression()) == 2:
		right, left := s.pop(), s.pop()
		s.push(ternary(left, nil, right))
	default:
		panic("unknown expression")
	}
}
// appendErrors records err if it is non-nil; the first recorded error is the
// one eventually returned to the caller.
func (s *aqlInterpreter) appendErrors(err error) {
	if err != nil {
		s.errs = append(s.errs, err)
	}
}
// ExitOperator_unary is called when production operator_unary is exited.
// It applies the unary operator to the value on top of the stack.
// NOTE(review): unary + and - type-assert float64 and panic (recovered in
// Eval) for non-numeric operands — confirm this matches AQL coercion rules.
func (s *aqlInterpreter) ExitOperator_unary(ctx *parser.Operator_unaryContext) {
	value := s.pop()
	switch {
	case ctx.T_PLUS() != nil:
		s.push(value.(float64))
	case ctx.T_MINUS() != nil:
		s.push(-value.(float64))
	case ctx.T_NOT() != nil:
		s.push(!toBool(value))
	default:
		panic(fmt.Sprintf("unexpected operation: %s", ctx.GetText()))
	}
}
// ExitReference is called when production reference is exited. It resolves
// variable lookups, attribute access (dot and bracket notation) and array
// indexing (negative indexes count from the end), pushing the resolved value.
func (s *aqlInterpreter) ExitReference(ctx *parser.ReferenceContext) {
	switch {
	case ctx.DOT() != nil:
		// Attribute access: reference.attr
		reference := s.pop()
		s.push(reference.(map[string]interface{})[ctx.T_STRING().GetText()])
	case ctx.T_STRING() != nil:
		// Plain identifier: resolve against the bound values.
		s.push(s.getVar(ctx.T_STRING().GetText()))
	case ctx.Compound_value() != nil:
		// pass
	case ctx.Function_call() != nil:
		// pass
	case ctx.T_OPEN() != nil:
		// pass
	case ctx.T_ARRAY_OPEN() != nil:
		// Bracket access: reference[key]. A numeric key indexes an array
		// (negative indexes wrap from the end); a string key indexes an object.
		key := s.pop()
		reference := s.pop()
		if f, ok := key.(float64); ok {
			index := int(f)
			if index < 0 {
				index = len(reference.([]interface{})) + index
			}
			s.push(reference.([]interface{})[index])
			return
		}
		s.push(reference.(map[string]interface{})[key.(string)])
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitCompound_value is called when production compound_value is exited.
// Nothing to do: ExitArray/ExitObject already pushed the value.
func (s *aqlInterpreter) ExitCompound_value(ctx *parser.Compound_valueContext) {
	// pass
}
// ExitFunction_call is called when production function_call is exited; the
// actual evaluation is delegated to the function helper (defined elsewhere
// in this package).
func (s *aqlInterpreter) ExitFunction_call(ctx *parser.Function_callContext) {
	s.function(ctx)
}
// ExitValue_literal is called when production value_literal is exited.
// It converts the literal token text to its Go representation (string,
// float64, nil or bool) and pushes it. All numbers — including binary
// (0b...) and hexadecimal (0x...) integers — are represented as float64.
func (s *aqlInterpreter) ExitValue_literal(ctx *parser.Value_literalContext) {
	switch {
	case ctx.T_QUOTED_STRING() != nil:
		st, err := unquote(ctx.GetText())
		s.appendErrors(err)
		s.push(st)
	case ctx.T_INT() != nil:
		t := ctx.GetText()
		switch {
		case strings.HasPrefix(strings.ToLower(t), "0b"):
			i64, err := strconv.ParseInt(t[2:], 2, 64)
			s.appendErrors(err)
			s.push(float64(i64))
		case strings.HasPrefix(strings.ToLower(t), "0x"):
			i64, err := strconv.ParseInt(t[2:], 16, 64)
			s.appendErrors(err)
			s.push(float64(i64))
		default:
			i, err := strconv.Atoi(t)
			s.appendErrors(err)
			s.push(float64(i))
		}
	case ctx.T_FLOAT() != nil:
		i, err := strconv.ParseFloat(ctx.GetText(), 64)
		s.appendErrors(err)
		s.push(i)
	case ctx.T_NULL() != nil:
		s.push(nil)
	case ctx.T_TRUE() != nil:
		s.push(true)
	case ctx.T_FALSE() != nil:
		s.push(false)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitArray is called when production array is exited. The element values sit
// on the stack in evaluation order with the last element on top, so each
// popped value is prepended to rebuild the original order.
func (s *aqlInterpreter) ExitArray(ctx *parser.ArrayContext) {
	array := []interface{}{}
	for range ctx.AllExpression() {
		// prepend element
		array = append([]interface{}{s.pop()}, array...)
	}
	s.push(array)
}
// ExitObject is called when production object is exited. It pops one
// key/value pair per object element (ExitObject_element arranges the key on
// top of its value) and pushes the assembled map.
func (s *aqlInterpreter) ExitObject(ctx *parser.ObjectContext) {
	object := map[string]interface{}{}
	for range ctx.AllObject_element() {
		// First pop yields the key (top of stack), second the value.
		key, value := s.pop(), s.pop()
		object[key.(string)] = value
	}
	s.push(object)
}
// ExitObject_element is called when production object_element is exited.
// It arranges the stack so the element's key ends up on top of its value,
// the order ExitObject expects.
func (s *aqlInterpreter) ExitObject_element(ctx *parser.Object_elementContext) {
	switch {
	case ctx.T_STRING() != nil:
		// Shorthand element {name}: the identifier is both key and value.
		// NOTE(review): this pushes key below value, opposite to what
		// ExitObject pops — verify the shorthand form against the grammar.
		s.push(ctx.GetText())
		s.push(s.getVar(ctx.GetText()))
	case ctx.Object_element_name() != nil, ctx.T_ARRAY_OPEN() != nil:
		// Children pushed the name first, then the value expression; despite
		// the local names, `key` here holds the value and `value` the name.
		// Re-pushing in pop order swaps them, leaving the name on top.
		key, value := s.pop(), s.pop()
		s.push(key)
		s.push(value)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// ExitObject_element_name is called when production object_element_name is
// exited. It pushes the element key: a bare identifier as-is, a quoted
// string after unquoting.
func (s *aqlInterpreter) ExitObject_element_name(ctx *parser.Object_element_nameContext) {
	switch {
	case ctx.T_STRING() != nil:
		s.push(ctx.T_STRING().GetText())
	case ctx.T_QUOTED_STRING() != nil:
		st, err := unquote(ctx.T_QUOTED_STRING().GetText())
		if err != nil {
			s.appendErrors(fmt.Errorf("%w: %s", err, ctx.GetText()))
		}
		s.push(st)
	default:
		panic(fmt.Sprintf("unexpected value: %s", ctx.GetText()))
	}
}
// getVar resolves identifier against the bound values. A missing identifier
// records ErrUndefined and returns nil.
func (s *aqlInterpreter) getVar(identifier string) interface{} {
	v, ok := s.values[identifier]
	if !ok {
		s.appendErrors(ErrUndefined)
	}
	return v
}
// maybeNot negates m when the expression carries a NOT token (NOT IN,
// NOT LIKE, negated regex match); otherwise it returns m unchanged.
func maybeNot(ctx *parser.ExpressionContext, m bool) bool {
	if ctx.T_NOT() != nil {
		return !m
	}
	return m
}
// getOp maps a comparison token type (==, !=, <, >, <=, >=, IN) to the
// function implementing it; used by the ALL/ANY/NONE array quantifiers.
func getOp(tokenType int) func(left, right interface{}) bool {
	switch tokenType {
	case parser.CAQLLexerT_EQ:
		return eq
	case parser.CAQLLexerT_NE:
		return ne
	case parser.CAQLLexerT_LT:
		return lt
	case parser.CAQLLexerT_GT:
		return gt
	case parser.CAQLLexerT_LE:
		return le
	case parser.CAQLLexerT_GE:
		return ge
	case parser.CAQLLexerT_IN:
		return in
	default:
		panic("unknown token type")
	}
}
// all reports whether the predicate op holds between every element of slice
// and expr (AQL ALL quantifier). An empty slice yields true.
func all(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
	for _, item := range slice {
		if op(item, expr) {
			continue
		}
		return false
	}
	return true
}
// any reports whether the predicate op holds between at least one element of
// slice and expr (AQL ANY quantifier). An empty slice yields false.
func any(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
	matched := false
	for _, item := range slice {
		// || short-circuits, so op is not invoked again after a match.
		matched = matched || op(item, expr)
	}
	return matched
}
// none reports whether the predicate op holds for no element of slice and
// expr (AQL NONE quantifier). An empty slice yields true.
func none(slice []interface{}, op func(interface{}, interface{}) bool, expr interface{}) bool {
	matched := false
	for _, item := range slice {
		// || short-circuits, so op is not invoked again after a match.
		matched = matched || op(item, expr)
	}
	return !matched
}

497
caql/operations.go Normal file
View File

@@ -0,0 +1,497 @@
package caql
import (
"math"
"regexp"
"sort"
"strconv"
"strings"
)
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators

// or implements AQL OR: it returns left if left is truthy, otherwise right.
// Like AQL, it returns an operand, not a forced boolean.
func or(left, right interface{}) interface{} {
	if toBool(left) {
		return left
	}
	return right
}
// and implements AQL AND: it returns left if left is falsy, otherwise right.
// Like AQL, it returns an operand, not a forced boolean.
func and(left, right interface{}) interface{} {
	if !toBool(left) {
		return left
	}
	return right
}
// toBool converts an arbitrary AQL value to a boolean: null, false, 0 and ""
// are false; arrays and objects are always true, even when empty. Unknown
// dynamic types panic (recovered by Eval).
func toBool(i interface{}) bool {
	switch value := i.(type) {
	case nil:
		return false
	case bool:
		return value
	case int:
		return value != 0
	case float64:
		return value != 0
	case string:
		return len(value) > 0
	case []interface{}, map[string]interface{}:
		return true
	default:
		panic("bool conversion failed")
	}
}
// Arithmetic operators https://www.arangodb.com/docs/3.7/aql/operators.html#arithmetic-operators

// plus implements AQL +; both operands are coerced with toNumber.
func plus(left, right interface{}) float64 {
	return toNumber(left) + toNumber(right)
}
// minus implements AQL -; both operands are coerced with toNumber.
func minus(left, right interface{}) float64 {
	return toNumber(left) - toNumber(right)
}
// times implements AQL *; the product is rounded to five decimal places —
// presumably to mask binary floating-point artifacts (TODO confirm intent).
func times(left, right interface{}) float64 {
	return round(toNumber(left) * toNumber(right))
}
// round rounds r to five decimal places (half away from zero, per
// math.Round).
func round(r float64) float64 {
	const scale = 100000
	return math.Round(r*scale) / scale
}
// div implements AQL /; the quotient is rounded to five decimal places.
// NOTE(review): division by zero returns 0 here; AQL itself warns and yields
// null — confirm whether this simplification is intentional.
func div(left, right interface{}) float64 {
	b := toNumber(right)
	if b == 0 {
		return 0
	}
	return round(toNumber(left) / b)
}
// mod implements AQL %; operands are coerced with toNumber and the result
// follows math.Mod semantics (sign of the dividend).
func mod(left, right interface{}) float64 {
	return math.Mod(toNumber(left), toNumber(right))
}
func toNumber(i interface{}) float64 {
switch v := i.(type) {
case nil:
return 0
case bool:
if v {
return 1
}
return 0
case float64:
switch {
case math.IsNaN(v):
return 0
case math.IsInf(v, 0):
return 0
}
return v
case string:
f, err := strconv.ParseFloat(strings.TrimSpace(v), 64)
if err != nil {
return 0
}
return f
case []interface{}:
if len(v) == 0 {
return 0
}
if len(v) == 1 {
return toNumber(v[0])
}
return 0
case map[string]interface{}:
return 0
default:
panic("number conversion error")
}
}
// Logical operators https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
// Order https://www.arangodb.com/docs/3.7/aql/fundamentals-type-value-order.html

// eq implements AQL ==: values of different AQL types are never equal;
// arrays are compared element-wise and objects key-wise (union of keys),
// with missing entries treated as null.
func eq(left, right interface{}) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		return false
	}
	switch l := left.(type) {
	case nil:
		return true
	case bool, float64, string:
		return left == right
	case []interface{}:
		ra := right.([]interface{})
		// Compare up to the longer length; out-of-range entries act as null.
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li interface{} = nil
			var rai interface{} = nil
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				return false
			}
		}
		return true
	case map[string]interface{}:
		ro := right.(map[string]interface{})
		// Iterate the sorted union of keys; missing keys act as null.
		for _, key := range keys(l, ro) {
			var li interface{} = nil
			var rai interface{} = nil
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				return false
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
// ne implements AQL != as the negation of eq.
func ne(left, right interface{}) bool {
	return !eq(left, right)
}
// lt implements AQL < following the AQL type order
// (null < bool < number < string < array < object); containers are decided
// by their first differing element/key.
func lt(left, right interface{}) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		// Different AQL types are ordered by type alone.
		return leftV < rightV
	}
	switch l := left.(type) {
	case nil:
		return false
	case bool:
		return toNumber(l) < toNumber(right)
	case int:
		return l < right.(int)
	case float64:
		return l < right.(float64)
	case string:
		return l < right.(string)
	case []interface{}:
		ra := right.([]interface{})
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li interface{} = nil
			var rai interface{} = nil
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// First differing element decides the comparison.
				return lt(li, rai)
			}
		}
		return false
	case map[string]interface{}:
		ro := right.(map[string]interface{})
		for _, key := range keys(l, ro) {
			var li interface{} = nil
			var rai interface{} = nil
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// First differing key (sorted order) decides the comparison.
				return lt(li, rai)
			}
		}
		return false
	default:
		panic("unknown type")
	}
}
// keys returns the sorted union of the keys of both maps, each key appearing
// once.
func keys(l map[string]interface{}, ro map[string]interface{}) []string {
	seen := map[string]bool{}
	var merged []string
	for _, m := range []map[string]interface{}{l, ro} {
		for key := range m {
			if !seen[key] {
				seen[key] = true
				merged = append(merged, key)
			}
		}
	}
	sort.Strings(merged)
	return merged
}
// gt implements AQL > following the AQL type order
// (null < bool < number < string < array < object); containers are decided
// by their first differing element/key.
func gt(left, right interface{}) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		// Different AQL types are ordered by type alone.
		return leftV > rightV
	}
	switch l := left.(type) {
	case nil:
		return false
	case bool:
		return toNumber(l) > toNumber(right)
	case int:
		return l > right.(int)
	case float64:
		return l > right.(float64)
	case string:
		return l > right.(string)
	case []interface{}:
		ra := right.([]interface{})
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li interface{} = nil
			var rai interface{} = nil
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// First differing element decides the comparison.
				return gt(li, rai)
			}
		}
		return false
	case map[string]interface{}:
		ro := right.(map[string]interface{})
		for _, key := range keys(l, ro) {
			var li interface{} = nil
			var rai interface{} = nil
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// First differing key (sorted order) decides the comparison.
				return gt(li, rai)
			}
		}
		return false
	default:
		panic("unknown type")
	}
}
// le implements AQL <= following the AQL type order
// (null < bool < number < string < array < object); containers are decided
// by their first differing element/key, equal containers compare true.
func le(left, right interface{}) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		// Different AQL types are ordered by type alone.
		return leftV <= rightV
	}
	switch l := left.(type) {
	case nil:
		// null == null, therefore null <= null holds.
		return true
	case bool:
		return toNumber(l) <= toNumber(right)
	case int:
		return l <= right.(int)
	case float64:
		return l <= right.(float64)
	case string:
		return l <= right.(string)
	case []interface{}:
		ra := right.([]interface{})
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li interface{} = nil
			var rai interface{} = nil
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// First differing element decides the comparison.
				return le(li, rai)
			}
		}
		return true
	case map[string]interface{}:
		ro := right.(map[string]interface{})
		for _, key := range keys(l, ro) {
			var li interface{} = nil
			var rai interface{} = nil
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// Recurse with le (not lt) for consistency with the array
				// branch; first differing key decides.
				return le(li, rai)
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
// ge implements AQL >= following the AQL type order
// (null < bool < number < string < array < object); containers are decided
// by their first differing element/key, equal containers compare true.
func ge(left, right interface{}) bool {
	leftV, rightV := typeValue(left), typeValue(right)
	if leftV != rightV {
		// Different AQL types are ordered by type alone.
		return leftV >= rightV
	}
	switch l := left.(type) {
	case nil:
		// null == null, therefore null >= null holds.
		return true
	case bool:
		return toNumber(l) >= toNumber(right)
	case int:
		return l >= right.(int)
	case float64:
		return l >= right.(float64)
	case string:
		return l >= right.(string)
	case []interface{}:
		ra := right.([]interface{})
		max := len(l)
		if len(ra) > max {
			max = len(ra)
		}
		for i := 0; i < max; i++ {
			var li interface{} = nil
			var rai interface{} = nil
			if len(l) > i {
				li = l[i]
			}
			if len(ra) > i {
				rai = ra[i]
			}
			if !eq(li, rai) {
				// First differing element decides the comparison.
				return ge(li, rai)
			}
		}
		return true
	case map[string]interface{}:
		ro := right.(map[string]interface{})
		for _, key := range keys(l, ro) {
			var li interface{} = nil
			var rai interface{} = nil
			if lv, ok := l[key]; ok {
				li = lv
			}
			if rv, ok := ro[key]; ok {
				rai = rv
			}
			if !eq(li, rai) {
				// Recurse with ge (not gt) for consistency with the array
				// branch; first differing key decides.
				return ge(li, rai)
			}
		}
		return true
	default:
		panic("unknown type")
	}
}
func in(left, right interface{}) bool {
a, ok := right.([]interface{})
if !ok {
return false
}
for _, v := range a {
if left == v {
return true
}
}
return false
}
// like implements AQL LIKE by delegating to match (defined elsewhere) with
// the pattern right and subject left — presumably AQL "%" / "_" wildcard
// semantics; verify against match.
func like(left, right interface{}) (bool, error) {
	return match(right.(string), left.(string))
}
// regexMatch implements AQL =~: it reports whether the string left matches
// the (unanchored, Go-syntax) regular expression right.
func regexMatch(left, right interface{}) (bool, error) {
	pattern, subject := right.(string), left.(string)
	return regexp.MatchString(pattern, subject)
}
// regexNonMatch implements AQL !~: the negation of regexMatch. On a pattern
// error the (negated) false match is still returned alongside the error.
func regexNonMatch(left, right interface{}) (bool, error) {
	matched, err := regexp.MatchString(right.(string), left.(string))
	return !matched, err
}
// typeValue returns the AQL type rank used for cross-type ordering:
// null(0) < bool(1) < number(2) < string(3) < array(4) < object(5).
func typeValue(v interface{}) int {
	switch v.(type) {
	case nil:
		return 0
	case bool:
		return 1
	case float64, int:
		return 2
	case string:
		return 3
	case []interface{}:
		return 4
	case map[string]interface{}:
		return 5
	}
	panic("unknown type")
}
// Ternary operator https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator

// ternary implements cond ? a : b. A nil middle marks the shorthand form
// cond ? : b, which returns the condition value itself when truthy.
// NOTE(review): an explicit null middle (`c ? null : b`) is indistinguishable
// from the shorthand here and returns c instead of null — confirm intent.
func ternary(left, middle, right interface{}) interface{} {
	if toBool(left) {
		if middle != nil {
			return middle
		}
		return left
	}
	return right
}
// Range operators https://www.arangodb.com/docs/3.7/aql/operators.html#range-operator

// aqlrange implements the .. operator: the ascending integer sequence from
// left to right inclusive (bounds truncated to int). If left > right the
// result is empty (nil).
func aqlrange(left, right interface{}) []float64 {
	lo, hi := int(left.(float64)), int(right.(float64))
	var sequence []float64
	for i := lo; i <= hi; i++ {
		sequence = append(sequence, float64(i))
	}
	return sequence
}

120
caql/parser.go Normal file
View File

@@ -0,0 +1,120 @@
package caql
import (
"errors"
"fmt"
"strconv"
"github.com/antlr/antlr4/runtime/Go/antlr"
"github.com/SecurityBrewery/catalyst/generated/caql/parser"
)
// Parser parses CAQL expressions. Searcher and Prefix are forwarded to the
// resulting Tree, where they are used when rebuilding query strings.
type Parser struct {
	Searcher Searcher
	Prefix   string
}
// Parse lexes and parses the CAQL expression aql into a Tree. Parser panics
// (the bail error strategy aborts via panic) are recovered and returned as
// errors, as is the first error reported to the error listener.
func (p *Parser) Parse(aql string) (t *Tree, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()

	// Set up the input.
	inputStream := antlr.NewInputStream(aql)
	errorListener := &errorListener{}

	// Create the lexer.
	lexer := parser.NewCAQLLexer(inputStream)
	lexer.RemoveErrorListeners()
	lexer.AddErrorListener(errorListener)
	stream := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)

	// Create the parser.
	aqlParser := parser.NewCAQLParser(stream)
	aqlParser.RemoveErrorListeners()
	aqlParser.AddErrorListener(errorListener)
	aqlParser.SetErrorHandler(antlr.NewBailErrorStrategy())

	// Parse BEFORE inspecting the listener: lexing happens lazily during
	// Parse, so errors are only reported to the listener afterwards.
	// Checking errs first would always see an empty slice.
	parseContext := aqlParser.Parse()
	if errorListener.errs != nil {
		err = errorListener.errs[0]
	}

	return &Tree{aqlParser: aqlParser, parseContext: parseContext, searcher: p.Searcher, prefix: p.Prefix}, err
}
// Tree is a parsed CAQL expression. It can be evaluated against variable
// bindings (Eval), rebuilt as canonical CAQL text (String) or translated to
// a bleve query string (BleveString).
type Tree struct {
	parseContext parser.IParseContext
	aqlParser    *parser.CAQLParser
	searcher     Searcher
	prefix       string
}
// Eval walks the parse tree and evaluates the expression against the given
// variable bindings. Panics during the walk (e.g. failed type assertions)
// are recovered and returned as errors; otherwise the first listener error,
// if any, is returned.
func (t *Tree) Eval(values map[string]interface{}) (i interface{}, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()

	interpreter := aqlInterpreter{values: values}
	antlr.ParseTreeWalkerDefault.Walk(&interpreter, t.parseContext)
	if interpreter.errs != nil {
		return nil, interpreter.errs[0]
	}

	// A successful walk leaves exactly one value — the result — on the stack.
	return interpreter.stack[0], nil
}
// String rebuilds the canonical CAQL text of the parsed expression using the
// aqlBuilder listener. Panics during the walk are recovered and returned as
// errors.
func (t *Tree) String() (s string, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()

	builder := aqlBuilder{searcher: t.searcher, prefix: t.prefix}
	antlr.ParseTreeWalkerDefault.Walk(&builder, t.parseContext)

	return builder.stack[0], err
}
// BleveString translates the parsed expression to a bleve query string using
// the bleveBuilder listener. Panics during the walk are recovered and
// returned as errors, as is any error the builder records.
func (t *Tree) BleveString() (s string, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("%s", r)
		}
	}()

	builder := bleveBuilder{}
	antlr.ParseTreeWalkerDefault.Walk(&builder, t.parseContext)
	if builder.err != nil {
		return "", builder.err
	}

	return builder.stack[0], err
}
// errorListener collects lexer/parser errors instead of printing them to
// stderr (the antlr default), so Parse can return the first one.
type errorListener struct {
	*antlr.DefaultErrorListener

	errs []error
}
// SyntaxError records a lexer/parser syntax error as "line <line>:<column> <msg>".
func (el *errorListener) SyntaxError(recognizer antlr.Recognizer, offendingSymbol interface{}, line, column int, msg string, e antlr.RecognitionException) {
	// errors.New instead of fmt.Errorf: msg may itself contain '%', which
	// fmt.Errorf without arguments would mangle into "%!x(MISSING)" artifacts.
	el.errs = append(el.errs, errors.New("line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg))
}
// ReportAmbiguity records grammar ambiguity reports as errors.
// NOTE(review): antlr emits these as diagnostics, not user errors — confirm
// that treating them as fatal is intended.
func (el *errorListener) ReportAmbiguity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, exact bool, ambigAlts *antlr.BitSet, configs antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportAmbiguity"))
}
// ReportAttemptingFullContext records SLL-to-LL prediction fallbacks as
// errors. NOTE(review): antlr emits these as diagnostics, not user errors —
// confirm that treating them as fatal is intended.
func (el *errorListener) ReportAttemptingFullContext(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, conflictingAlts *antlr.BitSet, configs antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportAttemptingFullContext"))
}
// ReportContextSensitivity records context-sensitivity reports as errors.
// NOTE(review): antlr emits these as diagnostics, not user errors — confirm
// that treating them as fatal is intended.
func (el *errorListener) ReportContextSensitivity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex, prediction int, configs antlr.ATNConfigSet) {
	el.errs = append(el.errs, errors.New("ReportContextSensitivity"))
}

352
caql/rql_test.go Normal file
View File

@@ -0,0 +1,352 @@
package caql
import (
"encoding/json"
"reflect"
"testing"
)
// MockSearcher is a test stub for the Searcher interface that ignores its
// query and always returns the ids "1", "2", "3".
type MockSearcher struct{}

// Search returns the fixed id list regardless of the query string.
func (m MockSearcher) Search(_ string) (ids []string, err error) {
	return []string{"1", "2", "3"}, nil
}
func TestParseSAQLEval(t *testing.T) {
tests := []struct {
name string
saql string
wantRebuild string
wantValue interface{}
wantParseErr bool
wantRebuildErr bool
wantEvalErr bool
values string
}{
// Custom
{name: "Compare 1", saql: "1 <= 2", wantRebuild: "1 <= 2", wantValue: true},
{name: "Compare 2", saql: "1 >= 2", wantRebuild: "1 >= 2", wantValue: false},
{name: "Compare 3", saql: "1 == 2", wantRebuild: "1 == 2", wantValue: false},
{name: "Compare 4", saql: "1 > 2", wantRebuild: "1 > 2", wantValue: false},
{name: "Compare 5", saql: "1 < 2", wantRebuild: "1 < 2", wantValue: true},
{name: "Compare 6", saql: "1 != 2", wantRebuild: "1 != 2", wantValue: true},
{name: "SymbolRef 1", saql: "name", wantRebuild: "name", wantValue: false, values: `{"name": false}`},
{name: "SymbolRef 2", saql: "d.name", wantRebuild: "d.name", wantValue: false, values: `{"d": {"name": false}}`},
{name: "SymbolRef 3", saql: "name == false", wantRebuild: "name == false", wantValue: true, values: `{"name": false}`},
{name: "SymbolRef Error 1", saql: "name, title", wantParseErr: true},
{name: "SymbolRef Error 2", saql: "unknown", wantRebuild: "unknown", wantValue: false, wantEvalErr: true, values: `{}`},
{name: "Misc 1", saql: `active == true && age < 39`, wantRebuild: `active == true AND age < 39`, wantValue: true, values: `{"active": true, "age": 2}`},
{name: "Misc 2", saql: `(attr == 10) AND foo == 'bar' OR NOT baz`, wantRebuild: `(attr == 10) AND foo == "bar" OR NOT baz`, wantValue: false, values: `{"attr": 2, "foo": "bar", "baz": true}`},
{name: "Misc 3", saql: `attr == 10 AND (foo == 'bar' OR foo == 'baz')`, wantRebuild: `attr == 10 AND (foo == "bar" OR foo == "baz")`, wantValue: false, values: `{"attr": 2, "foo": "bar", "baz": true}`},
{name: "Misc 4", saql: `5 > 1 AND "a" != "b"`, wantRebuild: `5 > 1 AND "a" != "b"`, wantValue: true},
{name: "LIKE 1", saql: `"foo" LIKE "%f%"`, wantRebuild: `"foo" LIKE "%f%"`, wantValue: true},
{name: "LIKE 2", saql: `"foo" NOT LIKE "%f%"`, wantRebuild: `"foo" NOT LIKE "%f%"`, wantValue: false},
{name: "LIKE 3", saql: `NOT "foo" LIKE "%f%"`, wantRebuild: `NOT "foo" LIKE "%f%"`, wantValue: false},
{name: "Summand 1", saql: "1 + 2", wantRebuild: "1 + 2", wantValue: 3},
{name: "Summand 2", saql: "1 - 2", wantRebuild: "1 - 2", wantValue: -1},
{name: "Factor 1", saql: "1 * 2", wantRebuild: "1 * 2", wantValue: 2},
{name: "Factor 2", saql: "1 / 2", wantRebuild: "1 / 2", wantValue: 0.5},
{name: "Factor 3", saql: "1.0 / 2.0", wantRebuild: "1.0 / 2.0", wantValue: 0.5},
{name: "Factor 4", saql: "1 % 2", wantRebuild: "1 % 2", wantValue: 1},
{name: "Term 1", saql: "(1 + 2) * 2", wantRebuild: "(1 + 2) * 2", wantValue: 6},
{name: "Term 2", saql: "2 * (1 + 2)", wantRebuild: "2 * (1 + 2)", wantValue: 6},
// https://www.arangodb.com/docs/3.7/aql/fundamentals-data-types.html
{name: "Null 1", saql: `null`, wantRebuild: "null"},
{name: "Bool 1", saql: `true`, wantRebuild: "true", wantValue: true},
{name: "Bool 2", saql: `false`, wantRebuild: "false", wantValue: false},
{name: "Numeric 1", saql: "1", wantRebuild: "1", wantValue: 1},
{name: "Numeric 2", saql: "+1", wantRebuild: "1", wantValue: 1},
{name: "Numeric 3", saql: "42", wantRebuild: "42", wantValue: 42},
{name: "Numeric 4", saql: "-1", wantRebuild: "-1", wantValue: -1},
{name: "Numeric 5", saql: "-42", wantRebuild: "-42", wantValue: -42},
{name: "Numeric 6", saql: "1.23", wantRebuild: "1.23", wantValue: 1.23},
{name: "Numeric 7", saql: "-99.99", wantRebuild: "-99.99", wantValue: -99.99},
{name: "Numeric 8", saql: "0.5", wantRebuild: "0.5", wantValue: 0.5},
{name: "Numeric 9", saql: ".5", wantRebuild: ".5", wantValue: 0.5},
{name: "Numeric 10", saql: "-4.87e103", wantRebuild: "-4.87e103", wantValue: -4.87e+103},
{name: "Numeric 11", saql: "0b10", wantRebuild: "0b10", wantValue: 2},
{name: "Numeric 12", saql: "0x10", wantRebuild: "0x10", wantValue: 16},
{name: "Numeric Error 1", saql: "1.", wantParseErr: true},
{name: "Numeric Error 2", saql: "01.23", wantParseErr: true},
{name: "Numeric Error 3", saql: "00.23", wantParseErr: true},
{name: "Numeric Error 4", saql: "00", wantParseErr: true},
// {name: "String 1", saql: `"yikes!"`, wantRebuild: `"yikes!"`, wantValue: "yikes!"},
// {name: "String 2", saql: `"don't know"`, wantRebuild: `"don't know"`, wantValue: "don't know"},
// {name: "String 3", saql: `"this is a \"quoted\" word"`, wantRebuild: `"this is a \"quoted\" word"`, wantValue: "this is a \"quoted\" word"},
// {name: "String 4", saql: `"this is a longer string."`, wantRebuild: `"this is a longer string."`, wantValue: "this is a longer string."},
// {name: "String 5", saql: `"the path separator on Windows is \\"`, wantRebuild: `"the path separator on Windows is \\"`, wantValue: "the path separator on Windows is \\"},
// {name: "String 6", saql: `'yikes!'`, wantRebuild: `"yikes!"`, wantValue: "yikes!"},
// {name: "String 7", saql: `'don\'t know'`, wantRebuild: `"don't know"`, wantValue: "don't know"},
// {name: "String 8", saql: `'this is a "quoted" word'`, wantRebuild: `"this is a \"quoted\" word"`, wantValue: "this is a \"quoted\" word"},
// {name: "String 9", saql: `'this is a longer string.'`, wantRebuild: `"this is a longer string."`, wantValue: "this is a longer string."},
// {name: "String 10", saql: `'the path separator on Windows is \\'`, wantRebuild: `"the path separator on Windows is \\"`, wantValue: `the path separator on Windows is \`},
{name: "Array 1", saql: "[]", wantRebuild: "[]", wantValue: []interface{}{}},
{name: "Array 2", saql: `[true]`, wantRebuild: `[true]`, wantValue: []interface{}{true}},
{name: "Array 3", saql: `[1, 2, 3]`, wantRebuild: `[1, 2, 3]`, wantValue: []interface{}{float64(1), float64(2), float64(3)}},
{
name: "Array 4", saql: `[-99, "yikes!", [false, ["no"], []], 1]`, wantRebuild: `[-99, "yikes!", [false, ["no"], []], 1]`,
wantValue: []interface{}{-99.0, "yikes!", []interface{}{false, []interface{}{"no"}, []interface{}{}}, float64(1)},
},
{name: "Array 5", saql: `[["fox", "marshal"]]`, wantRebuild: `[["fox", "marshal"]]`, wantValue: []interface{}{[]interface{}{"fox", "marshal"}}},
{name: "Array 6", saql: `[1, 2, 3,]`, wantRebuild: `[1, 2, 3]`, wantValue: []interface{}{float64(1), float64(2), float64(3)}},
{name: "Array Error 1", saql: "(1,2,3)", wantParseErr: true},
{name: "Array Access 1", saql: "u.friends[0]", wantRebuild: "u.friends[0]", wantValue: 7, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 2", saql: "u.friends[2]", wantRebuild: "u.friends[2]", wantValue: 9, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 3", saql: "u.friends[-1]", wantRebuild: "u.friends[-1]", wantValue: 9, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Array Access 4", saql: "u.friends[-2]", wantRebuild: "u.friends[-2]", wantValue: 8, values: `{"u": {"friends": [7,8,9]}}`},
{name: "Object 1", saql: "{}", wantRebuild: "{}", wantValue: map[string]interface{}{}},
{name: "Object 2", saql: `{a: 1}`, wantRebuild: "{a: 1}", wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 3", saql: `{'a': 1}`, wantRebuild: `{'a': 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 4", saql: `{"a": 1}`, wantRebuild: `{"a": 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 5", saql: `{'return': 1}`, wantRebuild: `{'return': 1}`, wantValue: map[string]interface{}{"return": float64(1)}},
{name: "Object 6", saql: `{"return": 1}`, wantRebuild: `{"return": 1}`, wantValue: map[string]interface{}{"return": float64(1)}},
{name: "Object 9", saql: `{a: 1,}`, wantRebuild: "{a: 1}", wantValue: map[string]interface{}{"a": float64(1)}},
{name: "Object 10", saql: `{"a": 1,}`, wantRebuild: `{"a": 1}`, wantValue: map[string]interface{}{"a": float64(1)}},
// {"Object 8", "{`return`: 1}", `{"return": 1}`, true},
// {"Object 7", "{´return´: 1}", `{"return": 1}`, true},
{name: "Object Error 1: return is a keyword", saql: `{like: 1}`, wantParseErr: true},
{name: "Object Access 1", saql: "u.address.city.name", wantRebuild: "u.address.city.name", wantValue: "Munich", values: `{"u": {"address": {"city": {"name": "Munich"}}}}`},
{name: "Object Access 2", saql: "u.friends[0].name.first", wantRebuild: "u.friends[0].name.first", wantValue: "Kevin", values: `{"u": {"friends": [{"name": {"first": "Kevin"}}]}}`},
{name: "Object Access 3", saql: `u["address"]["city"]["name"]`, wantRebuild: `u["address"]["city"]["name"]`, wantValue: "Munich", values: `{"u": {"address": {"city": {"name": "Munich"}}}}`},
{name: "Object Access 4", saql: `u["friends"][0]["name"]["first"]`, wantRebuild: `u["friends"][0]["name"]["first"]`, wantValue: "Kevin", values: `{"u": {"friends": [{"name": {"first": "Kevin"}}]}}`},
{name: "Object Access 5", saql: "u._key", wantRebuild: "u._key", wantValue: false, values: `{"u": {"_key": false}}`},
// This query language does not support binds
// https://www.arangodb.com/docs/3.7/aql/fundamentals-bind-parameters.html
// {name: "Bind 1", saql: "u.id == @id && u.name == @name", wantRebuild: `u.id == @id AND u.name == @name`, wantValue: true},
// {name: "Bind 2", saql: "u.id == CONCAT('prefix', @id, 'suffix') && u.name == @name", wantRebuild: `u.id == CONCAT('prefix', @id, 'suffix') AND u.name == @name`, wantValue: false},
// {name: "Bind 3", saql: "doc.@attr.@subattr", wantRebuild: `doc.@attr.@subattr`, wantValue: true, values: `{"doc": {"@attr": {"@subattr": true}}}`},
// {name: "Bind 4", saql: "doc[@attr][@subattr]", wantRebuild: `doc[@attr][@subattr]`, wantValue: true, values: `{"doc": {"@attr": {"@subattr": true}}}`},
// https://www.arangodb.com/docs/3.7/aql/fundamentals-type-value-order.html
{name: "Compare 7", saql: `null < false`, wantRebuild: `null < false`, wantValue: true},
{name: "Compare 8", saql: `null < true`, wantRebuild: `null < true`, wantValue: true},
{name: "Compare 9", saql: `null < 1`, wantRebuild: `null < 1`, wantValue: true},
{name: "Compare 10", saql: `null < ''`, wantRebuild: `null < ""`, wantValue: true},
{name: "Compare 11", saql: `null < ' '`, wantRebuild: `null < " "`, wantValue: true},
{name: "Compare 12", saql: `null < '3'`, wantRebuild: `null < "3"`, wantValue: true},
{name: "Compare 13", saql: `null < 'abc'`, wantRebuild: `null < "abc"`, wantValue: true},
{name: "Compare 14", saql: `null < []`, wantRebuild: `null < []`, wantValue: true},
{name: "Compare 15", saql: `null < {}`, wantRebuild: `null < {}`, wantValue: true},
{name: "Compare 16", saql: `false < true`, wantRebuild: `false < true`, wantValue: true},
{name: "Compare 17", saql: `false < 5`, wantRebuild: `false < 5`, wantValue: true},
{name: "Compare 18", saql: `false < ''`, wantRebuild: `false < ""`, wantValue: true},
{name: "Compare 19", saql: `false < ' '`, wantRebuild: `false < " "`, wantValue: true},
{name: "Compare 20", saql: `false < '7'`, wantRebuild: `false < "7"`, wantValue: true},
{name: "Compare 21", saql: `false < 'abc'`, wantRebuild: `false < "abc"`, wantValue: true},
{name: "Compare 22", saql: `false < []`, wantRebuild: `false < []`, wantValue: true},
{name: "Compare 23", saql: `false < {}`, wantRebuild: `false < {}`, wantValue: true},
{name: "Compare 24", saql: `true < 9`, wantRebuild: `true < 9`, wantValue: true},
{name: "Compare 25", saql: `true < ''`, wantRebuild: `true < ""`, wantValue: true},
{name: "Compare 26", saql: `true < ' '`, wantRebuild: `true < " "`, wantValue: true},
{name: "Compare 27", saql: `true < '11'`, wantRebuild: `true < "11"`, wantValue: true},
{name: "Compare 28", saql: `true < 'abc'`, wantRebuild: `true < "abc"`, wantValue: true},
{name: "Compare 29", saql: `true < []`, wantRebuild: `true < []`, wantValue: true},
{name: "Compare 30", saql: `true < {}`, wantRebuild: `true < {}`, wantValue: true},
{name: "Compare 31", saql: `13 < ''`, wantRebuild: `13 < ""`, wantValue: true},
{name: "Compare 32", saql: `15 < ' '`, wantRebuild: `15 < " "`, wantValue: true},
{name: "Compare 33", saql: `17 < '18'`, wantRebuild: `17 < "18"`, wantValue: true},
{name: "Compare 34", saql: `21 < 'abc'`, wantRebuild: `21 < "abc"`, wantValue: true},
{name: "Compare 35", saql: `23 < []`, wantRebuild: `23 < []`, wantValue: true},
{name: "Compare 36", saql: `25 < {}`, wantRebuild: `25 < {}`, wantValue: true},
{name: "Compare 37", saql: `'' < ' '`, wantRebuild: `"" < " "`, wantValue: true},
{name: "Compare 38", saql: `'' < '27'`, wantRebuild: `"" < "27"`, wantValue: true},
{name: "Compare 39", saql: `'' < 'abc'`, wantRebuild: `"" < "abc"`, wantValue: true},
{name: "Compare 40", saql: `'' < []`, wantRebuild: `"" < []`, wantValue: true},
{name: "Compare 41", saql: `'' < {}`, wantRebuild: `"" < {}`, wantValue: true},
{name: "Compare 42", saql: `[] < {}`, wantRebuild: `[] < {}`, wantValue: true},
{name: "Compare 43", saql: `[] < [29]`, wantRebuild: `[] < [29]`, wantValue: true},
{name: "Compare 44", saql: `[1] < [2]`, wantRebuild: `[1] < [2]`, wantValue: true},
{name: "Compare 45", saql: `[1, 2] < [2]`, wantRebuild: `[1, 2] < [2]`, wantValue: true},
{name: "Compare 46", saql: `[99, 99] < [100]`, wantRebuild: `[99, 99] < [100]`, wantValue: true},
{name: "Compare 47", saql: `[false] < [true]`, wantRebuild: `[false] < [true]`, wantValue: true},
{name: "Compare 48", saql: `[false, 1] < [false, '']`, wantRebuild: `[false, 1] < [false, ""]`, wantValue: true},
{name: "Compare 49", saql: `{} < {"a": 1}`, wantRebuild: `{} < {"a": 1}`, wantValue: true},
{name: "Compare 50", saql: `{} == {"a": null}`, wantRebuild: `{} == {"a": null}`, wantValue: true},
{name: "Compare 51", saql: `{"a": 1} < {"a": 2}`, wantRebuild: `{"a": 1} < {"a": 2}`, wantValue: true},
{name: "Compare 52", saql: `{"b": 1} < {"a": 0}`, wantRebuild: `{"b": 1} < {"a": 0}`, wantValue: true},
{name: "Compare 53", saql: `{"a": {"c": true}} < {"a": {"c": 0}}`, wantRebuild: `{"a": {"c": true}} < {"a": {"c": 0}}`, wantValue: true},
{name: "Compare 54", saql: `{"a": {"c": true, "a": 0}} < {"a": {"c": false, "a": 1}}`, wantRebuild: `{"a": {"c": true, "a": 0}} < {"a": {"c": false, "a": 1}}`, wantValue: true},
{name: "Compare 55", saql: `{"a": 1, "b": 2} == {"b": 2, "a": 1}`, wantRebuild: `{"a": 1, "b": 2} == {"b": 2, "a": 1}`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html
{name: "Compare 56", saql: `0 == null`, wantRebuild: `0 == null`, wantValue: false},
{name: "Compare 57", saql: `1 > 0`, wantRebuild: `1 > 0`, wantValue: true},
{name: "Compare 58", saql: `true != null`, wantRebuild: `true != null`, wantValue: true},
{name: "Compare 59", saql: `45 <= "yikes!"`, wantRebuild: `45 <= "yikes!"`, wantValue: true},
{name: "Compare 60", saql: `65 != "65"`, wantRebuild: `65 != "65"`, wantValue: true},
{name: "Compare 61", saql: `65 == 65`, wantRebuild: `65 == 65`, wantValue: true},
{name: "Compare 62", saql: `1.23 > 1.32`, wantRebuild: `1.23 > 1.32`, wantValue: false},
{name: "Compare 63", saql: `1.5 IN [2, 3, 1.5]`, wantRebuild: `1.5 IN [2, 3, 1.5]`, wantValue: true},
{name: "Compare 64", saql: `"foo" IN null`, wantRebuild: `"foo" IN null`, wantValue: false},
{name: "Compare 65", saql: `42 NOT IN [17, 40, 50]`, wantRebuild: `42 NOT IN [17, 40, 50]`, wantValue: true},
{name: "Compare 66", saql: `"abc" == "abc"`, wantRebuild: `"abc" == "abc"`, wantValue: true},
{name: "Compare 67", saql: `"abc" == "ABC"`, wantRebuild: `"abc" == "ABC"`, wantValue: false},
{name: "Compare 68", saql: `"foo" LIKE "f%"`, wantRebuild: `"foo" LIKE "f%"`, wantValue: true},
{name: "Compare 69", saql: `"foo" NOT LIKE "f%"`, wantRebuild: `"foo" NOT LIKE "f%"`, wantValue: false},
{name: "Compare 70", saql: `"foo" =~ "^f[o].$"`, wantRebuild: `"foo" =~ "^f[o].$"`, wantValue: true},
{name: "Compare 71", saql: `"foo" !~ "[a-z]+bar$"`, wantRebuild: `"foo" !~ "[a-z]+bar$"`, wantValue: true},
{name: "Compare 72", saql: `"abc" LIKE "a%"`, wantRebuild: `"abc" LIKE "a%"`, wantValue: true},
{name: "Compare 73", saql: `"abc" LIKE "_bc"`, wantRebuild: `"abc" LIKE "_bc"`, wantValue: true},
{name: "Compare 74", saql: `"a_b_foo" LIKE "a\\_b\\_foo"`, wantRebuild: `"a_b_foo" LIKE "a\\_b\\_foo"`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#array-comparison-operators
{name: "Compare Array 1", saql: `[1, 2, 3] ALL IN [2, 3, 4]`, wantRebuild: `[1, 2, 3] ALL IN [2, 3, 4]`, wantValue: false},
{name: "Compare Array 2", saql: `[1, 2, 3] ALL IN [1, 2, 3]`, wantRebuild: `[1, 2, 3] ALL IN [1, 2, 3]`, wantValue: true},
{name: "Compare Array 3", saql: `[1, 2, 3] NONE IN [3]`, wantRebuild: `[1, 2, 3] NONE IN [3]`, wantValue: false},
{name: "Compare Array 4", saql: `[1, 2, 3] NONE IN [23, 42]`, wantRebuild: `[1, 2, 3] NONE IN [23, 42]`, wantValue: true},
{name: "Compare Array 5", saql: `[1, 2, 3] ANY IN [4, 5, 6]`, wantRebuild: `[1, 2, 3] ANY IN [4, 5, 6]`, wantValue: false},
{name: "Compare Array 6", saql: `[1, 2, 3] ANY IN [1, 42]`, wantRebuild: `[1, 2, 3] ANY IN [1, 42]`, wantValue: true},
{name: "Compare Array 7", saql: `[1, 2, 3] ANY == 2`, wantRebuild: `[1, 2, 3] ANY == 2`, wantValue: true},
{name: "Compare Array 8", saql: `[1, 2, 3] ANY == 4`, wantRebuild: `[1, 2, 3] ANY == 4`, wantValue: false},
{name: "Compare Array 9", saql: `[1, 2, 3] ANY > 0`, wantRebuild: `[1, 2, 3] ANY > 0`, wantValue: true},
{name: "Compare Array 10", saql: `[1, 2, 3] ANY <= 1`, wantRebuild: `[1, 2, 3] ANY <= 1`, wantValue: true},
{name: "Compare Array 11", saql: `[1, 2, 3] NONE < 99`, wantRebuild: `[1, 2, 3] NONE < 99`, wantValue: false},
{name: "Compare Array 12", saql: `[1, 2, 3] NONE > 10`, wantRebuild: `[1, 2, 3] NONE > 10`, wantValue: true},
{name: "Compare Array 13", saql: `[1, 2, 3] ALL > 2`, wantRebuild: `[1, 2, 3] ALL > 2`, wantValue: false},
{name: "Compare Array 14", saql: `[1, 2, 3] ALL > 0`, wantRebuild: `[1, 2, 3] ALL > 0`, wantValue: true},
{name: "Compare Array 15", saql: `[1, 2, 3] ALL >= 3`, wantRebuild: `[1, 2, 3] ALL >= 3`, wantValue: false},
{name: "Compare Array 16", saql: `["foo", "bar"] ALL != "moo"`, wantRebuild: `["foo", "bar"] ALL != "moo"`, wantValue: true},
{name: "Compare Array 17", saql: `["foo", "bar"] NONE == "bar"`, wantRebuild: `["foo", "bar"] NONE == "bar"`, wantValue: false},
{name: "Compare Array 18", saql: `["foo", "bar"] ANY == "foo"`, wantRebuild: `["foo", "bar"] ANY == "foo"`, wantValue: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#logical-operators
{name: "Logical 1", saql: "active == true OR age < 39", wantRebuild: "active == true OR age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 2", saql: "active == true || age < 39", wantRebuild: "active == true OR age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 3", saql: "active == true AND age < 39", wantRebuild: "active == true AND age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 4", saql: "active == true && age < 39", wantRebuild: "active == true AND age < 39", wantValue: true, values: `{"active": true, "age": 4}`},
{name: "Logical 5", saql: "!active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 6", saql: "NOT active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 7", saql: "not active", wantRebuild: "NOT active", wantValue: false, values: `{"active": true}`},
{name: "Logical 8", saql: "NOT NOT active", wantRebuild: "NOT NOT active", wantValue: true, values: `{"active": true}`},
{name: "Logical 9", saql: `u.age > 15 && u.address.city != ""`, wantRebuild: `u.age > 15 AND u.address.city != ""`, wantValue: false, values: `{"u": {"age": 2, "address": {"city": "Munich"}}}`},
{name: "Logical 10", saql: `true || false`, wantRebuild: `true OR false`, wantValue: true},
{name: "Logical 11", saql: `NOT u.isInvalid`, wantRebuild: `NOT u.isInvalid`, wantValue: false, values: `{"u": {"isInvalid": true}}`},
{name: "Logical 12", saql: `1 || ! 0`, wantRebuild: `1 OR NOT 0`, wantValue: 1},
{name: "Logical 13", saql: `25 > 1 && 42 != 7`, wantRebuild: `25 > 1 AND 42 != 7`, wantValue: true},
{name: "Logical 14", saql: `22 IN [23, 42] || 23 NOT IN [22, 7]`, wantRebuild: `22 IN [23, 42] OR 23 NOT IN [22, 7]`, wantValue: true},
{name: "Logical 15", saql: `25 != 25`, wantRebuild: `25 != 25`, wantValue: false},
{name: "Logical 16", saql: `1 || 7`, wantRebuild: `1 OR 7`, wantValue: 1},
// {name: "Logical 17", saql: `null || "foo"`, wantRebuild: `null OR "foo"`, wantValue: "foo"},
{name: "Logical 17", saql: `null || "foo"`, wantRebuild: `null OR d._key IN ["1","2","3"]`, wantValue: "foo", values: `{"d": {"_key": "1"}}`}, // eval != rebuild
{name: "Logical 18", saql: `null && true`, wantRebuild: `null AND true`, wantValue: nil},
{name: "Logical 19", saql: `true && 23`, wantRebuild: `true AND 23`, wantValue: 23},
{name: "Logical 20", saql: "true == (6 < 8)", wantRebuild: "true == (6 < 8)", wantValue: true},
{name: "Logical 21", saql: "true == 6 < 8", wantRebuild: "true == 6 < 8", wantValue: true}, // does not work in go
// https://www.arangodb.com/docs/3.7/aql/operators.html#arithmetic-operators
{name: "Arithmetic 1", saql: `1 + 1`, wantRebuild: `1 + 1`, wantValue: 2},
{name: "Arithmetic 2", saql: `33 - 99`, wantRebuild: `33 - 99`, wantValue: -66},
{name: "Arithmetic 3", saql: `12.4 * 4.5`, wantRebuild: `12.4 * 4.5`, wantValue: 55.8},
{name: "Arithmetic 4", saql: `13.0 / 0.1`, wantRebuild: `13.0 / 0.1`, wantValue: 130.0},
{name: "Arithmetic 5", saql: `23 % 7`, wantRebuild: `23 % 7`, wantValue: 2},
{name: "Arithmetic 6", saql: `-15`, wantRebuild: `-15`, wantValue: -15},
{name: "Arithmetic 7", saql: `+9.99`, wantRebuild: `9.99`, wantValue: 9.99},
{name: "Arithmetic 8", saql: `1 + "a"`, wantRebuild: `1 + "a"`, wantValue: 1},
{name: "Arithmetic 9", saql: `1 + "99"`, wantRebuild: `1 + "99"`, wantValue: 100},
{name: "Arithmetic 10", saql: `1 + null`, wantRebuild: `1 + null`, wantValue: 1},
{name: "Arithmetic 11", saql: `null + 1`, wantRebuild: `null + 1`, wantValue: 1},
{name: "Arithmetic 12", saql: `3 + []`, wantRebuild: `3 + []`, wantValue: 3},
{name: "Arithmetic 13", saql: `24 + [2]`, wantRebuild: `24 + [2]`, wantValue: 26},
{name: "Arithmetic 14", saql: `24 + [2, 4]`, wantRebuild: `24 + [2, 4]`, wantValue: 24},
{name: "Arithmetic 15", saql: `25 - null`, wantRebuild: `25 - null`, wantValue: 25},
{name: "Arithmetic 16", saql: `17 - true`, wantRebuild: `17 - true`, wantValue: 16},
{name: "Arithmetic 17", saql: `23 * {}`, wantRebuild: `23 * {}`, wantValue: 0},
{name: "Arithmetic 18", saql: `5 * [7]`, wantRebuild: `5 * [7]`, wantValue: 35},
{name: "Arithmetic 19", saql: `24 / "12"`, wantRebuild: `24 / "12"`, wantValue: 2},
{name: "Arithmetic Error 1: Divison by zero", saql: `1 / 0`, wantRebuild: `1 / 0`, wantValue: 0},
// https://www.arangodb.com/docs/3.7/aql/operators.html#ternary-operator
{name: "Ternary 1", saql: `u.age > 15 || u.active == true ? u.userId : null`, wantRebuild: `u.age > 15 OR u.active == true ? u.userId : null`, wantValue: 45, values: `{"u": {"active": true, "age": 2, "userId": 45}}`},
{name: "Ternary 2", saql: `u.value ? : 'value is null, 0 or not present'`, wantRebuild: `u.value ? : "value is null, 0 or not present"`, wantValue: "value is null, 0 or not present", values: `{"u": {"value": 0}}`},
// https://www.arangodb.com/docs/3.7/aql/operators.html#range-operator
{name: "Range 1", saql: `2010..2013`, wantRebuild: `2010..2013`, wantValue: []float64{2010, 2011, 2012, 2013}},
// {"Array operators 1", `u.friends[*].name`, `u.friends[*].name`, false},
// Security
{name: "Security 1", saql: `doc.value == 1 || true REMOVE doc IN collection //`, wantParseErr: true},
{name: "Security 2", saql: `doc.value == 1 || true INSERT {foo: "bar"} IN collection //`, wantParseErr: true},
// https://www.arangodb.com/docs/3.7/aql/operators.html#operator-precedence
{name: "Precendence", saql: `2 > 15 && "a" != ""`, wantRebuild: `2 > 15 AND "a" != ""`, wantValue: false},
}
for _, tt := range tests {
parser := &Parser{
Searcher: &MockSearcher{},
}
t.Run(tt.name, func(t *testing.T) {
expr, err := parser.Parse(tt.saql)
if (err != nil) != tt.wantParseErr {
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
if expr != nil {
t.Error(expr.String())
}
return
}
if err != nil {
return
}
got, err := expr.String()
if (err != nil) != tt.wantRebuildErr {
t.Error(expr.String())
t.Errorf("String() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
if got != tt.wantRebuild {
t.Errorf("String() got = %v, want %v", got, tt.wantRebuild)
}
var myJson map[string]interface{}
if tt.values != "" {
err = json.Unmarshal([]byte(tt.values), &myJson)
if err != nil {
t.Fatal(err)
}
}
value, err := expr.Eval(myJson)
if (err != nil) != tt.wantEvalErr {
t.Error(expr.String())
t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantParseErr)
return
}
if err != nil {
return
}
wantValue := tt.wantValue
if i, ok := wantValue.(int); ok {
wantValue = float64(i)
}
if !reflect.DeepEqual(value, wantValue) {
t.Error(expr.String())
t.Errorf("Eval() got = %T %#v, want %T %#v", value, value, wantValue, wantValue)
}
})
}
}

154
caql/set.go Normal file
View File

@@ -0,0 +1,154 @@
// Adapted from https://github.com/badgerodon/collections under the MIT License
// Original License:
//
// Copyright (c) 2012 Caleb Doxsey
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package caql
import "sort"
type (
	// Set is an unordered collection of unique values, implemented as a
	// map whose keys are the elements.
	Set struct {
		hash map[interface{}]nothing
	}

	// nothing is a zero-size placeholder used as the map value type.
	nothing struct{}
)
// New returns a Set containing the given initial elements.
func New(initial ...interface{}) *Set {
	set := &Set{hash: map[interface{}]nothing{}}
	for _, element := range initial {
		set.hash[element] = nothing{}
	}
	return set
}
// Difference returns a new Set holding the elements of s that are not
// present in set.
func (s *Set) Difference(set *Set) *Set {
	result := &Set{hash: make(map[interface{}]nothing)}
	for element := range s.hash {
		if _, found := set.hash[element]; found {
			continue
		}
		result.hash[element] = nothing{}
	}
	return result
}
// Do invokes f once for every element of the set, in no particular order.
func (s *Set) Do(f func(interface{})) {
	for element := range s.hash {
		f(element)
	}
}
// Has reports whether element is a member of the set.
func (s *Set) Has(element interface{}) bool {
	_, ok := s.hash[element]
	return ok
}
// Insert adds element to the set; inserting an element that is already
// present is a no-op.
func (s *Set) Insert(element interface{}) {
	s.hash[element] = nothing{}
}
// Intersection returns a new Set with the elements common to s and set.
func (s *Set) Intersection(set *Set) *Set {
	result := &Set{hash: make(map[interface{}]nothing)}
	for element := range s.hash {
		if _, found := set.hash[element]; found {
			result.hash[element] = nothing{}
		}
	}
	return result
}
// Len returns the number of elements in the set.
func (s *Set) Len() int {
	return len(s.hash)
}
// ProperSubsetOf reports whether s is a strict subset of set: every
// element of s is in set, and set has at least one element not in s.
func (s *Set) ProperSubsetOf(set *Set) bool {
	if !s.SubsetOf(set) {
		return false
	}
	return s.Len() < set.Len()
}
// Remove deletes element from the set; removing an element that is not
// present is a no-op.
func (s *Set) Remove(element interface{}) {
	delete(s.hash, element)
}
// Minus returns a new Set holding the elements of s that are not in set.
// It is semantically identical to Difference and is kept for API
// compatibility.
func (s *Set) Minus(set *Set) *Set {
	// Delegating to Difference avoids the previous copy-then-delete
	// implementation, which also performed a needless sort by iterating
	// set.Values() instead of set.hash.
	return s.Difference(set)
}
// SubsetOf reports whether every element of s is also an element of set.
func (s *Set) SubsetOf(set *Set) bool {
	// A larger set can never be a subset of a smaller one.
	if len(s.hash) > len(set.hash) {
		return false
	}
	for element := range s.hash {
		if _, found := set.hash[element]; !found {
			return false
		}
	}
	return true
}
// Union returns a new Set containing every element that appears in s,
// in set, or in both.
func (s *Set) Union(set *Set) *Set {
	result := &Set{hash: make(map[interface{}]nothing)}
	for _, source := range []*Set{s, set} {
		for element := range source.hash {
			result.hash[element] = nothing{}
		}
	}
	return result
}
// Values returns the elements of the set as a slice, sorted with the
// package-level lt comparator (presumably the CAQL value ordering —
// confirm against the lt implementation).
func (s *Set) Values() []interface{} {
	values := []interface{}{}
	for k := range s.hash {
		values = append(values, k)
	}
	sort.Slice(values, func(i, j int) bool { return lt(values[i], values[j]) })
	return values
}

96
caql/set_test.go Normal file
View File

@@ -0,0 +1,96 @@
// Adapted from https://github.com/badgerodon/collections under the MIT License
// Original License:
//
// Copyright (c) 2012 Caleb Doxsey
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package caql
import (
"testing"
)
// Test exercises the basic Set operations end to end: insertion,
// membership, removal, Difference, Intersection, Union, and the
// subset predicates.
func Test(t *testing.T) {
	s := New()
	s.Insert(5)
	if s.Len() != 1 {
		t.Errorf("Length should be 1")
	}
	if !s.Has(5) {
		t.Errorf("Membership test failed")
	}
	s.Remove(5)
	if s.Len() != 0 {
		t.Errorf("Length should be 0")
	}
	if s.Has(5) {
		t.Errorf("The set should be empty")
	}
	// Difference: elements of s1 that are not in s2.
	s1 := New(1, 2, 3, 4, 5, 6)
	s2 := New(4, 5, 6)
	s3 := s1.Difference(s2)
	if s3.Len() != 3 {
		t.Errorf("Length should be 3")
	}
	if !(s3.Has(1) && s3.Has(2) && s3.Has(3)) {
		t.Errorf("Set should only contain 1, 2, 3")
	}
	// Intersection: elements common to s1 and s2.
	s3 = s1.Intersection(s2)
	if s3.Len() != 3 {
		t.Errorf("Length should be 3 after intersection")
	}
	if !(s3.Has(4) && s3.Has(5) && s3.Has(6)) {
		t.Errorf("Set should contain 4, 5, 6")
	}
	// Union: all elements of both sets.
	s4 := New(7, 8, 9)
	s3 = s2.Union(s4)
	if s3.Len() != 6 {
		t.Errorf("Length should be 6 after union")
	}
	if !(s3.Has(7)) {
		t.Errorf("Set should contain 4, 5, 6, 7, 8, 9")
	}
	// Subset: every set is a (non-proper) subset of itself.
	if !s1.SubsetOf(s1) {
		t.Errorf("set should be a subset of itself")
	}
	// Proper Subset: a set is never a proper subset of itself.
	if s1.ProperSubsetOf(s1) {
		t.Errorf("set should not be a subset of itself")
	}
}

79
caql/unquote.go Normal file
View File

@@ -0,0 +1,79 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"strconv"
"strings"
"unicode/utf8"
)
// unquote interprets s as a single-quoted, double-quoted,
// or backquoted string literal, returning the string value
// that s quotes. Adapted from strconv.Unquote; unlike Go, it also
// accepts multi-character single-quoted string literals.
// It returns strconv.ErrSyntax for malformed input.
func unquote(s string) (string, error) {
	n := len(s)
	// A quoted literal needs at least an opening and a closing quote.
	if n < 2 {
		return "", strconv.ErrSyntax
	}
	quote := s[0]
	// Opening and closing quote characters must match.
	if quote != s[n-1] {
		return "", strconv.ErrSyntax
	}
	// Strip the surrounding quotes.
	s = s[1 : n-1]
	if quote == '`' {
		// Raw (backquoted) string: no escape processing, but it cannot
		// itself contain a backquote.
		if strings.ContainsRune(s, '`') {
			return "", strconv.ErrSyntax
		}
		if strings.ContainsRune(s, '\r') {
			// Carriage returns are dropped from raw strings.
			// -1 because we know there is at least one \r to remove.
			buf := make([]byte, 0, len(s)-1)
			for i := 0; i < len(s); i++ {
				if s[i] != '\r' {
					buf = append(buf, s[i])
				}
			}
			return string(buf), nil
		}
		return s, nil
	}
	if quote != '"' && quote != '\'' {
		return "", strconv.ErrSyntax
	}
	// Quoted (non-raw) strings may not span lines.
	if strings.ContainsRune(s, '\n') {
		return "", strconv.ErrSyntax
	}
	// Is it trivial? Avoid allocation.
	if !strings.ContainsRune(s, '\\') && !strings.ContainsRune(s, rune(quote)) {
		switch quote {
		case '"', '\'':
			if utf8.ValidString(s) {
				return s, nil
			}
		}
	}
	// Slow path: decode escape sequences one rune at a time.
	var runeTmp [utf8.UTFMax]byte
	buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations.
	for len(s) > 0 {
		c, multibyte, ss, err := strconv.UnquoteChar(s, quote)
		if err != nil {
			return "", err
		}
		s = ss
		if c < utf8.RuneSelf || !multibyte {
			buf = append(buf, byte(c))
		} else {
			n := utf8.EncodeRune(runeTmp[:], c)
			buf = append(buf, runeTmp[:n]...)
		}
	}
	return string(buf), nil
}

125
caql/unquote_test.go Normal file
View File

@@ -0,0 +1,125 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"strconv"
"testing"
)
// quoteTest describes a raw string and its quoted representations.
// Only in and out are read by TestUnquote (which runs the table
// backward through unquote); ascii and graphic are not exercised by
// the visible tests.
type quoteTest struct {
	in      string // raw string value
	out     string // double-quoted form of in
	ascii   string // ASCII-only quoted form
	graphic string // graphic-rune-preserving quoted form
}
// quotetests pairs raw strings with their canonical double-quoted
// representations. TestUnquote feeds the quoted form back through
// unquote and expects the raw value.
// All entries now use keyed fields for consistency (previously only the
// first entry did).
var quotetests = []quoteTest{
	{in: "\a\b\f\r\n\t\v", out: `"\a\b\f\r\n\t\v"`, ascii: `"\a\b\f\r\n\t\v"`, graphic: `"\a\b\f\r\n\t\v"`},
	{in: "\\", out: `"\\"`, ascii: `"\\"`, graphic: `"\\"`},
	{in: "abc\xffdef", out: `"abc\xffdef"`, ascii: `"abc\xffdef"`, graphic: `"abc\xffdef"`},
	{in: "\u263a", out: `"☺"`, ascii: `"\u263a"`, graphic: `"☺"`},
	{in: "\U0010ffff", out: `"\U0010ffff"`, ascii: `"\U0010ffff"`, graphic: `"\U0010ffff"`},
	{in: "\x04", out: `"\x04"`, ascii: `"\x04"`, graphic: `"\x04"`},
	// Some non-printable but graphic runes. Final column is double-quoted.
	{in: "!\u00a0!\u2000!\u3000!", out: `"!\u00a0!\u2000!\u3000!"`, ascii: `"!\u00a0!\u2000!\u3000!"`, graphic: "\"!\u00a0!\u2000!\u3000!\""},
}
// unQuoteTest maps a quoted literal to the string it should unquote to.
type unQuoteTest struct {
	in  string // quoted input literal
	out string // expected result of unquote(in)
}
// unquotetests maps quoted literals to the value unquote must produce.
var unquotetests = []unQuoteTest{
	{`""`, ""},
	{`"a"`, "a"},
	{`"abc"`, "abc"},
	{`"☺"`, "☺"},
	{`"hello world"`, "hello world"},
	{`"\xFF"`, "\xFF"},
	{`"\377"`, "\377"},
	{`"\u1234"`, "\u1234"},
	{`"\U00010111"`, "\U00010111"},
	{`"\U0001011111"`, "\U0001011111"},
	{`"\a\b\f\n\r\t\v\\\""`, "\a\b\f\n\r\t\v\\\""},
	{`"'"`, "'"},
	{`'a'`, "a"},
	{`'☹'`, "☹"},
	{`'\a'`, "\a"},
	{`'\x10'`, "\x10"},
	{`'\377'`, "\377"},
	{`'\u1234'`, "\u1234"},
	{`'\U00010111'`, "\U00010111"},
	{`'\t'`, "\t"},
	{`' '`, " "},
	{`'\''`, "'"},
	{`'"'`, "\""},
	{"``", ``},
	{"`a`", `a`},
	{"`abc`", `abc`},
	// Fixed: a raw (backquoted) string passes its content through
	// unchanged, so the expected output is "☺", not the empty string.
	{"`☺`", `☺`},
	{"`hello world`", `hello world`},
	{"`\\xFF`", `\xFF`},
	{"`\\377`", `\377`},
	{"`\\`", `\`},
	{"`\n`", "\n"},
	{"` `", ` `},
	// NOTE(review): duplicate of the previous entry — the upstream table
	// uses a tab here; confirm the intended whitespace character.
	{"` `", ` `},
	// Carriage returns are stripped from raw strings.
	{"`a\rb`", "ab"},
}
// misquoted lists malformed literals; unquote must reject each one
// with strconv.ErrSyntax.
var misquoted = []string{
	``,
	`"`,
	`"a`,
	`"'`,
	`b"`,
	`"\"`,
	`"\9"`,
	`"\19"`,
	`"\129"`,
	`'\'`,
	`'\9'`,
	`'\19'`,
	`'\129'`,
	// `'ab'`, // multi-character single-quoted strings are valid for this unquote, unlike Go's
	`"\x1!"`,
	`"\U12345678"`,
	`"\z"`,
	"`",
	"`xxx",
	"`\"",
	`"\'"`,
	`'\"'`,
	"\"\n\"",
	"\"\\n\n\"",
	"'\n'",
}
// TestUnquote verifies unquote against the unquote and quote tables and
// checks that every malformed literal is rejected with ErrSyntax.
func TestUnquote(t *testing.T) {
	for _, tt := range unquotetests {
		if out, err := unquote(tt.in); err != nil || out != tt.out {
			t.Errorf("unquote(%#q) = %q, %v want %q, nil", tt.in, out, err, tt.out)
		}
	}
	// run the quote tests too, backward
	for _, tt := range quotetests {
		// Fixed: the error is now part of the condition; previously a
		// non-nil error alongside a correct value passed silently even
		// though the message promised "want %q, nil".
		if in, err := unquote(tt.out); err != nil || in != tt.in {
			t.Errorf("unquote(%#q) = %q, %v, want %q, nil", tt.out, in, err, tt.in)
		}
	}
	for _, s := range misquoted {
		if out, err := unquote(s); out != "" || err != strconv.ErrSyntax {
			t.Errorf("unquote(%#q) = %q, %v want %q, %v", s, out, err, "", strconv.ErrSyntax)
		}
	}
}

155
caql/wildcard.go Normal file
View File

@@ -0,0 +1,155 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import (
"errors"
"strings"
"unicode/utf8"
)
// ErrBadPattern indicates a pattern was malformed. It is the only error
// returned by match (surfaced from matchChunk).
var ErrBadPattern = errors.New("syntax error in pattern")
// match reports whether name matches the shell pattern.
// Adapted from Go's path.Match, with SQL-LIKE wildcards:
//
// pattern:
//	{ term }
// term:
//	'%' matches any sequence of non-/ characters
//	'_' matches any single non-/ character
//	c matches character c (c != '%', '_', '\\')
//	'\\' c matches character c
//
// match requires pattern to match all of name, not just a substring.
// The only possible returned error is ErrBadPattern, when pattern
// is malformed.
//
func match(pattern, name string) (matched bool, err error) {
Pattern:
	for len(pattern) > 0 {
		var star bool
		var chunk string
		star, chunk, pattern = scanChunk(pattern)
		if star && chunk == "" {
			// Trailing % matches rest of string unless it has a /.
			return !strings.ContainsRune(name, '/'), nil
		}
		// Look for match at current position.
		t, ok, err := matchChunk(chunk, name)
		// if we're the last chunk, make sure we've exhausted the name
		// otherwise we'll give a false result even if we could still match
		// using the star
		if ok && (len(t) == 0 || len(pattern) > 0) {
			name = t
			continue
		}
		if err != nil {
			return false, err
		}
		if star {
			// The chunk is preceded by '%': retry the chunk at each later
			// position in name, skipping i+1 bytes.
			// Cannot skip /.
			for i := 0; i < len(name) && name[i] != '/'; i++ {
				t, ok, err := matchChunk(chunk, name[i+1:])
				if ok {
					// if we're the last chunk, make sure we exhausted the name
					if len(pattern) == 0 && len(t) > 0 {
						continue
					}
					name = t
					continue Pattern
				}
				if err != nil {
					return false, err
				}
			}
		}
		// Before returning false with no error,
		// check that the remainder of the pattern is syntactically valid.
		for len(pattern) > 0 {
			_, chunk, pattern = scanChunk(pattern)
			if _, _, err := matchChunk(chunk, ""); err != nil {
				return false, err
			}
		}
		return false, nil
	}
	// Pattern consumed: it matches only if name is fully consumed too.
	return len(name) == 0, nil
}
// scanChunk splits pattern into a leading-wildcard flag, the next literal
// chunk (everything up to the next unescaped '%'), and the remaining
// pattern.
func scanChunk(pattern string) (star bool, chunk, rest string) {
	// Consume any run of leading '%' wildcards.
	start := 0
	for start < len(pattern) && pattern[start] == '%' {
		start++
	}
	star = start > 0
	pattern = pattern[start:]

	// Advance to the next unescaped '%'; a backslash keeps the character
	// it escapes inside the chunk (bad escapes are diagnosed later, in
	// matchChunk).
	end := 0
	for end < len(pattern) && pattern[end] != '%' {
		if pattern[end] == '\\' && end+1 < len(pattern) {
			end++
		}
		end++
	}
	return star, pattern[:end], pattern[end:]
}
// matchChunk checks whether chunk matches the beginning of s.
// If so, it returns the remainder of s (after the match).
// Chunk is all single-character operators: literals and '_';
// '%' never appears inside a chunk (scanChunk splits on it).
func matchChunk(chunk, s string) (rest string, ok bool, err error) {
	// failed records whether the match has failed.
	// After the match fails, the loop continues on processing chunk,
	// checking that the pattern is well-formed but no longer reading s.
	failed := false
	for len(chunk) > 0 {
		if !failed && len(s) == 0 {
			failed = true
		}
		switch chunk[0] {
		case '_':
			// '_' matches any single non-/ rune.
			if !failed {
				if s[0] == '/' {
					failed = true
				}
				_, n := utf8.DecodeRuneInString(s)
				s = s[n:]
			}
			chunk = chunk[1:]
		case '\\':
			// A backslash must be followed by the character it escapes.
			chunk = chunk[1:]
			if len(chunk) == 0 {
				return "", false, ErrBadPattern
			}
			fallthrough
		default:
			// Literal byte-for-byte comparison.
			if !failed {
				if chunk[0] != s[0] {
					failed = true
				}
				s = s[1:]
			}
			chunk = chunk[1:]
		}
	}
	if failed {
		return "", false, nil
	}
	return s, true, nil
}

50
caql/wildcard_test.go Normal file
View File

@@ -0,0 +1,50 @@
// Adapted from https://github.com/golang/go
// Original License:
//
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the https://go.dev/LICENSE file.
package caql
import "testing"
// MatchTest is one fixture for TestMatch: a pattern, an input string,
// and the expected result and error.
type MatchTest struct {
	pattern, s string
	match      bool
	err        error
}

// matchTests covers literals, '%' sequences, '_' single-rune wildcards
// (including multi-byte runes), '/' boundaries, escaping, and the
// ErrBadPattern case for a trailing backslash.
var matchTests = []MatchTest{
	{"abc", "abc", true, nil},
	{"%", "abc", true, nil},
	{"%c", "abc", true, nil},
	{"a%", "a", true, nil},
	{"a%", "abc", true, nil},
	{"a%", "ab/c", false, nil},
	{"a%/b", "abc/b", true, nil},
	{"a%/b", "a/c/b", false, nil},
	{"a%b%c%d%e%/f", "axbxcxdxe/f", true, nil},
	{"a%b%c%d%e%/f", "axbxcxdxexxx/f", true, nil},
	{"a%b%c%d%e%/f", "axbxcxdxe/xxx/f", false, nil},
	{"a%b%c%d%e%/f", "axbxcxdxexxx/fff", false, nil},
	{"a%b_c%x", "abxbbxdbxebxczzx", true, nil},
	{"a%b_c%x", "abxbbxdbxebxczzy", false, nil},
	{"a\\%b", "a%b", true, nil},
	{"a\\%b", "ab", false, nil},
	{"a_b", "a☺b", true, nil},
	{"a___b", "a☺b", false, nil},
	{"a_b", "a/b", false, nil},
	{"a%b", "a/b", false, nil},
	{"\\", "a", false, ErrBadPattern},
	{"%x", "xxx", true, nil},
}

// TestMatch runs match against every fixture in matchTests and reports
// any mismatch in either the boolean result or the error.
func TestMatch(t *testing.T) {
	for _, tt := range matchTests {
		ok, err := match(tt.pattern, tt.s)
		if ok != tt.match || err != tt.err {
			t.Errorf("match(%#q, %#q) = %v, %v want %v, %v", tt.pattern, tt.s, ok, err, tt.match, tt.err)
		}
	}
}

65
cmd/catalyst-dev/main.go Normal file
View File

@@ -0,0 +1,65 @@
package main
import (
"context"
"log"
"net/http/httputil"
"net/url"
"github.com/arangodb/go-driver"
"github.com/gin-contrib/sessions"
"github.com/gin-contrib/sessions/cookie"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/test"
)
// main starts a development instance of Catalyst: the database is
// cleared after connecting, demo data is seeded, and any route the API
// does not handle is proxied to a local UI dev server on port 8080.
func main() {
	log.SetFlags(log.LstdFlags | log.Lshortfile)

	config, err := cmd.ParseCatalystConfig()
	if err != nil {
		log.Fatal(err)
	}

	// create app and clear db after start
	theCatalyst, err := catalyst.New(&hooks.Hooks{
		DatabaseAfterConnectFuncs: []func(ctx context.Context, client driver.Client, name string){test.Clear},
	}, config)
	if err != nil {
		log.Fatal(err)
	}

	// Seed demo data, acting as an admin "demo" user.
	demoUser := &models.UserResponse{ID: "demo", Roles: []string{role.Admin}}
	ctx := busdb.UserContext(context.Background(), demoUser)
	if err := test.SetupTestData(ctx, theCatalyst.DB); err != nil {
		log.Fatal(err)
	}

	// proxy static requests, behind the same session/auth middleware
	// chain as the regular routes
	theCatalyst.Server.NoRoute(
		sessions.Sessions(catalyst.SessionName, cookie.NewStore(config.Secret)),
		catalyst.Authenticate(theCatalyst.DB, config.Auth),
		catalyst.AuthorizeBlockedUser,
		proxy,
	)

	if err = theCatalyst.Server.RunWithSigHandler(); err != nil {
		log.Fatal(err)
	}
}
// proxy forwards the request to the local UI development server on
// http://localhost:8080 (e.g. a running webpack/vue dev server).
func proxy(ctx *gin.Context) {
	// The target is a constant, known-valid URL, so the parse error is
	// deliberately ignored.
	target, _ := url.Parse("http://localhost:8080")
	reverseProxy := httputil.NewSingleHostReverseProxy(target)
	ctx.Request.Host = ctx.Request.URL.Host
	reverseProxy.ServeHTTP(ctx.Writer, ctx.Request)
}

27
cmd/catalyst/main.go Normal file
View File

@@ -0,0 +1,27 @@
package main
import (
"log"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/cmd"
"github.com/SecurityBrewery/catalyst/hooks"
)
// main starts the Catalyst server using the configuration assembled
// from config files, environment variables and CLI flags.
func main() {
	log.SetFlags(log.LstdFlags | log.Lshortfile)

	config, err := cmd.ParseCatalystConfig()
	if err != nil {
		log.Fatal(err)
	}

	app, err := catalyst.New(&hooks.Hooks{}, config)
	if err != nil {
		log.Fatal(err)
	}

	if err := app.Server.RunWithSigHandler(); err != nil {
		log.Fatal(err)
	}
}

151
cmd/cmd.go Normal file
View File

@@ -0,0 +1,151 @@
package cmd
import (
"fmt"
"github.com/alecthomas/kong"
kongyaml "github.com/alecthomas/kong-yaml"
"github.com/coreos/go-oidc/v3/oidc"
"golang.org/x/oauth2"
"github.com/SecurityBrewery/catalyst"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/role"
"github.com/SecurityBrewery/catalyst/storage"
)
// CLI declares every Catalyst configuration option. Values are merged
// by kong from command-line flags, environment variables (env tags)
// and the config files registered in ParseCatalystConfig.
type CLI struct {
	Secret            string   `env:"SECRET" required:"" help:"A random secret value (can be created with 'openssl rand -hex 32')"`
	ExternalAddress   string   `env:"EXTERNAL_ADDRESS" required:""`
	CatalystAddress   string   `env:"CATALYST_ADDRESS" default:"http://catalyst"`
	OIDCIssuer        string   `env:"OIDC_ISSUER" required:""`
	OIDCClientID      string   `env:"OIDC_CLIENT_ID" default:"catalyst"`
	OIDCClientSecret  string   `env:"OIDC_CLIENT_SECRET" required:""`
	OIDCScopes        []string `env:"OIDC_SCOPES" help:"Additional scopes, ['oidc', 'profile', 'email'] are always added." placeholder:"customscopes"`
	OIDCClaimUsername string   `env:"OIDC_CLAIM_USERNAME" default:"preferred_username" help:"username field in the OIDC claim"`
	OIDCClaimEmail    string   `env:"OIDC_CLAIM_EMAIL" default:"email" help:"email field in the OIDC claim"`
	OIDCClaimName     string   `env:"OIDC_CLAIM_NAME" default:"name" help:"name field in the OIDC claim"`
	AuthBlockNew      bool     `env:"AUTH_BLOCK_NEW" default:"true" help:"Block newly created users"`
	AuthDefaultRoles  []string `env:"AUTH_DEFAULT_ROLES" help:"Default roles for new users"`
	IndexPath         string   `env:"INDEX_PATH" default:"index.bleve" help:"Path for the bleve index"`
	ArangoDBHost      string   `env:"ARANGO_DB_HOST" default:"http://arangodb:8529"`
	ArangoDBUser      string   `env:"ARANGO_DB_USER" default:"root"`
	ArangoDBPassword  string   `env:"ARANGO_DB_PASSWORD" required:""`
	S3Host            string   `env:"S3_HOST" default:"http://minio:9000" name:"s3-host"`
	S3User            string   `env:"S3_USER" default:"minio" name:"s3-user"`
	S3Password        string   `env:"S3_PASSWORD" required:"" name:"s3-password"`
	EmitterIOHost     string   `env:"EMITTER_IO_HOST" default:"tcp://emitter:8080"`
	// NOTE(review): "EmitterIORKey" reads like a typo for EmitterIOKey
	// (the env tag is EMITTER_IO_KEY); renaming would also touch
	// MapConfig, so it is only flagged here.
	EmitterIORKey  string              `env:"EMITTER_IO_KEY" required:""`
	Timeformat     string              `env:"TIMEFORMAT" default:"yyyy-MM-dd HH:mm:ss" help:""`
	ArtifactStates []map[string]string `env:"ARTIFACT_STATES"`
	InitialAPIKey  string              `env:"INITIAL_API_KEY"`
}
// ParseCatalystConfig collects settings from flags, environment
// variables and the JSON/YAML config files below, then maps them to a
// *catalyst.Config. kong.Parse terminates the process on invalid input.
func ParseCatalystConfig() (*catalyst.Config, error) {
	var cli CLI
	kong.Parse(
		&cli,
		kong.Configuration(kong.JSON, "/etc/catalyst.json", ".catalyst.json"),
		kong.Configuration(kongyaml.Loader, "/etc/catalyst.yaml", ".catalyst.yaml"),
	)
	return MapConfig(cli)
}
// MapConfig translates the flat CLI options into the nested
// catalyst.Config consumed by catalyst.New.
func MapConfig(cli CLI) (*catalyst.Config, error) {
	// Every user gets at least the exploded analyst role set, plus any
	// configured default roles.
	roles := role.Explode(role.Analyst)
	roles = append(roles, role.Explodes(cli.AuthDefaultRoles)...)
	roles = role.Explodes(role.Strings(roles))

	artifactStates, err := toTypes(cli.ArtifactStates)
	if err != nil {
		return nil, err
	}
	// Fall back to the built-in unknown/malicious/clean states.
	if len(artifactStates) == 0 {
		artifactStates = []*models.Type{
			{Icon: "mdi-help-circle-outline", ID: "unknown", Name: "Unknown", Color: pointer.String(models.TypeColorInfo)},
			{Icon: "mdi-skull", ID: "malicious", Name: "Malicious", Color: pointer.String(models.TypeColorError)},
			{Icon: "mdi-check", ID: "clean", Name: "Clean", Color: pointer.String(models.TypeColorSuccess)},
		}
	}

	// The base OIDC scopes are always requested; extra scopes are
	// deduplicated against them.
	scopes := unique(append([]string{oidc.ScopeOpenID, "profile", "email"}, cli.OIDCScopes...))

	config := &catalyst.Config{
		IndexPath:       cli.IndexPath,
		DB:              &database.Config{Host: cli.ArangoDBHost, User: cli.ArangoDBUser, Password: cli.ArangoDBPassword},
		Storage:         &storage.Config{Host: cli.S3Host, User: cli.S3User, Password: cli.S3Password},
		Secret:          []byte(cli.Secret),
		ExternalAddress: cli.ExternalAddress,
		Auth: &catalyst.AuthConfig{
			OIDCIssuer:        cli.OIDCIssuer,
			OAuth2:            &oauth2.Config{ClientID: cli.OIDCClientID, ClientSecret: cli.OIDCClientSecret, RedirectURL: cli.ExternalAddress + "/callback", Scopes: scopes},
			OIDCClaimUsername: cli.OIDCClaimUsername,
			OIDCClaimEmail:    cli.OIDCClaimEmail,
			OIDCClaimName:     cli.OIDCClaimName,
			AuthBlockNew:      cli.AuthBlockNew,
			AuthDefaultRoles:  roles,
		},
		Bus: &bus.Config{Host: cli.EmitterIOHost, Key: cli.EmitterIORKey, APIUrl: cli.CatalystAddress + "/api"},
		UISettings: &models.Settings{
			ArtifactStates: artifactStates,
			Timeformat:     cli.Timeformat,
			Version:        catalyst.GetVersion(),
			Tier:           models.SettingsTierCommunity,
		},
		InitialAPIKey: cli.InitialAPIKey,
	}
	return config, nil
}
// toTypes converts configured artifact-state maps into models.Type
// values. Each map must provide "icon", "id" and "name"; "color" is
// optional. On an incomplete entry it returns an error naming that
// entry (previously the error formatted the entire params slice,
// hiding which entry was at fault).
func toTypes(params []map[string]string) ([]*models.Type, error) {
	var types []*models.Type
	for _, param := range params {
		icon, iconOK := param["icon"]
		id, idOK := param["id"]
		name, nameOK := param["name"]
		if !iconOK || !idOK || !nameOK {
			return nil, fmt.Errorf("incomplete type: icon, id and name need to be provided (%v)", param)
		}

		t := &models.Type{Icon: icon, ID: id, Name: name}
		if color, ok := param["color"]; ok {
			t.Color = pointer.String(color)
		}
		types = append(types, t)
	}
	return types, nil
}
// unique returns l with duplicates removed, keeping the first
// occurrence of each entry and preserving the original order.
func unique(l []string) []string {
	seen := make(map[string]bool, len(l))
	var deduped []string
	for _, entry := range l {
		if seen[entry] {
			continue
		}
		seen[entry] = true
		deduped = append(deduped, entry)
	}
	return deduped
}

164
dag/dag.go Normal file
View File

@@ -0,0 +1,164 @@
// Adapted from https://github.com/philopon/go-toposort under the MIT License
// Original License:
//
// Copyright (c) 2017 Hirotomo Moriwaki
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package dag
import (
"errors"
"sort"
)
// Graph is a directed graph over string-named nodes, used to order
// playbook tasks topologically.
type Graph struct {
	nodes   []string                       // insertion-ordered node names (sorted by Toposort)
	outputs map[string]map[string]struct{} // node -> set of direct successors
	// node: number of parents
	inputs map[string]int // node -> in-degree (number of incoming edges)
}

// NewGraph returns an empty Graph ready for AddNode/AddEdge.
func NewGraph() *Graph {
	return &Graph{
		nodes:   []string{},
		inputs:  make(map[string]int),
		outputs: make(map[string]map[string]struct{}),
	}
}
// AddNode registers a new node. It returns an error if a node with the
// same name already exists; in that case the graph is left unchanged.
// (Previously the name was appended to g.nodes before the duplicate
// check, so a rejected duplicate still corrupted the node list and
// later Toposort calls.)
func (g *Graph) AddNode(name string) error {
	if _, exists := g.outputs[name]; exists {
		return errors.New("duplicate detected")
	}
	g.nodes = append(g.nodes, name)
	g.outputs[name] = make(map[string]struct{})
	g.inputs[name] = 0
	return nil
}
// AddNodes registers all given nodes, stopping at the first error.
func (g *Graph) AddNodes(names ...string) error {
	for _, name := range names {
		if err := g.AddNode(name); err != nil {
			return err
		}
	}
	return nil
}

// AddEdge adds a directed edge from -> to.
// NOTE(review): only `from` is checked for existence. An edge to an
// unknown `to` creates a phantom in-degree entry, and that phantom can
// surface in Toposort output — confirm whether callers rely on this
// leniency before tightening the check.
func (g *Graph) AddEdge(from, to string) error {
	m, ok := g.outputs[from]
	if !ok {
		return errors.New("node does not exist")
	}
	m[to] = struct{}{}
	g.inputs[to]++
	return nil
}
// Toposort returns the nodes in topological order using Kahn's
// algorithm. Ties are broken alphabetically, so the order is
// deterministic. It returns an error when the graph contains a cycle.
//
// Fixes over the previous version:
//   - the successor list was built with make([]string, len(...)) plus
//     append, leaving leading "" entries whose delete/decrement steps
//     corrupted the in-degree bookkeeping (negative counts for "")
//     and could mask cycles in the final check;
//   - in-degrees are now decremented on a copy (outputs already was),
//     so Toposort no longer mutates edge state and can run repeatedly.
func (g *Graph) Toposort() ([]string, error) {
	// Work on copies of the edge sets and in-degrees.
	outputs := map[string]map[string]struct{}{}
	for node, successors := range g.outputs {
		outputs[node] = map[string]struct{}{}
		for successor := range successors {
			outputs[node][successor] = struct{}{}
		}
	}
	inputs := make(map[string]int, len(g.inputs))
	for node, indegree := range g.inputs {
		inputs[node] = indegree
	}

	// Sorting the node list keeps the result deterministic.
	sort.Strings(g.nodes)

	// S holds nodes with no remaining incoming edges; L is the result.
	L := make([]string, 0, len(g.nodes))
	S := make([]string, 0, len(g.nodes))
	for _, n := range g.nodes {
		if inputs[n] == 0 {
			S = append(S, n)
		}
	}

	for len(S) > 0 {
		var n string
		n, S = S[0], S[1:]
		L = append(L, n)

		// Visit successors in sorted order for a stable result.
		for _, m := range keys(outputs[n]) {
			delete(outputs[n], m)
			inputs[m]--
			if inputs[m] == 0 {
				S = append(S, m)
			}
		}
	}

	// Any remaining positive in-degree means at least one cycle.
	remaining := 0
	for _, indegree := range inputs {
		remaining += indegree
	}
	if remaining > 0 {
		return L, errors.New("cycle detected")
	}
	return L, nil
}
// keys returns the keys of m in ascending order.
func keys(m map[string]struct{}) []string {
	var sortedKeys []string
	for key := range m {
		sortedKeys = append(sortedKeys, key)
	}
	sort.Strings(sortedKeys)
	return sortedKeys
}
// GetParents returns all nodes with a direct edge to id, sorted
// alphabetically.
func (g *Graph) GetParents(id string) []string {
	var parents []string
	for candidate, successors := range g.outputs {
		if _, isParent := successors[id]; isParent {
			parents = append(parents, candidate)
		}
	}
	sort.Strings(parents)
	return parents
}
// GetRoot returns the single node without incoming edges.
// It returns an error when there is no root or more than one
// (the previous message claimed "more than one root" even when
// there were none).
func (g *Graph) GetRoot() (string, error) {
	var roots []string
	for n, indegree := range g.inputs {
		if indegree == 0 {
			roots = append(roots, n)
		}
	}
	switch len(roots) {
	case 1:
		return roots[0], nil
	case 0:
		return "", errors.New("no root found")
	default:
		return "", errors.New("more than one root")
	}
}

238
dag/dag_test.go Normal file
View File

@@ -0,0 +1,238 @@
// Adapted from https://github.com/philopon/go-toposort under the MIT License
// Original License:
//
// Copyright (c) 2017 Hirotomo Moriwaki
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
package dag
import (
"reflect"
"testing"
"github.com/stretchr/testify/assert"
)
// index returns the position of v within s, or -1 if v is absent.
func index(s []string, v string) int {
	for position, candidate := range s {
		if candidate == v {
			return position
		}
	}
	return -1
}
// Edge is a from/to pair used to build test graphs.
type Edge struct {
	From string
	To   string
}

// TestDuplicatedNode checks that adding the same node twice fails.
func TestDuplicatedNode(t *testing.T) {
	graph := NewGraph()
	assert.NoError(t, graph.AddNode("a"))
	assert.Error(t, graph.AddNode("a"))
}

// TestWikipedia builds the classic Wikipedia topological-sort example
// and checks that every edge's source precedes its target.
func TestWikipedia(t *testing.T) {
	graph := NewGraph()
	assert.NoError(t, graph.AddNodes("2", "3", "5", "7", "8", "9", "10", "11"))
	edges := []Edge{
		{"7", "8"},
		{"7", "11"},
		{"5", "11"},
		{"3", "8"},
		{"3", "10"},
		{"11", "2"},
		{"11", "9"},
		{"11", "10"},
		{"8", "9"},
	}
	for _, e := range edges {
		assert.NoError(t, graph.AddEdge(e.From, e.To))
	}
	result, err := graph.Toposort()
	if err != nil {
		t.Errorf("closed path detected in no closed pathed graph")
	}
	for _, e := range edges {
		if i, j := index(result, e.From), index(result, e.To); i > j {
			t.Errorf("dependency failed: not satisfy %v(%v) > %v(%v)", e.From, i, e.To, j)
		}
	}
}

// TestCycle checks that a 3-node cycle is reported as an error.
func TestCycle(t *testing.T) {
	graph := NewGraph()
	assert.NoError(t, graph.AddNodes("1", "2", "3"))
	assert.NoError(t, graph.AddEdge("1", "2"))
	assert.NoError(t, graph.AddEdge("2", "3"))
	assert.NoError(t, graph.AddEdge("3", "1"))
	_, err := graph.Toposort()
	if err == nil {
		t.Errorf("closed path not detected in closed pathed graph")
	}
}
// TestGraph_GetParents is a table test for GetParents over small
// hand-built graphs.
func TestGraph_GetParents(t *testing.T) {
	type fields struct {
		nodes []string
		edges map[string]string
	}
	type args struct {
		id string
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   []string
	}{
		{"parents 2", fields{nodes: []string{"1", "2", "3"}, edges: map[string]string{"1": "2", "2": "3"}}, args{id: "2"}, []string{"1"}},
		{"parents 3", fields{nodes: []string{"1", "2", "3"}, edges: map[string]string{"1": "3", "2": "3"}}, args{id: "3"}, []string{"1", "2"}},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewGraph()
			for _, node := range tt.fields.nodes {
				assert.NoError(t, g.AddNode(node))
			}
			for from, to := range tt.fields.edges {
				assert.NoError(t, g.AddEdge(from, to))
			}
			if got := g.GetParents(tt.args.id); !reflect.DeepEqual(got, tt.want) {
				t.Errorf("GetParents() = %v, want %v", got, tt.want)
			}
		})
	}
}

// TestDAG_AddNode rejects duplicate node names.
// NOTE(review): duplicates TestDuplicatedNode above.
func TestDAG_AddNode(t *testing.T) {
	dag := NewGraph()
	v := "1"
	assert.NoError(t, dag.AddNode(v))
	assert.Error(t, dag.AddNode(v))
}

// TestDAG_AddEdge adds edges and checks the parent count of a target.
func TestDAG_AddEdge(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNode("0"))
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	// add a single edge and inspect the graph
	assert.NoError(t, dag.AddEdge("1", "2"))
	if parents := dag.GetParents("2"); len(parents) != 1 {
		t.Errorf("GetParents(v2) = %d, want 1", len(parents))
	}
	assert.NoError(t, dag.AddEdge("2", "3"))
	_ = dag.AddEdge("0", "1")
}

// TestDAG_GetParents checks the parent count for a node with two
// incoming edges.
func TestDAG_GetParents(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	_ = dag.AddEdge("1", "3")
	_ = dag.AddEdge("2", "3")
	parents := dag.GetParents("3")
	if length := len(parents); length != 2 {
		t.Errorf("GetParents(v3) = %d, want 2", length)
	}
}

// TestDAG_GetDescendants builds a small graph.
// NOTE(review): despite its name this test makes no assertion about
// descendants (no GetDescendants call) — it only checks that the
// setup calls succeed. Consider adding real assertions or removing it.
func TestDAG_GetDescendants(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddNode("4"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("2", "3"))
	assert.NoError(t, dag.AddEdge("2", "4"))
}
// TestDAG_Topsort checks a simple diamond-free ordering.
func TestDAG_Topsort(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddNode("4"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("2", "3"))
	assert.NoError(t, dag.AddEdge("2", "4"))
	desc, _ := dag.Toposort()
	assert.Equal(t, desc, []string{"1", "2", "3", "4"})
}

// TestDAG_TopsortStable checks that siblings come out alphabetically.
func TestDAG_TopsortStable(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNode("1"))
	assert.NoError(t, dag.AddNode("2"))
	assert.NoError(t, dag.AddNode("3"))
	assert.NoError(t, dag.AddEdge("1", "2"))
	assert.NoError(t, dag.AddEdge("1", "3"))
	desc, _ := dag.Toposort()
	assert.Equal(t, desc, []string{"1", "2", "3"})
}

// TestDAG_TopsortStable2 pins the deterministic order for a realistic
// playbook-shaped graph.
func TestDAG_TopsortStable2(t *testing.T) {
	dag := NewGraph()
	assert.NoError(t, dag.AddNodes("block-ioc", "block-iocs", "block-sender", "board", "fetch-iocs", "escalate", "extract-iocs", "mail-available", "search-email-gateway"))
	assert.NoError(t, dag.AddEdge("block-iocs", "block-ioc"))
	assert.NoError(t, dag.AddEdge("block-sender", "extract-iocs"))
	assert.NoError(t, dag.AddEdge("board", "escalate"))
	assert.NoError(t, dag.AddEdge("board", "mail-available"))
	assert.NoError(t, dag.AddEdge("fetch-iocs", "block-iocs"))
	assert.NoError(t, dag.AddEdge("extract-iocs", "fetch-iocs"))
	assert.NoError(t, dag.AddEdge("mail-available", "block-sender"))
	assert.NoError(t, dag.AddEdge("mail-available", "extract-iocs"))
	assert.NoError(t, dag.AddEdge("mail-available", "search-email-gateway"))
	assert.NoError(t, dag.AddEdge("search-email-gateway", "extract-iocs"))
	sorted, err := dag.Toposort()
	assert.NoError(t, err)
	want := []string{"board", "escalate", "mail-available", "block-sender", "search-email-gateway", "extract-iocs", "fetch-iocs", "block-iocs", "block-ioc"}
	assert.Equal(t, want, sorted)
}

101
database/artifact.go Normal file
View File

@@ -0,0 +1,101 @@
package database
import (
"context"
"fmt"
"time"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// ArtifactGet returns the artifact named name from ticket id.
// NOTE(review): this pure read applies TicketWriteFilter — confirm
// whether a read filter was intended.
func (db *Database) ArtifactGet(ctx context.Context, id int64, name string) (*models.Artifact, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	// Load the ticket and select the artifact with the matching name.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a in NOT_NULL(d.artifacts, [])
FILTER a.name == @name
RETURN a`
	cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]interface{}{
		"@collection": TicketCollectionName,
		"ID":          fmt.Sprint(id),
		"name":        name,
	}), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var doc models.Artifact
	// Read the single expected result; an empty cursor yields an error.
	_, err = cursor.ReadDocument(ctx, &doc)
	if err != nil {
		return nil, err
	}
	return &doc, nil
}
// ArtifactUpdate replaces the artifact named name within ticket id and
// returns the updated ticket. The change is logged and published.
// NOTE(review): @ID is bound to the raw int64 here while ArtifactGet
// binds fmt.Sprint(id) — confirm DOCUMENT() accepts both forms.
func (db *Database) ArtifactUpdate(ctx context.Context, id int64, name string, artifact *models.Artifact) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	// Swap out the old artifact for the new one in the ticket document.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN NOT_NULL(d.artifacts, [])
FILTER a.name == @name
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), @artifact)
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
		"@collection": TicketCollectionName,
		"ID":          id,
		"name":        name,
		"artifact":    artifact,
	}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: fmt.Sprintf("Update artifact %s", name),
	})
}
// EnrichArtifact attaches the result of an enrichment run to the
// artifact named name of ticket id and returns the updated ticket.
func (db *Database) EnrichArtifact(ctx context.Context, id int64, name string, enrichmentForm *models.EnrichmentForm) (*models.TicketWithTickets, error) {
	// Positional literal: created timestamp, data, name — TODO confirm
	// the field order against models.Enrichment.
	enrichment := models.Enrichment{time.Now().UTC(), enrichmentForm.Data, enrichmentForm.Name}

	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	// Merge the new enrichment into the artifact's enrichment map.
	// NOTE(review): unlike ArtifactUpdate, d.artifacts is not wrapped in
	// NOT_NULL here — tickets without artifacts simply match nothing.
	query := `LET d = DOCUMENT(@@collection, @ID)
` + ticketFilterQuery + `
FOR a IN d.artifacts
FILTER a.name == @name
LET enrichments = NOT_NULL(a.enrichments, {})
LET newenrichments = MERGE(enrichments, ZIP( [@enrichmentname], [@enrichment]) )
LET newartifacts = APPEND(REMOVE_VALUE(d.artifacts, a), MERGE(a, { "enrichments": newenrichments }))
UPDATE d WITH { "artifacts": newartifacts } IN @@collection
RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
		"@collection":    TicketCollectionName,
		"ID":             id,
		"name":           name,
		"enrichmentname": enrichment.Name,
		"enrichment":     enrichment,
	}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: fmt.Sprintf("Run %s on artifact", enrichment.Name),
	})
}

99
database/automation.go Normal file
View File

@@ -0,0 +1,99 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// toAutomation converts an API form into the stored Automation model;
// the form's ID is dropped here because it becomes the document key.
func toAutomation(doc *models.AutomationForm) interface{} {
	return &models.Automation{
		Image:  doc.Image,
		Script: doc.Script,
		Schema: doc.Schema,
		Type:   doc.Type,
	}
}

// toAutomationResponse combines a document key and a stored Automation
// into the API response shape.
func toAutomationResponse(id string, doc models.Automation) *models.AutomationResponse {
	return &models.AutomationResponse{
		ID:     id,
		Image:  doc.Image,
		Script: doc.Script,
		Schema: doc.Schema,
		Type:   doc.Type,
	}
}
// AutomationCreate validates and stores a new automation; the form's
// ID becomes the document key. It returns the stored state.
func (db *Database) AutomationCreate(ctx context.Context, automation *models.AutomationForm) (*models.AutomationResponse, error) {
	if automation == nil {
		return nil, errors.New("requires automation")
	}
	if automation.ID == "" {
		return nil, errors.New("requires automation ID")
	}
	var doc models.Automation
	// newctx instructs the driver to fill doc with the created document;
	// the plain ctx is used for logging/notification.
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.automationCollection.CreateDocument(ctx, newctx, automation.ID, toAutomation(automation))
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, doc), nil
}
// AutomationGet loads a single automation by its document key.
func (db *Database) AutomationGet(ctx context.Context, id string) (*models.AutomationResponse, error) {
	var automation models.Automation
	meta, err := db.automationCollection.ReadDocument(ctx, id, &automation)
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, automation), nil
}

// AutomationUpdate replaces the automation stored under id and returns
// its new state.
func (db *Database) AutomationUpdate(ctx context.Context, id string, automation *models.AutomationForm) (*models.AutomationResponse, error) {
	var updated models.Automation
	// Ask the driver to hand back the replaced document in `updated`.
	withNew := driver.WithReturnNew(ctx, &updated)
	meta, err := db.automationCollection.ReplaceDocument(withNew, id, toAutomation(automation))
	if err != nil {
		return nil, err
	}
	return toAutomationResponse(meta.Key, updated), nil
}

// AutomationDelete removes the automation stored under id.
func (db *Database) AutomationDelete(ctx context.Context, id string) error {
	_, err := db.automationCollection.RemoveDocument(ctx, id)
	return err
}
// AutomationList returns all automations sorted by key. The script
// field is stripped server-side to keep the listing light.
func (db *Database) AutomationList(ctx context.Context) ([]*models.AutomationResponse, error) {
	query := "FOR d IN @@collection SORT d._key ASC RETURN UNSET(d, 'script')"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": AutomationCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*models.AutomationResponse
	for {
		var doc models.Automation
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toAutomationResponse(meta.Key, doc))
	}
	// Previously this returned the shadowed outer `err` (always nil by
	// this point), which read as if the loop error escaped; be explicit.
	return docs, nil
}

182
database/busdb/busdb.go Normal file
View File

@@ -0,0 +1,182 @@
package busdb
import (
"context"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// Hook receives notifications about database actions and updates.
type Hook interface {
	PublishAction(action string, context, msg map[string]interface{}) error
	PublishUpdate(col, id string) error
}

// BusDatabase wraps an ArangoDB database so that mutating operations
// 1. Save entry to log
// 2. Send update ticket to bus
// 3. Add document to index
type BusDatabase struct {
	internal      driver.Database
	logCollection driver.Collection
	bus           *bus.Bus
	// index *index.Index
}

// NewDatabase wires an ArangoDB database to the message bus.
// The log collection (LogCollectionName) must already exist.
func NewDatabase(ctx context.Context, internal driver.Database, b *bus.Bus) (*BusDatabase, error) {
	logCollection, err := internal.Collection(ctx, LogCollectionName)
	if err != nil {
		return nil, err
	}
	return &BusDatabase{
		internal:      internal,
		logCollection: logCollection,
		bus:           b,
	}, nil
}
// OperationType classifies a database operation so Query knows whether
// to write log entries and publish bus updates.
type OperationType int

const (
	// Create marks document insertions.
	Create OperationType = iota
	// Read marks side-effect-free queries.
	Read
	// Update marks modifications of existing documents.
	// Note: Read and Update were previously declared as separate
	// untyped `= iota` constants; they are now properly typed
	// OperationType values with the same numeric values (0, 1, 2).
	Update
)
// Operation describes what a query does, plus — for updates — the
// touched documents and the log message to record for them.
type Operation struct {
	OperationType OperationType
	Ids           []driver.DocumentID // documents affected by an Update
	Msg           string              // log message written for an Update
}

// CreateOperation and ReadOperation are shared descriptors for
// operations that need no per-call Ids/Msg.
var CreateOperation = &Operation{OperationType: Create}
var ReadOperation = &Operation{OperationType: Read}
// Query runs an AQL query. For Update operations it additionally
// writes log entries and publishes a bus update for the documents in
// operation.Ids.
//
// The *models.LogEntry return value is currently always nil; it is
// kept for signature stability (the previous version declared an
// unused `logs` variable and returned it).
func (db BusDatabase) Query(ctx context.Context, query string, vars map[string]interface{}, operation *Operation) (driver.Cursor, *models.LogEntry, error) {
	cur, err := db.internal.Query(ctx, query, vars)
	if err != nil {
		return nil, nil, err
	}
	if operation.OperationType == Update {
		if err := db.LogAndNotify(ctx, operation.Ids, operation.Msg); err != nil {
			return nil, nil, err
		}
	}
	return cur, nil, nil
}
// LogAndNotify writes one log entry per document id and then publishes
// a single bus update covering all of them.
func (db BusDatabase) LogAndNotify(ctx context.Context, ids []driver.DocumentID, msg string) error {
	var logEntries []*models.LogEntry
	for _, i := range ids {
		logEntries = append(logEntries, &models.LogEntry{Reference: i.String(), Message: msg})
	}
	if err := db.LogBatchCreate(ctx, logEntries); err != nil {
		return err
	}
	return db.bus.PublishUpdate(ids)
}

// Remove drops the entire underlying database.
func (db BusDatabase) Remove(ctx context.Context) error {
	return db.internal.Remove(ctx)
}

// Collection opens a raw (unwrapped) collection of the database.
func (db BusDatabase) Collection(ctx context.Context, name string) (driver.Collection, error) {
	return db.internal.Collection(ctx, name)
}

// Collection wraps a driver.Collection so that writes are logged and
// published on the bus.
type Collection struct {
	internal driver.Collection
	db       *BusDatabase
}

// NewCollection wraps internal with the log/notify behavior of db.
func NewCollection(internal driver.Collection, db *BusDatabase) *Collection {
	return &Collection{internal: internal, db: db}
}
// CreateDocument inserts document under the given key and logs the
// creation. newctx is the context handed to the driver (it may carry
// options such as WithReturnNew); ctx is used for logging/notifying.
func (c Collection) CreateDocument(ctx, newctx context.Context, key string, document interface{}) (driver.DocumentMeta, error) {
	meta, err := c.internal.CreateDocument(newctx, &Keyed{Key: key, Doc: document})
	if err != nil {
		return meta, err
	}
	err = c.db.LogAndNotify(ctx, []driver.DocumentID{meta.ID}, "Document created")
	if err != nil {
		return meta, err
	}
	return meta, nil
}

// CreateEdge inserts an edge document and logs the creation; ctx and
// newctx split the same roles as in CreateDocument.
func (c Collection) CreateEdge(ctx, newctx context.Context, edge *driver.EdgeDocument) (driver.DocumentMeta, error) {
	meta, err := c.internal.CreateDocument(newctx, edge)
	if err != nil {
		return meta, err
	}
	err = c.db.LogAndNotify(ctx, []driver.DocumentID{meta.ID}, "Document created")
	if err != nil {
		return meta, err
	}
	return meta, nil
}
// CreateEdges bulk-inserts edge documents, writes one log entry per
// created edge and publishes a single bus update for all of them.
func (c Collection) CreateEdges(ctx context.Context, edges []*driver.EdgeDocument) (driver.DocumentMetaSlice, error) {
	metas, errs, err := c.internal.CreateDocuments(ctx, edges)
	if err != nil {
		return nil, err
	}
	// Surface the first per-document error, if any.
	if errs.FirstNonNil() != nil {
		return nil, errs.FirstNonNil()
	}
	var ids []driver.DocumentID
	for _, meta := range metas {
		ids = append(ids, meta.ID)
	}
	err = c.db.LogAndNotify(ctx, ids, "Document created")
	if err != nil {
		return metas, err
	}
	return metas, nil
}
// DocumentExists reports whether a document with the given id exists.
func (c Collection) DocumentExists(ctx context.Context, id string) (bool, error) {
	return c.internal.DocumentExists(ctx, id)
}

// ReadDocument loads the document stored under key into result.
func (c Collection) ReadDocument(ctx context.Context, key string, result interface{}) (driver.DocumentMeta, error) {
	return c.internal.ReadDocument(ctx, key, result)
}

// UpdateDocument patches the document under key and publishes the
// change on the bus (no log entry is written here).
func (c Collection) UpdateDocument(ctx context.Context, key string, update interface{}) (driver.DocumentMeta, error) {
	meta, err := c.internal.UpdateDocument(ctx, key, update)
	if err != nil {
		return meta, err
	}
	return meta, c.db.bus.PublishUpdate([]driver.DocumentID{meta.ID})
}

// ReplaceDocument overwrites the document under key and publishes the
// change on the bus (no log entry is written here).
func (c Collection) ReplaceDocument(ctx context.Context, key string, document interface{}) (driver.DocumentMeta, error) {
	meta, err := c.internal.ReplaceDocument(ctx, key, document)
	if err != nil {
		return meta, err
	}
	return meta, c.db.bus.PublishUpdate([]driver.DocumentID{meta.ID})
}
// RemoveDocument deletes the document stored under key.
// Unlike the create/update methods, the removal is currently neither
// logged nor published on the bus.
// (The parameter was previously named `formatInt`, a copy-paste
// leftover; Go parameter names are not part of the call interface.)
func (c Collection) RemoveDocument(ctx context.Context, key string) (driver.DocumentMeta, error) {
	return c.internal.RemoveDocument(ctx, key)
}

34
database/busdb/context.go Normal file
View File

@@ -0,0 +1,34 @@
package busdb
import (
"context"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/role"
)
const (
	// userContextKey stores the authenticated user in a context.
	// NOTE(review): plain string context keys trigger staticcheck
	// SA1029; they appear to be kept because the same key is read back
	// through gin's Context (set via ctx.Set, a string-keyed map) —
	// confirm before switching to a dedicated key type.
	userContextKey  = "user"
	groupContextKey = "groups"
)

// SetContext stores the user in the gin context.
// It normalizes (explodes) the user's roles in place.
func SetContext(ctx *gin.Context, user *models.UserResponse) {
	user.Roles = role.Strings(role.Explodes(user.Roles))
	ctx.Set(userContextKey, user)
}

// SetGroupContext stores the user's groups in the gin context.
func SetGroupContext(ctx *gin.Context, groups []string) {
	ctx.Set(groupContextKey, groups)
}

// UserContext returns a child context carrying the user.
// It normalizes (explodes) the user's roles in place.
func UserContext(ctx context.Context, user *models.UserResponse) context.Context {
	user.Roles = role.Strings(role.Explodes(user.Roles))
	return context.WithValue(ctx, userContextKey, user)
}

// UserFromContext extracts the user stored by SetContext/UserContext.
// The second return value reports whether a user was present.
func UserFromContext(ctx context.Context) (*models.UserResponse, bool) {
	u, ok := ctx.Value(userContextKey).(*models.UserResponse)
	return u, ok
}

25
database/busdb/keyed.go Normal file
View File

@@ -0,0 +1,25 @@
package busdb
import "encoding/json"
type Keyed struct {
Key string
Doc interface{}
}
func (p Keyed) MarshalJSON() ([]byte, error) {
b, err := json.Marshal(p.Doc)
if err != nil {
panic(err)
}
var m map[string]interface{}
err = json.Unmarshal(b, &m)
if err != nil {
panic(err)
}
m["_key"] = p.Key
return json.Marshal(m)
}

92
database/busdb/log.go Normal file
View File

@@ -0,0 +1,92 @@
package busdb
import (
"context"
"errors"
"time"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/generated/models"
)
const LogCollectionName = "logs"
// LogCreate records a single log entry for the given reference id on behalf
// of the user in ctx, then publishes an update for that reference on the bus.
// It fails when ctx carries no user.
func (db *BusDatabase) LogCreate(ctx context.Context, id, message string) (*models.LogEntry, error) {
	user, ok := UserFromContext(ctx)
	if !ok {
		return nil, errors.New("no user in context")
	}

	entry := &models.LogEntry{
		Reference: id,
		Created:   time.Now(),
		Creator:   user.ID,
		Message:   message,
	}

	// Capture the stored document (server-side state) via ReturnNew.
	var stored models.LogEntry
	if _, err := db.logCollection.CreateDocument(driver.WithReturnNew(ctx, &stored), entry); err != nil {
		return nil, err
	}

	return &stored, db.bus.PublishUpdate([]driver.DocumentID{driver.DocumentID(entry.Reference)})
}
// LogBatchCreate stores several log entries at once on behalf of the user in
// ctx and publishes a single bus update covering every referenced document.
// It fails when ctx carries no user or when any individual create failed.
func (db *BusDatabase) LogBatchCreate(ctx context.Context, logEntryForms []*models.LogEntry) error {
	user, ok := UserFromContext(ctx)
	if !ok {
		return errors.New("no user in context")
	}

	var entries []*models.LogEntry
	var references []driver.DocumentID
	for _, form := range logEntryForms {
		entries = append(entries, &models.LogEntry{
			Reference: form.Reference,
			Created:   time.Now(),
			Creator:   user.ID,
			Message:   form.Message,
		})
		references = append(references, driver.DocumentID(form.Reference))
	}

	_, createErrs, err := db.logCollection.CreateDocuments(ctx, entries)
	if err != nil {
		return err
	}
	// Surface the first per-document failure, if any.
	if firstErr := createErrs.FirstNonNil(); firstErr != nil {
		return firstErr
	}

	return db.bus.PublishUpdate(references)
}
// LogList returns all log entries recorded for the given reference id,
// newest first.
func (db *BusDatabase) LogList(ctx context.Context, reference string) ([]*models.LogEntry, error) {
	query := "FOR d IN @@collection FILTER d.reference == @reference SORT d.created DESC RETURN d"
	cursor, err := db.internal.Query(ctx, query, map[string]interface{}{
		"@collection": LogCollectionName,
		"reference":   reference,
	})
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*models.LogEntry
	for {
		var doc models.LogEntry
		_, readErr := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(readErr) {
			break
		} else if readErr != nil {
			return nil, readErr
		}
		docs = append(docs, &doc)
	}

	// Fix: the original returned the outer (already-nil) err here, which read
	// as if a loop error could escape; be explicit that iteration succeeded.
	return docs, nil
}

177
database/db.go Normal file
View File

@@ -0,0 +1,177 @@
package database
import (
"context"
"fmt"
"log"
"github.com/arangodb/go-driver"
"github.com/arangodb/go-driver/http"
"github.com/SecurityBrewery/catalyst/bus"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/hooks"
"github.com/SecurityBrewery/catalyst/index"
)
// Names of the ArangoDB database, its collections, and the ticket/artifact
// graph used by Catalyst.
const (
	Name = "catalyst"

	TicketCollectionName     = "tickets"
	TemplateCollectionName   = "templates"
	PlaybookCollectionName   = "playbooks"
	AutomationCollectionName = "automations"
	UserDataCollectionName   = "userdata"
	UserCollectionName       = "users"
	TicketTypeCollectionName = "tickettypes"
	JobCollectionName        = "jobs"

	TicketArtifactsGraphName     = "Graph"
	RelatedTicketsCollectionName = "related"
)
// Database bundles Catalyst's ArangoDB-backed storage layer: the bus-aware
// database handle, the search index, the event bus, lifecycle hooks, and one
// bus-wrapped handle per collection.
type Database struct {
	*busdb.BusDatabase

	Index *index.Index
	bus   *bus.Bus
	Hooks *hooks.Hooks

	templateCollection   *busdb.Collection
	ticketCollection     *busdb.Collection
	playbookCollection   *busdb.Collection
	automationCollection *busdb.Collection
	userdataCollection   *busdb.Collection
	userCollection       *busdb.Collection
	tickettypeCollection *busdb.Collection
	jobCollection        *busdb.Collection
	relatedCollection    *busdb.Collection
	// NOTE(review): containsCollection is not assigned in New (below) —
	// confirm it is initialized elsewhere before use.
	containsCollection *busdb.Collection
}
// Config holds the ArangoDB connection settings.
type Config struct {
	Host     string // connection endpoint passed to the HTTP driver
	User     string // basic-auth user
	Password string // basic-auth password
	Name     string // database name; falls back to the Name constant when empty
}
// New connects to ArangoDB using config, runs the after-connect hooks,
// ensures the database exists, applies pending migrations, and opens every
// Catalyst collection, returning the assembled Database.
//
// Improvement: the nine copy-pasted "open collection / check error" stanzas
// are collapsed into a single loop; the collections are opened in the same
// order and wrapped exactly as before.
func New(ctx context.Context, index *index.Index, bus *bus.Bus, hooks *hooks.Hooks, config *Config) (*Database, error) {
	name := config.Name
	if config.Name == "" {
		name = Name
	}

	conn, err := http.NewConnection(http.ConnectionConfig{Endpoints: []string{config.Host}})
	if err != nil {
		return nil, err
	}

	client, err := driver.NewClient(driver.ClientConfig{
		Connection:     conn,
		Authentication: driver.BasicAuthentication(config.User, config.Password),
	})
	if err != nil {
		return nil, err
	}

	hooks.DatabaseAfterConnect(ctx, client, name)

	db, err := setupDB(ctx, client, name)
	if err != nil {
		return nil, fmt.Errorf("DB setup failed: %w", err)
	}

	if err = migrations.PerformMigrations(ctx, db); err != nil {
		return nil, fmt.Errorf("migrations failed: %w", err)
	}

	// Open all collections, failing fast on the first error (same order as
	// the original sequential code).
	collections := map[string]driver.Collection{}
	for _, collectionName := range []string{
		TicketCollectionName,
		TemplateCollectionName,
		PlaybookCollectionName,
		RelatedTicketsCollectionName,
		AutomationCollectionName,
		UserDataCollectionName,
		UserCollectionName,
		TicketTypeCollectionName,
		JobCollectionName,
	} {
		collection, err := db.Collection(ctx, collectionName)
		if err != nil {
			return nil, err
		}
		collections[collectionName] = collection
	}

	hookedDB, err := busdb.NewDatabase(ctx, db, bus)
	if err != nil {
		return nil, err
	}

	return &Database{
		BusDatabase:          hookedDB,
		bus:                  bus,
		Index:                index,
		Hooks:                hooks,
		templateCollection:   busdb.NewCollection(collections[TemplateCollectionName], hookedDB),
		ticketCollection:     busdb.NewCollection(collections[TicketCollectionName], hookedDB),
		playbookCollection:   busdb.NewCollection(collections[PlaybookCollectionName], hookedDB),
		automationCollection: busdb.NewCollection(collections[AutomationCollectionName], hookedDB),
		relatedCollection:    busdb.NewCollection(collections[RelatedTicketsCollectionName], hookedDB),
		userdataCollection:   busdb.NewCollection(collections[UserDataCollectionName], hookedDB),
		userCollection:       busdb.NewCollection(collections[UserCollectionName], hookedDB),
		tickettypeCollection: busdb.NewCollection(collections[TicketTypeCollectionName], hookedDB),
		jobCollection:        busdb.NewCollection(collections[JobCollectionName], hookedDB),
	}, nil
}
// setupDB opens the named database, creating it first when it does not exist,
// and makes sure the migrations bookkeeping collection is present.
func setupDB(ctx context.Context, client driver.Client, dbName string) (driver.Database, error) {
	exists, err := client.DatabaseExists(ctx, dbName)
	if err != nil {
		return nil, err
	}

	var db driver.Database
	if exists {
		db, err = client.Database(ctx, dbName)
	} else {
		db, err = client.CreateDatabase(ctx, dbName, nil)
	}
	if err != nil {
		return nil, err
	}

	hasMigrations, err := db.CollectionExists(ctx, migrations.MigrationCollection)
	if err != nil {
		return nil, err
	}
	if !hasMigrations {
		options := &driver.CreateCollectionOptions{
			KeyOptions: &driver.CollectionKeyOptions{AllowUserKeys: true},
		}
		// Creation failures are logged, not fatal (same as before).
		if _, createErr := db.CreateCollection(ctx, migrations.MigrationCollection, options); createErr != nil {
			log.Println(createErr)
		}
	}

	return db, nil
}

256
database/job.go Normal file
View File

@@ -0,0 +1,256 @@
package database
import (
"context"
"encoding/json"
"errors"
"fmt"
"strings"
"github.com/arangodb/go-driver"
"github.com/docker/docker/client"
"github.com/xeipuuv/gojsonschema"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// toJob turns a submitted job form into a Job record that starts out in the
// "created" state and is considered running.
func toJob(form *models.JobForm) *models.Job {
	job := &models.Job{
		Automation: form.Automation,
		Payload:    form.Payload,
		Origin:     form.Origin,
	}
	job.Running = true
	job.Status = "created"

	return job
}
// toJobResponse builds the API representation of a job, refreshing the job's
// runtime state from its Docker container (looked up by the job key) while
// the job is marked running. When update is true, detected state changes are
// written back via JobUpdate.
func (db *Database) toJobResponse(ctx context.Context, key string, doc *models.Job, update bool) (*models.JobResponse, error) {
	// Fresh Docker client per call, configured from the environment
	// (DOCKER_HOST etc.).
	cli, err := client.NewClientWithOpts(client.FromEnv)
	if err != nil {
		return nil, err
	}
	defer cli.Close()

	status := doc.Status
	if doc.Running {
		inspect, err := cli.ContainerInspect(ctx, key)
		if err != nil || inspect.State == nil {
			// Container cannot be inspected: treat the job as no longer running.
			doc.Running = false
			if update {
				// NOTE(review): JobUpdate's result and error are dropped —
				// presumably a best-effort sync; confirm this is intended.
				db.JobUpdate(ctx, key, doc)
			}
		} else if doc.Status != inspect.State.Status {
			// Adopt the container's current status and persist it if asked to.
			status = inspect.State.Status
			doc.Status = inspect.State.Status
			if update {
				db.JobUpdate(ctx, key, doc)
			}
		}
	}

	return &models.JobResponse{
		Automation: doc.Automation,
		ID:         key,
		Log:        doc.Log,
		Payload:    doc.Payload,
		Origin:     doc.Origin,
		Output:     doc.Output,
		Status:     status,
		Container:  doc.Container,
	}, nil
}
// JobCreate validates the job form against the job JSON schema, stores the
// resulting job under the given id, and returns its API representation with
// container state refreshed.
func (db *Database) JobCreate(ctx context.Context, id string, job *models.JobForm) (*models.JobResponse, error) {
	if job == nil {
		return nil, errors.New("requires job")
	}
	// doc receives the stored document via ReturnNew.
	var doc models.Job
	newctx := driver.WithReturnNew(ctx, &doc)
	/* Start validation */
	j := toJob(job)
	b, _ := json.Marshal(j)
	r, err := models.JobSchema.Validate(gojsonschema.NewBytesLoader(b))
	if err != nil {
		return nil, err
	}
	if !r.Valid() {
		// Collect all schema violations into one error message.
		var errs []string
		for _, e := range r.Errors() {
			errs = append(errs, e.String())
		}
		return nil, errors.New(strings.Join(errs, ", "))
	}
	/* End validation */
	// NOTE(review): both the plain ctx and the ReturnNew-wrapped newctx are
	// passed here — busdb.Collection.CreateDocument apparently takes two
	// contexts; confirm which one drives the ArangoDB call.
	meta, err := db.jobCollection.CreateDocument(ctx, newctx, id, j)
	if err != nil {
		return nil, err
	}
	return db.toJobResponse(ctx, meta.Key, &doc, true)
}
// JobGet loads the job with the given id and returns its API representation
// with container state refreshed (and persisted when it changed).
func (db *Database) JobGet(ctx context.Context, id string) (*models.JobResponse, error) {
	var job models.Job
	meta, err := db.jobCollection.ReadDocument(ctx, id, &job)
	if err != nil {
		return nil, err
	}

	return db.toJobResponse(ctx, meta.Key, &job, true)
}
// JobUpdate validates the given job against the job JSON schema, replaces the
// stored document under id, and returns the refreshed API representation.
func (db *Database) JobUpdate(ctx context.Context, id string, job *models.Job) (*models.JobResponse, error) {
	// updated receives the stored document via ReturnNew.
	var updated models.Job
	ctx = driver.WithReturnNew(ctx, &updated)

	/* Start validation */
	raw, _ := json.Marshal(job)
	result, err := models.JobSchema.Validate(gojsonschema.NewBytesLoader(raw))
	if err != nil {
		return nil, err
	}
	if !result.Valid() {
		// Join every schema violation into a single error message.
		var problems []string
		for _, validationError := range result.Errors() {
			problems = append(problems, validationError.String())
		}
		return nil, errors.New(strings.Join(problems, ", "))
	}
	/* End validation */

	meta, err := db.jobCollection.ReplaceDocument(ctx, id, job)
	if err != nil {
		return nil, err
	}

	return db.toJobResponse(ctx, meta.Key, &updated, true)
}
// JobLogAppend appends a log line to the job's accumulated log via an AQL
// update and publishes the change as an update operation on the bus.
func (db *Database) JobLogAppend(ctx context.Context, id string, logLine string) error {
	query := `LET d = DOCUMENT(@@collection, @ID)
	UPDATE d WITH { "log": CONCAT(NOT_NULL(d.log, ""), @logline) } IN @@collection`
	cur, _, err := db.Query(ctx, query, map[string]interface{}{
		"@collection": JobCollectionName,
		"ID":          id,
		"logline":     logLine,
	}, &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
		},
		// Fix: plain literal instead of fmt.Sprintf with no format verbs
		// (flagged by go vet / staticcheck S1039); the value is unchanged.
		Msg: "Append logline",
	})
	if err != nil {
		return err
	}
	defer cur.Close()
	return nil
}
// JobComplete stores the automation's output on the job, marks it completed
// and not running, and publishes the change as an update operation on the bus.
func (db *Database) JobComplete(ctx context.Context, id string, out interface{}) error {
	query := `LET d = DOCUMENT(@@collection, @ID)
	UPDATE d WITH { "output": @out, "status": "completed", "running": false } IN @@collection`
	cur, _, err := db.Query(ctx, query, map[string]interface{}{
		"@collection": JobCollectionName,
		"ID":          id,
		"out":         out,
	}, &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%s", JobCollectionName, id)),
		},
		// Fix: plain literal instead of fmt.Sprintf with no format verbs
		// (flagged by go vet / staticcheck S1039); the value is unchanged.
		Msg: "Set output",
	})
	if err != nil {
		return err
	}
	defer cur.Close()
	return nil
}
// JobDelete removes the job with the given id from the job collection.
func (db *Database) JobDelete(ctx context.Context, id string) error {
	_, err := db.jobCollection.RemoveDocument(ctx, id)
	return err
}
// JobList returns the API representation of every job. Container state is
// refreshed for the response but not written back (update=false).
func (db *Database) JobList(ctx context.Context) ([]*models.JobResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": JobCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()

	var docs []*models.JobResponse
	for {
		var doc models.Job
		meta, readErr := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(readErr) {
			break
		} else if readErr != nil {
			return nil, readErr
		}

		job, err := db.toJobResponse(ctx, meta.Key, &doc, false)
		if err != nil {
			return nil, err
		}
		docs = append(docs, job)
	}

	// Fix: the original returned a stale err variable here, which read as if
	// a loop error could escape; be explicit that iteration succeeded.
	return docs, nil
}
// publishJobMapping evaluates the payload mapping against the given context
// and publishes the resulting job on the bus.
func publishJobMapping(id, automation string, contextStructs *models.Context, origin *models.Origin, payloadMapping map[string]string, db *Database) error {
	payload, err := generatePayload(payloadMapping, contextStructs)
	if err != nil {
		return fmt.Errorf("message generation failed: %w", err)
	}

	return publishJob(id, automation, contextStructs, origin, payload, db)
}
// publishJob forwards the job (id, automation, payload, context and origin)
// to the database's event bus.
func publishJob(id, automation string, contextStructs *models.Context, origin *models.Origin, payload map[string]interface{}, db *Database) error {
	return db.bus.PublishJob(id, automation, payload, contextStructs, origin)
}
// generatePayload evaluates each CAQL expression in msgMapping against the
// JSON form of the given context and returns the resulting argument map.
func generatePayload(msgMapping map[string]string, contextStructs *models.Context) (map[string]interface{}, error) {
	// Round-trip the context through JSON to obtain a generic map for the
	// expression evaluator.
	raw, err := json.Marshal(contextStructs)
	if err != nil {
		return nil, err
	}
	evalContext := map[string]interface{}{}
	if err := json.Unmarshal(raw, &evalContext); err != nil {
		return nil, err
	}

	var parser caql.Parser
	payload := map[string]interface{}{}
	for name, expression := range msgMapping {
		tree, parseErr := parser.Parse(expression)
		if parseErr != nil {
			return nil, parseErr
		}

		value, evalErr := tree.Eval(evalContext)
		if evalErr != nil {
			return nil, evalErr
		}

		payload[name] = value
	}

	return payload, nil
}

View File

@@ -0,0 +1,25 @@
#!/usr/bin/env python
# Catalyst automation: post the payload as a comment on the ticket referenced
# by the incoming message.
import subprocess
import sys

# Best-effort dependency bootstrap; pip output is suppressed.
subprocess.call(
    [sys.executable, "-m", "pip", "install", "requests"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)

import json
import requests


def run(msg):
    """Post msg's default payload as an 'automation' comment on the ticket.

    Only acts when the automation ran in a ticket context; always reports done.
    """
    if "ticket" in msg["context"]:
        headers = {"PRIVATE-TOKEN": msg["secrets"]["catalyst_apikey"]}
        url = "%s/tickets/%d/comments" % (msg["secrets"]["catalyst_apiurl"], msg["context"]["ticket"]["id"])
        data = {'message': msg["payload"]["default"], 'creator': 'automation'}
        # NOTE(review): the response JSON is parsed but discarded, and HTTP
        # errors are not checked — confirm best-effort posting is intended.
        requests.post(url, json=data, headers=headers).json()
    return {"done": True}


# CLI entry point: read the JSON message from argv[1], print the result JSON.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -0,0 +1,13 @@
#!/usr/bin/env python
import sys
import json
import hashlib
def run(msg):
    """Return the SHA-1 hex digest of the message's default payload string."""
    digest = hashlib.sha1(msg["payload"]["default"].encode("utf-8")).hexdigest()
    return {"hash": digest}
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -0,0 +1,630 @@
#!/usr/bin/env python
# Catalyst automation: migrate cases, alerts, observables, task logs and
# attachments from TheHive into Catalyst tickets.
import subprocess
import sys
import json
from datetime import datetime
import io

# Best-effort install of the client libraries; pip output is suppressed.
subprocess.check_call(
    [sys.executable, "-m", "pip", "install", "thehive4py", "requests", "minio"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)
# JSON schema for tickets migrated from TheHive *cases*: severity/TLP/PAP
# pickers plus description, summary, tags and resolution fields. Per-case
# custom fields are appended through the `schema` helper class further down.
defaultschema = {
    "definitions": {},
    "$schema": "http://json-schema.org/draft-07/schema#",
    "$id": "https://example.com/object1618746510.json",
    "title": "Default",
    "type": "object",
    "required": [
        "severity",
        "description",
        "summary",
        "tlp",
        "pap"
    ],
    "properties": {
        "severity": {
            "$id": "#root/severity",
            "title": "Severity",
            "type": "string",
            "default": "Medium",
            "x-cols": 6,
            "x-class": "pr-2",
            "x-display": "icon",
            "x-itemIcon": "icon",
            "oneOf": [
                {
                    "const": "Unknown",
                    "title": "Unknown",
                    "icon": "mdi-help"
                },
                {
                    "const": "Low",
                    "title": "Low",
                    "icon": "mdi-chevron-up"
                },
                {
                    "const": "Medium",
                    "title": "Medium",
                    "icon": "mdi-chevron-double-up"
                },
                {
                    "const": "High",
                    "title": "High",
                    "icon": "mdi-chevron-triple-up"
                },
                {
                    "const": "Very High",
                    "title": "Very High",
                    "icon": "mdi-exclamation"
                }
            ]
        },
        "flag": {
            "title": "Flag",
            "type": "boolean",
            "x-cols": 6,
        },
        "tlp": {
            "$id": "#root/tlp",
            "title": "TLP",
            "type": "string",
            "x-cols": 6,
            "x-class": "pr-2",
            "x-display": "icon",
            "x-itemIcon": "icon",
            "oneOf": [
                {
                    "const": "White",
                    "title": "White",
                    "icon": "mdi-alpha-w"
                },
                {
                    "const": "Green",
                    "title": "Green",
                    "icon": "mdi-alpha-g"
                },
                {
                    "const": "Amber",
                    "title": "Amber",
                    "icon": "mdi-alpha-a"
                },
                {
                    "const": "Red",
                    "title": "Red",
                    "icon": "mdi-alpha-r"
                }
            ]
        },
        "pap": {
            "$id": "#root/pap",
            "title": "PAP",
            "type": "string",
            "x-cols": 6,
            "x-class": "pr-2",
            "x-display": "icon",
            "x-itemIcon": "icon",
            "oneOf": [
                {
                    "const": "White",
                    "title": "White",
                    "icon": "mdi-alpha-w"
                },
                {
                    "const": "Green",
                    "title": "Green",
                    "icon": "mdi-alpha-g"
                },
                {
                    "const": "Amber",
                    "title": "Amber",
                    "icon": "mdi-alpha-a"
                },
                {
                    "const": "Red",
                    "title": "Red",
                    "icon": "mdi-alpha-r"
                }
            ]
        },
        "tags": {
            "$id": "#root/tags",
            "title": "Tags",
            "type": "array",
            "items": {
                "type": "string"
            }
        },
        "description": {
            "$id": "#root/description",
            "title": "Description",
            "type": "string",
            "x-display": "textarea",
            "x-class": "pr-2"
        },
        "resolutionStatus": {
            "$id": "#root/resolutionStatus",
            "title": "Resolution Status",
            "type": "string",
            "x-cols": 6,
            "x-class": "pr-2",
        },
        "endDate": {
            "$id": "#root/endDate",
            # NOTE(review): "End Data" looks like a typo for "End Date";
            # it is a runtime string, so it is left unchanged here.
            "title": "End Data",
            "type": "string",
            "format": "date-time",
            "x-cols": 6,
            "x-class": "pr-2",
        },
        "summary": {
            "$id": "#root/summary",
            "title": "Summary",
            "type": "string",
            "x-display": "textarea",
            "x-class": "pr-2"
        }
    }
}
# JSON schema for tickets migrated from TheHive *alerts*: severity/TLP pickers
# plus source, sourceRef, type and description.
# NOTE(review): "pap" is listed in "required" but has no matching property —
# confirm whether PAP should be part of the alert schema.
defaultalertschema = {
    "definitions": {},
    "$schema": "http://json-schema.org/draft-07/schema#",
    "$id": "https://example.com/object1618746510.json",
    "title": "Default",
    "type": "object",
    "required": [
        "severity",
        "description",
        "summary",
        "tlp",
        "pap"
    ],
    "properties": {
        "severity": {
            "$id": "#root/severity",
            "title": "Severity",
            "type": "string",
            "default": "Medium",
            "x-cols": 6,
            "x-class": "pr-2",
            "x-display": "icon",
            "x-itemIcon": "icon",
            "oneOf": [
                {
                    "const": "Unknown",
                    "title": "Unknown",
                    "icon": "mdi-help"
                },
                {
                    "const": "Low",
                    "title": "Low",
                    "icon": "mdi-chevron-up"
                },
                {
                    "const": "Medium",
                    "title": "Medium",
                    "icon": "mdi-chevron-double-up"
                },
                {
                    "const": "High",
                    "title": "High",
                    "icon": "mdi-chevron-triple-up"
                },
                {
                    "const": "Very High",
                    "title": "Very High",
                    "icon": "mdi-exclamation"
                }
            ]
        },
        "tlp": {
            "$id": "#root/tlp",
            "title": "TLP",
            "type": "string",
            "x-cols": 6,
            "x-class": "pr-2",
            "x-display": "icon",
            "x-itemIcon": "icon",
            "oneOf": [
                {
                    "const": "White",
                    "title": "White",
                    "icon": "mdi-alpha-w"
                },
                {
                    "const": "Green",
                    "title": "Green",
                    "icon": "mdi-alpha-g"
                },
                {
                    "const": "Amber",
                    "title": "Amber",
                    "icon": "mdi-alpha-a"
                },
                {
                    "const": "Red",
                    "title": "Red",
                    "icon": "mdi-alpha-r"
                }
            ]
        },
        "source": {
            "$id": "#root/source",
            "title": "Source",
            "type": "string",
            "x-cols": 4,
            "x-class": "pr-2",
        },
        "sourceRef": {
            "$id": "#root/sourceRef",
            "title": "Source Ref",
            "type": "string",
            "x-cols": 4,
            "x-class": "pr-2",
        },
        "type": {
            "$id": "#root/type",
            "title": "Type",
            "type": "string",
            "x-cols": 4,
            "x-class": "pr-2",
        },
        "description": {
            "$id": "#root/description",
            "title": "Description",
            "type": "string",
            "x-display": "textarea",
            "x-class": "pr-2"
        }
    }
}
class schema:
    """Mutable per-case copy of the default Catalyst incident schema.

    Bug fix: the original assigned ``self.schema = defaultschema``, aliasing
    the shared module-level dict — every ``add_*`` call mutated the default,
    so custom fields added for one case leaked into the schema of every later
    case. Each instance now gets its own deep copy (via a JSON round-trip,
    which is safe because the schema is plain JSON data).
    """

    def __init__(self):
        self.schema = json.loads(json.dumps(defaultschema))

    def add_string(self, title):
        # Free-text custom field.
        self.schema["properties"][title] = { "type": "string", "x-cols": 6, "x-class": "pr-2" }

    def add_boolean(self, title):
        self.schema["properties"][title] = { "type": "boolean", "x-cols": 6, "x-class": "pr-2" }

    def add_date(self, title):
        self.schema["properties"][title] = { "type": "string", "format": "date-time", "x-cols": 6, "x-class": "pr-2" }

    def add_integer(self, title):
        self.schema["properties"][title] = { "type": "integer", "x-cols": 6, "x-class": "pr-2" }

    def add_float(self, title):
        self.schema["properties"][title] = { "type": "number", "x-cols": 6, "x-class": "pr-2" }
class alertschema:
    """Per-alert copy of the default alert schema.

    Bug fix: the original aliased the shared module-level dict; a deep copy
    (JSON round-trip) keeps instances independent, matching the corrected
    ``schema`` class.
    """

    def __init__(self):
        self.schema = json.loads(json.dumps(defaultalertschema))
def maptime(hivetime):
    """Convert a TheHive epoch-milliseconds timestamp to an ISO-8601 UTC string.

    Returns None when the input is None (TheHive uses null for unset dates).

    Bug fix: the original used ``datetime.fromtimestamp``, which yields *local*
    time, yet still appended the "Z" (UTC) designator — producing mislabeled
    timestamps on any machine not running in UTC. Convert in UTC explicitly.
    """
    if hivetime is None:
        return None
    return datetime.utcfromtimestamp(hivetime / 1000).isoformat() + "Z"
def mapstatus(hivestatus):
    """Map a TheHive case/alert status onto Catalyst's open/closed model."""
    # Only "Open" cases and "New" alerts are still active in TheHive.
    return "open" if hivestatus in ("Open", "New") else "closed"
def maptlp(hivetlp):
    """Translate TheHive's numeric TLP level to its Catalyst colour name.

    Unrecognized values fall back to "White", matching the original chain of
    if-statements.
    """
    return {0: "White", 1: "Green", 2: "Amber", 3: "Red"}.get(hivetlp, "White")
def mapseverity(hiveseverity):
    """Translate TheHive's numeric severity (1-4) to Catalyst's label.

    Unrecognized values map to "Unknown", matching the original behavior.
    """
    levels = {1: "Low", 2: "Medium", 3: "High", 4: "Very High"}
    return levels.get(hiveseverity, "Unknown")
# {
# "_id": "~16416",
# "id": "~16416",
# "createdBy": "jonas@thehive.local",
# "updatedBy": "jonas@thehive.local",
# "createdAt": 1638704013583,
# "updatedAt": 1638704061151,
# "_type": "case",
# "caseId": 1,
# "title": "My Test 1",
# "description": "My Testcase",
# "severity": 2,
# "startDate": 1638703980000,
# "endDate": null,
# "impactStatus": null,
# "resolutionStatus": null,
# "tags": [],
# "flag": false,
# "tlp": 2,
# "pap": 2,
# "status": "Open",
# "summary": null,
# "owner": "jonas@thehive.local",
# "customFields": {},
# "stats": {},
# "permissions": [ "manageShare", "manageAnalyse", "manageTask", "manageCaseTemplate", "manageCase", "manageUser", "manageProcedure", "managePage", "manageObservable", "manageTag", "manageConfig", "manageAlert", "accessTheHiveFS", "manageAction" ]
# }
def mapcase(hivecase, url, keep_ids):
    """Convert a TheHive case (API JSON) into a Catalyst ticket dict.

    Custom fields are copied into the ticket details and registered on a
    per-case schema. When keep_ids is true, the original TheHive case id is
    preserved as the ticket id. Requires Python 3.9+ (dict union operator).
    """
    s = schema()
    details = {}
    # TheHive stores each custom field's value under a type-named key; mirror
    # every populated value into the schema and the ticket details.
    for name, data in hivecase["customFields"].items():
        if "string" in data and data["string"] is not None:
            s.add_string(name)
            details[name] = data["string"]
        if "boolean" in data and data["boolean"] is not None:
            s.add_boolean(name)
            details[name] = data["boolean"]
        if "date" in data and data["date"] is not None:
            s.add_date(name)
            details[name] = maptime(data["date"])
        if "integer" in data and data["integer"] is not None:
            s.add_integer(name)
            details[name] = data["integer"]
        if "float" in data and data["float"] is not None:
            s.add_float(name)
            details[name] = data["float"]
    case = {}
    if keep_ids:
        case["id"] = hivecase["caseId"]
    return {
        "name": hivecase["title"],
        "type": "incident",
        "status": mapstatus(hivecase["status"]),
        "owner": hivecase["owner"],
        # "write": hivecase["write"],
        # "read": hivecase["read"],
        "schema": json.dumps(s.schema),
        "details": {
            "tlp": maptlp(hivecase["tlp"]),
            # PAP shares TLP's numeric colour scale, so the TLP mapper is reused.
            "pap": maptlp(hivecase["pap"]),
            "severity": mapseverity(hivecase["severity"]),
            "description": hivecase["description"],
            "summary": hivecase["summary"],
            "tags": hivecase["tags"],
            "endDate": maptime(hivecase["endDate"]),
            "resolutionStatus": hivecase["resolutionStatus"],
            "flag": hivecase["flag"],
        } | details,
        "references": [
            # NOTE(review): TheHive ids already start with "~" (e.g. "~16416"),
            # so "~%s" may produce a double tilde in the href — confirm.
            { "name": "TheHive #%d" % hivecase["caseId"], "href": "%s/index.html#!/case/~%s/details" % (url, hivecase["id"]) }
        ],
        #
        # "playbooks": hivecase["playbooks"],
        #
        "files": [],
        "comments": [],
        # creator, created, message
        #
        "artifacts": [],
        # name, type, status, enrichment
        # name, data
        "created": maptime(hivecase["createdAt"]),
        "modified": maptime(hivecase["updatedAt"]),
    } | case
# {
# "_id": "ce2c00f17132359cb3c50dfbb1901810",
# "_type": "alert",
# "artifacts": [],
# "createdAt": 1495012062014,
# "createdBy": "myuser",
# "date": 1495012062016,
# "description": "N/A",
# "follow": true,
# "id": "ce2c00f17132359cb3c50dfbb1901810",
# "lastSyncDate": 1495012062016,
# "severity": 2,
# "source": "instance1",
# "sourceRef": "alert-ref",
# "status": "New",
# "title": "New Alert",
# "tlp": 2,
# "type": "external",
# "user": "myuser"
# }
def mapalert(hivealert, url):
    """Convert a TheHive alert (API JSON) into a Catalyst ticket dict.

    Requires Python 3.9+ (dict union operator).
    """
    s = alertschema()
    details = {}
    ticket_details = {
        "tlp": maptlp(hivealert["tlp"]),
        "severity": mapseverity(hivealert["severity"]),
        "description": hivealert["description"],
        "source": hivealert["source"],
        "sourceRef": hivealert["sourceRef"],
        "type": hivealert["type"],
    } | details
    return {
        "name": hivealert["title"],
        "type": "alert",
        "status": mapstatus(hivealert["status"]),
        "owner": hivealert["user"],
        "schema": json.dumps(s.schema),
        "details": ticket_details,
        "references": [
            { "name": "TheHive Alerts", "href": "%s/index.html#!/alert/list" % url }
        ],
        "files": [],
        "comments": [],
        "artifacts": [],
        "created": maptime(hivealert["createdAt"]),
        "modified": maptime(hivealert["lastSyncDate"]),
    }
# {
# "_id": "~41152",
# "id": "~41152",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638723814523,
# "_type": "case_artifact",
# "dataType": "ip",
# "data": "2.2.2.2",
# "startDate": 1638723814523,
# "tlp": 2,
# "tags": [],
# "ioc": false,
# "sighted": false,
# "message": ".",
# "reports": {},
# "stats": {},
# "ignoreSimilarity": false
# }
def mapobservable(hiveobservable):
    """Convert a TheHive observable into a Catalyst artifact dict."""
    # Only observables explicitly flagged as IOCs are considered malicious.
    verdict = "malicious" if hiveobservable["ioc"] else "unknown"
    return {
        "name": hiveobservable["data"],
        "type": hiveobservable["dataType"],
        "status": verdict,
    }
# {
# "id": "~12296",
# "_id": "~12296",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638704029800,
# "_type": "case_task",
# "title": "Start",
# "group": "MyTaskGroup1",
# "owner": "jonas@thehive.local",
# "status": "InProgress",
# "flag": false,
# "startDate": 1638704115667,
# "order": 0
# }
# {
# "_id": "~24656",
# "id": "~24656",
# "createdBy": "jonas@thehive.local",
# "createdAt": 1638729992590,
# "_type": "case_task_log",
# "message": "asd",
# "startDate": 1638729992590,
# "attachment": {
# "name": "Chemistry Vector.eps",
# "hashes": [
# "adf2d4cd72f4141fe7f8eb4af035596415a29c048d3039be6449008f291258e9",
# "180f66a6d22b1f09ed198afd814f701e42440e7c",
# "b28ae347371df003b76cbb8c6199c97e"
# ],
# "size": 3421842,
# "contentType": "application/postscript",
# "id": "adf2d4cd72f4141fe7f8eb4af035596415a29c048d3039be6449008f291258e9"
# },
# "status": "Ok",
# "owner": "jonas@thehive.local"
# }
def maptasklog(hivetask, hivetasklog):
    """Render a TheHive task log entry as a Catalyst comment dict.

    The comment message starts with a bold "group: title (status)" header,
    followed by the log text and, when present, the attachment name.
    """
    header = "**" + hivetask["group"] + ": " + hivetask["title"] + "** (" + hivetask["status"] + ")\n\n"
    body = hivetasklog["message"]
    if 'attachment' in hivetasklog:
        body += "\n\n*Attachment*: " + hivetasklog['attachment']["name"]
    return {
        "creator": hivetasklog["createdBy"],
        "created": maptime(hivetasklog["createdAt"]),
        "message": header + body,
    }
def run(msg):
    """Migrate all alerts and cases from TheHive into Catalyst.

    Payload keys: thehiveurl, thehivekey, skip_files (skip attachment upload),
    keep_ids (preserve TheHive case ids; existing tickets with those ids are
    deleted first). Secrets: catalyst_apiurl, catalyst_apikey and, unless
    skip_files, minio_host/minio_access_key/minio_secret_key.
    """
    skip_files = msg["payload"]["skip_files"]
    keep_ids = msg["payload"]["keep_ids"]

    from thehive4py.api import TheHiveApi
    import requests
    from minio import Minio

    headers = {"PRIVATE-TOKEN": msg["secrets"]["catalyst_apikey"]}

    # minioclient = Minio("try.catalyst-soar.com:9000", access_key="minio", secret_key="password")
    # Only needed for attachment upload; undefined when skip_files is set.
    if not skip_files:
        minioclient = Minio(
            msg["secrets"]["minio_host"],
            access_key=msg["secrets"]["minio_access_key"],
            secret_key=msg["secrets"]["minio_secret_key"])

    # url = "http://localhost:9000"
    url = msg["payload"]["thehiveurl"]
    # api = TheHiveApi(url, "dtUCnzY4h291GIFHJKW/Z2I2SgjTRQqo")
    api = TheHiveApi(url, msg["payload"]["thehivekey"])

    # Fetch every alert and import them as Catalyst tickets in one batch.
    print("find alerts", file=sys.stderr)
    alerts = []
    resp = api.find_alerts(query={}, sort=['-createdAt'], range='all')
    resp.raise_for_status()
    for alert in resp.json():
        alerts.append(mapalert(alert, url))
    if alerts:
        print("create %s alerts" % len(alerts), file=sys.stderr)
        response = requests.post(msg["secrets"]["catalyst_apiurl"] + "/tickets/batch", json=alerts, headers=headers)
        response.raise_for_status()

    # Fetch every case along with observables, task logs and attachments.
    print("find incidents", file=sys.stderr)
    incidents = []
    resp = api.find_cases(query={}, sort=['-createdAt'], range='all')
    resp.raise_for_status()
    for case in resp.json():
        incident = mapcase(case, url, keep_ids)
        for observable in api.get_case_observables(case["id"]).json():
            incident["artifacts"].append(mapobservable(observable))
        for task in api.get_case_tasks(case["id"]).json():
            for log in api.get_task_logs(task["id"]).json():
                incident["comments"].append(maptasklog(task, log))
                if 'attachment' in log and not skip_files:
                    incident["files"].append({ "key": log['attachment']["id"], "name": log['attachment']["name"] })
                    # NOTE(review): incident["id"] only exists when keep_ids is
                    # true (see mapcase) — this likely raises KeyError when
                    # attachments are migrated with keep_ids false; confirm.
                    bucket_name = "catalyst-%d" % incident["id"]
                    if not minioclient.bucket_exists(bucket_name):
                        minioclient.make_bucket(bucket_name)
                    response = api.download_attachment(log["attachment"]["id"])
                    data = io.BytesIO(response.content)
                    minioclient.put_object(bucket_name, log["attachment"]["id"], data, length=-1, part_size=10*1024*1024)
        incidents.append(incident)
    if incidents:
        if keep_ids:
            # Remove any existing tickets occupying the preserved ids first.
            print("delete incidents", file=sys.stderr)
            for incident in incidents:
                requests.delete(msg["secrets"]["catalyst_apiurl"] + "/tickets/%d" % incident["id"], headers=headers)
        print("create %d incidents" % len(incidents), file=sys.stderr)
        response = requests.post(msg["secrets"]["catalyst_apiurl"] + "/tickets/batch", json=incidents, headers=headers)
        response.raise_for_status()
    return {"done": True}


# CLI entry point: read the JSON message from argv[1], print the result JSON.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -0,0 +1,22 @@
#!/usr/bin/env python
# Catalyst automation: look up a file hash via the VirusTotal v2 file report API.
import subprocess
import sys

# Best-effort dependency bootstrap; pip output is suppressed.
subprocess.call(
    [sys.executable, "-m", "pip", "install", "requests"],
    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
)

import json
import requests


def run(msg):
    """Return the raw VirusTotal file report for the payload's default value."""
    # NOTE(review): encoding the key/resource to bytes is not required by
    # requests' params handling but is harmless — kept as-is.
    api_key = msg['secrets']['vt_api_key'].encode('utf-8')
    resource = msg['payload']['default'].encode('utf-8')
    params = {'apikey': api_key, 'resource': resource}
    return requests.get("https://www.virustotal.com/vtapi/v2/file/report", params=params).json()


# CLI entry point: read the JSON message from argv[1], print the result JSON.
print(json.dumps(run(json.loads(sys.argv[1]))))

View File

@@ -0,0 +1,27 @@
// Package migrations ships seed content (template schema, automation scripts
// and playbooks) embedded into the binary at build time.
package migrations

import _ "embed"

// DefaultTemplateSchema holds the contents of templates/default.json.
//
//go:embed templates/default.json
var DefaultTemplateSchema string

// SHA1HashAutomation holds the automation script automations/hash.sha1.py.
//
//go:embed automations/hash.sha1.py
var SHA1HashAutomation string

// VTHashAutomation holds the automation script automations/vt.hash.py.
//
//go:embed automations/vt.hash.py
var VTHashAutomation string

// TheHiveAutomation holds the automation script automations/thehive.py.
//
//go:embed automations/thehive.py
var TheHiveAutomation string

// CommentAutomation holds the automation script automations/comment.py.
//
//go:embed automations/comment.py
var CommentAutomation string

// MalwarePlaybook holds the contents of playbooks/malware.yml.
//
//go:embed playbooks/malware.yml
var MalwarePlaybook string

// PhishingPlaybook holds the contents of playbooks/phishing.yml.
//
//go:embed playbooks/phishing.yml
var PhishingPlaybook string

// SimplePlaybook holds the contents of playbooks/simple.yaml.
//
//go:embed playbooks/simple.yaml
var SimplePlaybook string

View File

@@ -0,0 +1,217 @@
package migrations
import (
"context"
"fmt"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
)
const MigrationCollection string = "migrations"
// Migration is a single, uniquely identified database change.
type Migration interface {
	// MID returns the migration's stable, unique identifier.
	MID() string
	// Migrate applies the change to the given database.
	// NOTE(review): the parameter named "driver" shadows the imported driver
	// package inside implementations.
	Migrate(ctx context.Context, driver driver.Database) error
}
// generateMigrations returns the ordered list of all known migrations.
// PerformMigrations runs them in this exact order and records each by ID,
// so entries must never be changed or reordered once released — only
// appended (hence the IDs like "update-…-1" for later amendments).
func generateMigrations() ([]Migration, error) {
	// content here should never change
	return []Migration{
		// Initial collections with strict JSON-schema validation.
		&createCollection{ID: "create-log-collection", Name: "logs", DataType: "log", Schema: `{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"},"reference":{"type":"string"}},"required":["created","creator","message","reference"],"type":"object"}`},
		&createCollection{ID: "create-ticket-collection", Name: "tickets", DataType: "ticket", Schema: `{"properties":{"artifacts":{"items":{"properties":{"enrichments":{"additionalProperties":{"properties":{"created":{"format":"date-time","type":"string"},"data":{"example":{"hash":"b7a067a742c20d07a7456646de89bc2d408a1153"},"properties":{},"type":"object"},"name":{"example":"hash.sha1","type":"string"}},"required":["created","data","name"],"type":"object"},"type":"object"},"name":{"example":"2.2.2.2","type":"string"},"status":{"example":"Unknown","type":"string"},"type":{"type":"string"}},"required":["name"],"type":"object"},"type":"array"},"comments":{"items":{"properties":{"created":{"format":"date-time","type":"string"},"creator":{"type":"string"},"message":{"type":"string"}},"required":["created","creator","message"],"type":"object"},"type":"array"},"created":{"format":"date-time","type":"string"},"details":{"example":{"description":"my little incident"},"properties":{},"type":"object"},"files":{"items":{"properties":{"key":{"example":"myfile","type":"string"},"name":{"example":"notes.docx","type":"string"}},"required":["key","name"],"type":"object"},"type":"array"},"modified":{"format":"date-time","type":"string"},"name":{"example":"WannyCry","type":"string"},"owner":{"example":"bob","type":"string"},"playbooks":{"additionalProperties":{"properties":{"name":{"example":"Phishing","type":"string"},"tasks":{"additionalProperties":{"properties":{"automation":{"type":"string"},"closed":{"format":"date-time","type":"string"},"created":{"format":"date-time","type":"string"},"data":{"properties":{},"type":"object"},"done":{"type":"boolean"},"join":{"example":false,"type":"boolean"},"payload":{"additionalProperties":{"type":"string"},"type":"object"},"name":{"example":"Inform user","type":"string"},"next":{"additionalProperties":{"type":"string"},"type":"object"},"owner":{"type":"string"},"schema":{"properties":{},"type":"object"},"type":{"enum":["task","input","automation"],"example":"task","type":"string"}},"required":["created","done","name","type"],"type":"object"},"type":"object"}},"required":["name","tasks"],"type":"object"},"type":"object"},"read":{"example":["bob"],"items":{"type":"string"},"type":"array"},"references":{"items":{"properties":{"href":{"example":"https://cve.mitre.org/cgi-bin/cvename.cgi?name=cve-2017-0144","type":"string"},"name":{"example":"CVE-2017-0144","type":"string"}},"required":["href","name"],"type":"object"},"type":"array"},"schema":{"example":"{}","type":"string"},"status":{"example":"open","type":"string"},"type":{"example":"incident","type":"string"},"write":{"example":["alice"],"items":{"type":"string"},"type":"array"}},"required":["created","modified","name","schema","status","type"],"type":"object"}`},
		&createCollection{ID: "create-template-collection", Name: "templates", DataType: "template", Schema: `{"properties":{"name":{"type":"string"},"schema":{"type":"string"}},"required":["name","schema"],"type":"object"}`},
		&createCollection{ID: "create-playbook-collection", Name: "playbooks", DataType: "playbook", Schema: `{"properties":{"name":{"type":"string"},"yaml":{"type":"string"}},"required":["name","yaml"],"type":"object"}`},
		&createCollection{ID: "create-automation-collection", Name: "automations", DataType: "automation", Schema: `{"properties":{"image":{"type":"string"},"script":{"type":"string"}},"required":["image","script"],"type":"object"}`},
		&createCollection{ID: "create-userdata-collection", Name: "userdata", DataType: "userdata", Schema: `{"properties":{"email":{"type":"string"},"image":{"type":"string"},"name":{"type":"string"},"timeformat":{"title":"Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)","type":"string"}},"type":"object"}`},
		&createCollection{ID: "create-tickettype-collection", Name: "tickettypes", DataType: "tickettype", Schema: `{"properties":{"default_groups":{"items":{"type":"string"},"type":"array"},"default_playbooks":{"items":{"type":"string"},"type":"array"},"default_template":{"type":"string"},"icon":{"type":"string"},"name":{"type":"string"}},"required":["default_playbooks","default_template","icon","name"],"type":"object"}`},
		&createCollection{ID: "create-user-collection", Name: "users", DataType: "user", Schema: `{"properties":{"apikey":{"type":"boolean"},"blocked":{"type":"boolean"},"roles":{"items":{"type":"string"},"type":"array"},"sha256":{"type":"string"}},"required":["apikey","blocked","roles"],"type":"object"}`},
		// Graph linking related tickets.
		&createGraph{ID: "create-ticket-graph", Name: "Graph", EdgeDefinitions: []driver.EdgeDefinition{{Collection: "related", From: []string{"tickets"}, To: []string{"tickets"}}}},
		// Seed documents: default template, bundled automations, playbooks and ticket types.
		&createDocument{ID: "create-template-default", Collection: "templates", Document: &busdb.Keyed{Key: "default", Doc: models.TicketTemplate{Schema: DefaultTemplateSchema, Name: "Default"}}},
		&createDocument{ID: "create-automation-vt.hash", Collection: "automations", Document: &busdb.Keyed{Key: "vt.hash", Doc: models.Automation{Image: "docker.io/python:3", Script: VTHashAutomation}}},
		&createDocument{ID: "create-automation-comment", Collection: "automations", Document: &busdb.Keyed{Key: "comment", Doc: models.Automation{Image: "docker.io/python:3", Script: CommentAutomation}}},
		&createDocument{ID: "create-automation-thehive", Collection: "automations", Document: &busdb.Keyed{Key: "thehive", Doc: models.Automation{Image: "docker.io/python:3", Script: TheHiveAutomation}}},
		&createDocument{ID: "create-automation-hash.sha1", Collection: "automations", Document: &busdb.Keyed{Key: "hash.sha1", Doc: models.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation}}},
		&createDocument{ID: "create-playbook-malware", Collection: "playbooks", Document: &busdb.Keyed{Key: "malware", Doc: models.PlaybookTemplate{Name: "Malware", Yaml: MalwarePlaybook}}},
		&createDocument{ID: "create-playbook-phishing", Collection: "playbooks", Document: &busdb.Keyed{Key: "phishing", Doc: models.PlaybookTemplate{Name: "Phishing", Yaml: PhishingPlaybook}}},
		&createDocument{ID: "create-tickettype-alert", Collection: "tickettypes", Document: &busdb.Keyed{Key: "alert", Doc: models.TicketType{Name: "Alerts", Icon: "mdi-alert", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
		&createDocument{ID: "create-tickettype-incident", Collection: "tickettypes", Document: &busdb.Keyed{Key: "incident", Doc: models.TicketType{Name: "Incidents", Icon: "mdi-radioactive", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
		&createDocument{ID: "create-tickettype-investigation", Collection: "tickettypes", Document: &busdb.Keyed{Key: "investigation", Doc: models.TicketType{Name: "Forensic Investigations", Icon: "mdi-fingerprint", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
		&createDocument{ID: "create-tickettype-hunt", Collection: "tickettypes", Document: &busdb.Keyed{Key: "hunt", Doc: models.TicketType{Name: "Threat Hunting", Icon: "mdi-target", DefaultTemplate: "default", DefaultPlaybooks: []string{}, DefaultGroups: nil}}},
		// Later amendments, appended after the initial release.
		&updateSchema{ID: "update-automation-collection-1", Name: "automations", DataType: "automation", Schema: `{"properties":{"image":{"type":"string"},"script":{"type":"string"}},"required":["image","script"],"type":"object"}`},
		&updateDocument{ID: "update-automation-vt.hash-1", Collection: "automations", Key: "vt.hash", Document: models.Automation{Image: "docker.io/python:3", Script: VTHashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
		&updateDocument{ID: "update-automation-comment-1", Collection: "automations", Key: "comment", Document: models.Automation{Image: "docker.io/python:3", Script: CommentAutomation, Type: []string{"playbook"}}},
		&updateDocument{ID: "update-automation-thehive-1", Collection: "automations", Key: "thehive", Document: models.Automation{Image: "docker.io/python:3", Script: TheHiveAutomation, Schema: pointer.String(`{"title":"TheHive credentials","type":"object","properties":{"thehiveurl":{"type":"string","title":"TheHive URL (e.g. 'https://thehive.example.org')"},"thehivekey":{"type":"string","title":"TheHive API Key"},"skip_files":{"type":"boolean", "default": true, "title":"Skip Files (much faster)"},"keep_ids":{"type":"boolean", "default": true, "title":"Keep IDs and overwrite existing IDs"}},"required":["thehiveurl", "thehivekey", "skip_files", "keep_ids"]}`), Type: []string{"global"}}},
		&updateDocument{ID: "update-automation-hash.sha1-1", Collection: "automations", Key: "hash.sha1", Document: models.Automation{Image: "docker.io/python:3", Script: SHA1HashAutomation, Schema: pointer.String(`{"title":"Input","type":"object","properties":{"default":{"type":"string","title":"Value"}},"required":["default"]}`), Type: []string{"global", "artifact", "playbook"}}},
		&createCollection{ID: "create-job-collection", Name: "jobs", DataType: "job", Schema: `{"properties":{"automation":{"type":"string"},"log":{"type":"string"},"payload":{},"origin":{"properties":{"artifact_origin":{"properties":{"artifact":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["artifact","ticket_id"],"type":"object"},"task_origin":{"properties":{"playbook_id":{"type":"string"},"task_id":{"type":"string"},"ticket_id":{"format":"int64","type":"integer"}},"required":["playbook_id","task_id","ticket_id"],"type":"object"}},"type":"object"},"output":{"properties":{},"type":"object"},"running":{"type":"boolean"},"status":{"type":"string"}},"required":["automation","running","status"],"type":"object"}`},
	}, nil
}
// loadSchema builds strict ArangoDB collection schema options from a JSON
// schema document; validation failures on the collection carry a message
// naming dataType.
func loadSchema(dataType, jsonschema string) (*driver.CollectionSchemaOptions, error) {
	opts := &driver.CollectionSchemaOptions{
		Level:   driver.CollectionSchemaLevelStrict,
		Message: fmt.Sprintf("Validation of %s failed", dataType),
	}
	if err := opts.LoadRule([]byte(jsonschema)); err != nil {
		return opts, err
	}
	return opts, nil
}
// migration is the bookkeeping document stored in MigrationCollection; its
// document key is the migration ID.
type migration struct {
	Key string `json:"_key"`
}
// PerformMigrations runs every known migration that is not yet recorded in
// the migrations collection, recording each successful run so that every
// migration is executed at most once.
func PerformMigrations(ctx context.Context, db driver.Database) error {
	bookkeeping, err := db.Collection(ctx, MigrationCollection)
	if err != nil {
		return err
	}

	all, err := generateMigrations()
	if err != nil {
		return fmt.Errorf("could not generate migrations: %w", err)
	}

	for _, mig := range all {
		applied, err := bookkeeping.DocumentExists(ctx, mig.MID())
		if err != nil {
			return err
		}
		if applied {
			continue
		}

		if err := mig.Migrate(ctx, db); err != nil {
			return fmt.Errorf("migration %s failed: %w", mig.MID(), err)
		}
		if _, err := bookkeeping.CreateDocument(ctx, &migration{Key: mig.MID()}); err != nil {
			return fmt.Errorf("could not save %s migration document: %w", mig.MID(), err)
		}
	}

	return nil
}
// createCollection is a migration that creates a collection with a strict
// JSON-schema validation rule attached.
type createCollection struct {
	ID       string // unique migration ID
	Name     string // collection name
	DataType string // data type name, used in the validation error message
	Schema   string // JSON schema that documents must satisfy
}

// MID returns the unique migration ID.
func (m *createCollection) MID() string {
	return m.ID
}
// Migrate creates the collection with its strict validation schema attached.
func (m *createCollection) Migrate(ctx context.Context, db driver.Database) error {
	validation, err := loadSchema(m.DataType, m.Schema)
	if err != nil {
		return err
	}
	opts := &driver.CreateCollectionOptions{Schema: validation}
	if _, err := db.CreateCollection(ctx, m.Name, opts); err != nil {
		return err
	}
	return nil
}
// updateSchema is a migration that replaces the validation schema of an
// existing collection.
type updateSchema struct {
	ID       string // unique migration ID
	Name     string // collection name
	DataType string // data type name, used in the validation error message
	Schema   string // new JSON schema
}

// MID returns the unique migration ID.
func (m *updateSchema) MID() string {
	return m.ID
}
// Migrate swaps in the new validation schema on the existing collection.
func (m *updateSchema) Migrate(ctx context.Context, db driver.Database) error {
	validation, err := loadSchema(m.DataType, m.Schema)
	if err != nil {
		return err
	}
	col, err := db.Collection(ctx, m.Name)
	if err != nil {
		return err
	}
	return col.SetProperties(ctx, driver.SetCollectionPropertiesOptions{Schema: validation})
}
// createGraph is a migration that creates a named graph with the given edge
// definitions.
type createGraph struct {
	ID              string // unique migration ID
	Name            string // graph name
	EdgeDefinitions []driver.EdgeDefinition
}

// MID returns the unique migration ID.
func (m *createGraph) MID() string {
	return m.ID
}
// Migrate creates the named graph with the configured edge definitions.
func (m *createGraph) Migrate(ctx context.Context, db driver.Database) error {
	opts := &driver.CreateGraphOptions{EdgeDefinitions: m.EdgeDefinitions}
	if _, err := db.CreateGraph(ctx, m.Name, opts); err != nil {
		return err
	}
	return nil
}
// createDocument is a migration that inserts a single seed document into a
// collection.
type createDocument struct {
	ID         string      // unique migration ID
	Collection string      // target collection
	Document   interface{} // document to insert
}

// MID returns the unique migration ID.
func (m *createDocument) MID() string {
	return m.ID
}
// Migrate inserts the document into the target collection.
//
// The database parameter is named db (not driver) so it no longer shadows
// the imported driver package, matching the other Migration implementations.
func (m *createDocument) Migrate(ctx context.Context, db driver.Database) error {
	collection, err := db.Collection(ctx, m.Collection)
	if err != nil {
		return err
	}
	_, err = collection.CreateDocument(ctx, m.Document)
	return err
}
// updateDocument is a migration that upserts a keyed document in a
// collection (create when missing, replace otherwise).
type updateDocument struct {
	ID         string      // unique migration ID
	Collection string      // target collection
	Key        string      // document key to upsert
	Document   interface{} // new document content
}

// MID returns the unique migration ID.
func (m *updateDocument) MID() string {
	return m.ID
}
// Migrate upserts the document under the configured key: it is created when
// missing and replaced otherwise.
//
// The database parameter is named db (not driver) so it no longer shadows
// the imported driver package, matching the other Migration implementations.
func (m *updateDocument) Migrate(ctx context.Context, db driver.Database) error {
	collection, err := db.Collection(ctx, m.Collection)
	if err != nil {
		return err
	}
	exists, err := collection.DocumentExists(ctx, m.Key)
	if err != nil {
		return err
	}
	if !exists {
		_, err = collection.CreateDocument(ctx, m.Document)
		return err
	}
	_, err = collection.ReplaceDocument(ctx, m.Key, m.Document)
	return err
}

View File

@@ -0,0 +1,63 @@
name: Malware
tasks:
  file-or-hash:
    name: Do you have the file or the hash?
    type: input
    schema:
      title: Malware
      type: object
      properties:
        file:
          type: string
          title: "I have the"
          enum: [ "File", "Hash" ]
    next:
      enter-hash: "file == 'Hash'"
      upload: "file == 'File'"
  enter-hash:
    name: Please enter the hash
    type: input
    schema:
      title: Malware
      type: object
      properties:
        hash:
          type: string
          title: Please enter the hash value
          # JSON Schema keywords are case-sensitive: "minLength", not "minlength"
          minLength: 32
    next:
      virustotal: "hash != ''"
  upload:
    name: Upload the malware
    type: input
    schema:
      title: Malware
      type: object
      properties:
        malware:
          type: object
          x-display: file
          title: Please upload the malware
    next:
      hash: "malware"
  hash:
    name: Hash the malware
    type: automation
    automation: hash.sha1
    payload:
      default: "playbook.tasks['upload'].data['malware']"
    next:
      virustotal:
  virustotal:
    name: Send hash to VirusTotal
    type: automation
    automation: vt.hash
    # Tasks have no "args" field; automation inputs are passed via "payload",
    # and the vt.hash automation reads payload['default'].
    payload:
      default: "playbook.tasks['enter-hash'].data['hash'] || playbook.tasks['hash'].data['hash']"
    # next:
    #   known-malware: "score > 5"
    #   sandbox: "score < 6" # unknown-malware

View File

@@ -0,0 +1,85 @@
name: Phishing
tasks:
  # Entry point: decide whether the incident needs CISO escalation.
  board:
    name: Board Involvement?
    description: Is a board member involved?
    type: input
    schema:
      properties:
        boardInvolved:
          default: false
          title: A board member is involved.
          type: boolean
      required:
        - boardInvolved
      title: Board Involvement?
      type: object
    next:
      escalate: "boardInvolved == true"
      mail-available: "boardInvolved == false"
  escalate:
    name: Escalate to CISO
    description: Please escalate the task to the CISO
    type: task
  # Branch on whether the phishing mail itself is available.
  mail-available:
    name: Mail available
    type: input
    schema:
      oneOf:
        - properties:
            mail:
              title: Mail
              type: string
              x-display: textarea
            schemaKey:
              const: 'yes'
              type: string
          required:
            - mail
          title: 'Yes'
        - properties:
            schemaKey:
              const: 'no'
              type: string
          title: 'No'
      title: Mail available
      type: object
    next:
      block-sender: "schemaKey == 'yes'"
      extract-iocs: "schemaKey == 'yes'"
      search-email-gateway: "schemaKey == 'no'"
  search-email-gateway:
    name: Search email gateway
    description: Please search email-gateway for the phishing mail.
    type: task
    next:
      # empty condition: always follow once the task is done
      extract-iocs:
  block-sender:
    name: Block sender
    type: task
    next:
      extract-iocs:
  extract-iocs:
    name: Extract IOCs
    description: Please insert the IOCs
    type: input
    schema:
      properties:
        iocs:
          items:
            type: string
          title: IOCs
          type: array
      title: Extract IOCs
      type: object
    next:
      block-iocs:
  block-iocs:
    name: Block IOCs
    type: task

View File

@@ -0,0 +1,37 @@
name: Simple
tasks:
  # Step 1: ask the user for an arbitrary string.
  input:
    name: Enter something to hash
    type: input
    schema:
      title: Something
      type: object
      properties:
        something:
          type: string
          title: Something
          default: ""
    next:
      hash: "something != ''"
  # Step 2: hash the entered value with the hash.sha1 automation.
  hash:
    name: Hash the something
    type: automation
    automation: hash.sha1
    payload:
      default: "playbook.tasks['input'].data['something']"
    next:
      comment: "hash != ''"
  # Step 3: post the hash as a ticket comment via the comment automation.
  comment:
    name: Comment the hash
    type: automation
    automation: comment
    payload:
      default: "playbook.tasks['hash'].data['hash']"
    next:
      done: "done"
  # Final manual step.
  done:
    name: You can close this case now
    type: task

View File

@@ -0,0 +1,208 @@
{
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Advanced",
"type": "object",
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
}
]
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"nx-enum": [
"White",
"Green",
"Amber",
"Red"
],
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
},
"type": {
"type": "object",
"title": "Select an incident type",
"oneOf": [
{
"title": "Malware",
"properties": {
"schemaKey": {
"type": "string",
"const": "malware"
},
"malware_type": {
"type": "string",
"title": "Malware Type",
"enum": ["Ransomware", "Worm", "Virus"]
}
}
},
{
"title": "Phishing",
"properties": {
"schemaKey": {
"type": "string",
"const": "phishing"
},
"phishing_type": {
"type": "string",
"title": "Phishing Type",
"enum": ["Normal", "Spear", "Whale"]
}
}
}
],
"x-cols": 12
},
"apt": {
"type": "boolean",
"x-display": "switch",
"title": "APT involved?",
"x-cols": 6
},
"apt-group": {
"type": "string",
"title": "Select APT",
"enum": ["Lazarus Group", "Equation Group", "Fancy Bear (APT 28)", "OceanLotus (APT 32)", "Other"],
"x-if": "apt",
"x-cols": 6
},
"tactics": {
"type": "array",
"title": "MITRE Att&ck",
"description": "This description is used as a help message.",
"items": {
"type": "object",
"oneOf": [
{
"title": "Reconnaissance",
"properties": {
"tactic": {
"type": "string",
"const": "reconnaissance",
"title": "Tactic",
"description": "The adversary is trying to gather information they can use to plan future operations."
},
"techniques": {
"type": "array",
"title": "Techniques",
"items": {
"type": "string",
"oneOf": [
{
"const": "T1595",
"title": "Active Scanning",
"description": "Adversaries may execute active reconnaissance scans to gather information that can be used during targeting. Active scans are those where the adversary probes victim infrastructure via network traffic, as opposed to other forms of reconnaissance that do not involve direct interaction."
},
{
"const": "T1592",
"title": "Gather Victim Host Information"
}
]
},
"minItems": 1,
"uniqueItems": true
}
}
},
{
"title": "Persistence",
"properties": {
"tactic": {
"type": "string",
"const": "persistence"
},
"techniques": {
"type": "string",
"title": "Techniques",
"oneOf": [
{
"const": "T1098",
"title": "Account Manipulation"
},
{
"const": "T1197",
"title": "BITS Jobs"
}
]
}
}
}
]
},
"uniqueItems": true
},
"tags": {
"type": "array",
"title": "Tags",
"items": {
"type": "string",
"examples": [
"misp",
"external report",
"internal report"
]
}
}
},
"required": ["severity", "description", "tactics", "type"]
}

View File

@@ -0,0 +1,79 @@
{
"definitions": {},
"$schema": "http://json-schema.org/draft-07/schema#",
"$id": "https://example.com/object1618746510.json",
"title": "Default",
"type": "object",
"required": [
"severity",
"description",
"tlp"
],
"properties": {
"severity": {
"$id": "#root/severity",
"title": "Severity",
"type": "string",
"default": "Medium",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "Low",
"title": "Low",
"icon": "mdi-chevron-up"
},
{
"const": "Medium",
"title": "Medium",
"icon": "mdi-chevron-double-up"
},
{
"const": "High",
"title": "High",
"icon": "mdi-chevron-triple-up"
}
]
},
"tlp": {
"$id": "#root/tlp",
"title": "TLP",
"type": "string",
"x-cols": 6,
"x-class": "pr-2",
"x-display": "icon",
"x-itemIcon": "icon",
"oneOf": [
{
"const": "White",
"title": "White",
"icon": "mdi-alpha-w"
},
{
"const": "Green",
"title": "Green",
"icon": "mdi-alpha-g"
},
{
"const": "Amber",
"title": "Amber",
"icon": "mdi-alpha-a"
},
{
"const": "Red",
"title": "Red",
"icon": "mdi-alpha-r"
}
]
},
"description": {
"$id": "#root/description",
"title": "Description",
"type": "string",
"x-display": "textarea",
"x-class": "pr-2"
}
}
}

File diff suppressed because it is too large Load Diff

152
database/playbook.go Normal file
View File

@@ -0,0 +1,152 @@
package database
import (
"context"
"errors"
"time"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/icza/dyno"
"gopkg.in/yaml.v3"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// PlaybookYAML is the YAML representation of a playbook template document.
type PlaybookYAML struct {
	Name  string              `yaml:"name"`
	Tasks map[string]TaskYAML `yaml:"tasks"`
}

// TaskYAML is a single task entry of a playbook YAML document.
type TaskYAML struct {
	Name       string            `yaml:"name"`       // human-readable task name
	Type       string            `yaml:"type"`       // "task", "input" or "automation"
	Schema     interface{}       `yaml:"schema"`     // JSON schema for "input" tasks
	Automation string            `yaml:"automation"` // automation key for "automation" tasks
	Payload    map[string]string `yaml:"payload"`    // automation payload expressions
	Next       map[string]string `yaml:"next"`       // successor task ID -> condition expression
	Join       bool              `yaml:"join"`       // if true, wait for all parent tasks
}
// toPlaybooks converts playbook template forms into playbooks, keyed by
// their ID or, when no ID is set, by the kebab-cased playbook name.
func toPlaybooks(docs []*models.PlaybookTemplateForm) (map[string]*models.Playbook, error) {
	result := make(map[string]*models.Playbook, len(docs))
	for _, form := range docs {
		converted, err := toPlaybook(form)
		if err != nil {
			return nil, err
		}

		key := strcase.ToKebab(converted.Name)
		if form.ID != nil {
			key = *form.ID
		}
		result[key] = converted
	}
	return result, nil
}
// toPlaybook parses the YAML of a playbook template form into a Playbook and
// normalizes each task: nested schema map keys are converted to strings and
// the creation timestamp is set.
func toPlaybook(doc *models.PlaybookTemplateForm) (*models.Playbook, error) {
	ticketPlaybook := &models.Playbook{}
	if err := yaml.Unmarshal([]byte(doc.Yaml), ticketPlaybook); err != nil {
		return nil, err
	}
	for idx, task := range ticketPlaybook.Tasks {
		// Convert interface{} map keys to string keys so the schema can be
		// marshalled to JSON later. The checked assertion avoids the panic
		// the previous unchecked assertion caused for non-map schema values
		// (such schemas are left untouched instead).
		if schemaMap, ok := task.Schema.(map[string]interface{}); ok {
			task.Schema = dyno.ConvertMapI2MapS(schemaMap)
		}
		task.Created = time.Now().UTC()
		ticketPlaybook.Tasks[idx] = task
	}
	return ticketPlaybook, nil
}
// toPlaybookTemplateResponse builds the API response for a stored playbook
// template from its document key and template fields.
func toPlaybookTemplateResponse(key string, doc *models.PlaybookTemplate) *models.PlaybookTemplateResponse {
	return &models.PlaybookTemplateResponse{ID: key, Name: doc.Name, Yaml: doc.Yaml}
}
// PlaybookCreate stores a new playbook template. The template name is taken
// from the YAML document, which must carry a non-empty name; the document
// key is the kebab-cased name.
func (db *Database) PlaybookCreate(ctx context.Context, playbook *models.PlaybookTemplateForm) (*models.PlaybookTemplateResponse, error) {
	if playbook == nil {
		return nil, errors.New("requires playbook")
	}
	var playbookYAML PlaybookYAML
	err := yaml.Unmarshal([]byte(playbook.Yaml), &playbookYAML)
	if err != nil {
		return nil, err
	}
	if playbookYAML.Name == "" {
		return nil, errors.New("requires template name")
	}
	p := models.PlaybookTemplate{Name: playbookYAML.Name, Yaml: playbook.Yaml}
	// doc receives the newly created document via the ReturnNew context.
	var doc models.PlaybookTemplate
	newctx := driver.WithReturnNew(ctx, &doc)
	// NOTE(review): both ctx and newctx are passed here; the busdb
	// collection wrapper presumably takes (ctx, newctx, key, doc) —
	// confirm against its signature.
	meta, err := db.playbookCollection.CreateDocument(ctx, newctx, strcase.ToKebab(playbookYAML.Name), p)
	if err != nil {
		return nil, err
	}
	return toPlaybookTemplateResponse(meta.Key, &doc), nil
}
// PlaybookGet loads a single playbook template by its document key.
func (db *Database) PlaybookGet(ctx context.Context, id string) (*models.PlaybookTemplateResponse, error) {
	var template models.PlaybookTemplate
	meta, err := db.playbookCollection.ReadDocument(ctx, id, &template)
	if err != nil {
		return nil, err
	}
	return toPlaybookTemplateResponse(meta.Key, &template), nil
}
// PlaybookDelete removes a playbook template by its document key.
func (db *Database) PlaybookDelete(ctx context.Context, id string) error {
	_, err := db.playbookCollection.RemoveDocument(ctx, id)
	return err
}
// PlaybookUpdate replaces an existing playbook template. The template name
// is taken from the YAML document, which must carry a non-empty name.
func (db *Database) PlaybookUpdate(ctx context.Context, id string, playbook *models.PlaybookTemplateForm) (*models.PlaybookTemplateResponse, error) {
	var parsed PlaybookYAML
	if err := yaml.Unmarshal([]byte(playbook.Yaml), &parsed); err != nil {
		return nil, err
	}
	if parsed.Name == "" {
		return nil, errors.New("requires template name")
	}

	// updated receives the replaced document via the ReturnNew context.
	var updated models.PlaybookTemplate
	ctx = driver.WithReturnNew(ctx, &updated)
	replacement := models.PlaybookTemplate{Name: parsed.Name, Yaml: playbook.Yaml}
	meta, err := db.playbookCollection.ReplaceDocument(ctx, id, replacement)
	if err != nil {
		return nil, err
	}
	return toPlaybookTemplateResponse(meta.Key, &updated), nil
}
// PlaybookList returns all stored playbook templates.
func (db *Database) PlaybookList(ctx context.Context) ([]*models.PlaybookTemplateResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": PlaybookCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*models.PlaybookTemplateResponse
	for {
		var doc models.PlaybookTemplate
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toPlaybookTemplateResponse(meta.Key, &doc))
	}
	// The outer err is necessarily nil here (loop errors return early);
	// return nil explicitly instead of the stale variable.
	return docs, nil
}

182
database/playbookutils.go Normal file
View File

@@ -0,0 +1,182 @@
package database
import (
"errors"
"fmt"
"log"
"sort"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/dag"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// playbookGraph builds a DAG from the playbook's tasks (nodes) and their
// "next" transitions (edges). Task IDs are added in sorted order so graph
// construction is deterministic.
func playbookGraph(playbook *models.Playbook) (*dag.Graph, error) {
	d := dag.NewGraph()
	var taskIDs []string
	for taskID := range playbook.Tasks {
		taskIDs = append(taskIDs, taskID)
	}
	sort.Strings(taskIDs)
	for _, taskID := range taskIDs {
		if err := d.AddNode(taskID); err != nil {
			// include the task and underlying error instead of swallowing them
			return nil, errors.New("could not add node " + taskID + ": " + err.Error())
		}
	}
	for _, taskID := range taskIDs {
		task := playbook.Tasks[taskID]
		for next := range task.Next {
			if err := d.AddEdge(taskID, next); err != nil {
				return nil, errors.New("could not add edge " + taskID + " -> " + next + ": " + err.Error())
			}
		}
	}
	return d, nil
}
// toTaskResponse builds the API response for a single playbook task,
// copying the task fields and annotating it with its topological order and
// whether it is currently active.
func toTaskResponse(playbook *models.Playbook, taskID string, order int, graph *dag.Graph) (*models.TaskResponse, error) {
	task, ok := playbook.Tasks[taskID]
	if !ok {
		return nil, fmt.Errorf("task %s not found", taskID)
	}
	tr := &models.TaskResponse{
		Automation: task.Automation,
		Closed:     task.Closed,
		Created:    task.Created,
		Data:       task.Data,
		Done:       task.Done,
		Join:       task.Join,
		Payload:    task.Payload,
		Name:       task.Name,
		Next:       task.Next,
		Owner:      task.Owner,
		Schema:     task.Schema,
		Type:       task.Type,
		// Active: active,
		// Order: v.Order,
	}
	tr.Order = int64(order)
	// NOTE(review): the error from active is deliberately discarded here, so
	// an evaluation failure reports the task as inactive — confirm intended.
	taskActive, _ := active(playbook, taskID, graph, task)
	tr.Active = taskActive
	return tr, nil
}
// activePlaybook reports whether the task identified by taskID is currently
// active within the given playbook, building the task graph on the fly.
func activePlaybook(playbook *models.Playbook, taskID string) (bool, error) {
	task, ok := playbook.Tasks[taskID]
	if !ok {
		return false, fmt.Errorf("playbook does not contain tasks %s", taskID)
	}

	graph, err := playbookGraph(playbook)
	if err != nil {
		return false, err
	}
	return active(playbook, taskID, graph, task)
}
// active reports whether a task can currently be worked on: it must not be
// done, and its parents' completion state and "next" conditions must allow
// it. Root tasks (no parents) are always active; join tasks require ALL
// parents to be done with satisfied conditions, other tasks require ANY.
func active(playbook *models.Playbook, taskID string, d *dag.Graph, task *models.Task) (bool, error) {
	// Finished tasks are never active.
	if task.Done {
		return false, nil
	}
	parents := d.GetParents(taskID)
	if len(parents) == 0 {
		return true, nil // valid(&task)
	}
	if task.Join != nil && *task.Join {
		// Join semantics: every parent must be done and every parent's
		// condition leading here must evaluate to true.
		for _, parent := range parents {
			parentTask := playbook.Tasks[parent]
			if !parentTask.Done {
				return false, nil
			}
			requirement := parentTask.Next[taskID]
			b, err := evalRequirement(requirement, parentTask.Data)
			if err != nil {
				return false, err
			}
			if !b {
				return false, nil
			}
		}
		return true, nil
	}
	// Non-join semantics: one completed parent with a satisfied condition is
	// enough; unfinished or failing parents are skipped, not fatal.
	for _, parent := range parents {
		parentTask := playbook.Tasks[parent]
		if !parentTask.Done {
			// return false, nil
			continue
		}
		requirement := parentTask.Next[taskID]
		b, err := evalRequirement(requirement, parentTask.Data)
		if err != nil {
			// evaluation errors on one parent do not block other parents
			continue
		}
		if b {
			return true, nil
		}
	}
	return false, nil
}
// evalRequirement evaluates a CAQL condition expression against task data.
// An empty expression always evaluates to true; a non-boolean result is
// treated as false.
func evalRequirement(aql string, data interface{}) (bool, error) {
	if aql == "" {
		return true, nil
	}
	parser := caql.Parser{}
	tree, err := parser.Parse(aql)
	if err != nil {
		return false, err
	}
	var dataMap map[string]interface{}
	if data != nil {
		if dataMapX, ok := data.(map[string]interface{}); ok {
			dataMap = dataMapX
		} else {
			// non-map task data is evaluated against an empty scope
			log.Println("wrong data type for task data")
		}
	}
	v, err := tree.Eval(dataMap)
	if err != nil {
		return false, err
	}
	if b, ok := v.(bool); ok {
		return b, nil
	}
	// Non-boolean result: err is necessarily nil here, so return nil
	// explicitly instead of the stale variable.
	return false, nil
}
/*
// "github.com/qri-io/jsonschema"
func valid(task *models.Task) (bool, error) {
schema, err := json.Marshal(task.Schema)
if err != nil {
return false, err
}
rs := &jsonschema.Schema{}
if err := json.Unmarshal(schema, rs); err != nil {
return false, err
}
state := rs.Validate(context.Background(), task.Data)
return len(*state.Errs) > 0, nil
}
*/

View File

@@ -0,0 +1,135 @@
package database
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// playbook2 is a phishing playbook fixture with no task done yet; only the
// root task "board" should be active.
var playbook2 = &models.Playbook{
	Name: "Phishing",
	Tasks: map[string]*models.Task{
		"board": {Next: map[string]string{
			"escalate":    "boardInvolved == true",
			"aquire-mail": "boardInvolved == false",
		}},
		"escalate": {},
		"aquire-mail": {Next: map[string]string{
			"extract-iocs":         "schemaKey == 'yes'",
			"block-sender":         "schemaKey == 'yes'",
			"search-email-gateway": "schemaKey == 'no'",
		}},
		"extract-iocs":         {Next: map[string]string{"fetch-iocs": ""}},
		"fetch-iocs":           {Next: map[string]string{"block-iocs": ""}},
		"search-email-gateway": {Next: map[string]string{"block-iocs": ""}},
		"block-sender":         {Next: map[string]string{"block-iocs": ""}},
		"block-iocs":           {Next: map[string]string{"block-ioc": ""}},
		"block-ioc":            {},
	},
}

// playbook3 is playbook2 with "board" completed and boardInvolved == true,
// which should activate "escalate" only.
var playbook3 = &models.Playbook{
	Name: "Phishing",
	Tasks: map[string]*models.Task{
		"board": {Next: map[string]string{
			"escalate":    "boardInvolved == true",
			"aquire-mail": "boardInvolved == false",
		}, Data: map[string]interface{}{"boardInvolved": true}, Done: true},
		"escalate": {},
		"aquire-mail": {Next: map[string]string{
			"extract-iocs":         "schemaKey == 'yes'",
			"block-sender":         "schemaKey == 'yes'",
			"search-email-gateway": "schemaKey == 'no'",
		}},
		"extract-iocs":         {Next: map[string]string{"fetch-iocs": ""}},
		"fetch-iocs":           {Next: map[string]string{"block-iocs": ""}},
		"search-email-gateway": {Next: map[string]string{"block-iocs": ""}},
		"block-sender":         {Next: map[string]string{"block-iocs": ""}},
		"block-iocs":           {Next: map[string]string{"block-ioc": ""}},
		"block-ioc":            {},
	},
}

// playbook4 is a malware playbook fixture used to check topological task
// ordering.
var playbook4 = &models.Playbook{
	Name: "Malware",
	Tasks: map[string]*models.Task{
		"file-or-hash": {Next: map[string]string{
			"enter-hash": "file == 'Hash'",
			"upload":     "file == 'File'",
		}},
		"enter-hash": {Next: map[string]string{
			"virustotal": "hash != ''",
		}},
		"upload": {Next: map[string]string{
			"hash": "malware",
		}},
		"hash":       {Next: map[string]string{"virustotal": ""}},
		"virustotal": {},
	},
}
// Test_canBeCompleted checks which tasks activePlaybook reports as active
// for the phishing fixtures in different completion states.
func Test_canBeCompleted(t *testing.T) {
	type args struct {
		playbook *models.Playbook
		taskID   string
	}
	tests := []struct {
		name    string
		args    args
		want    bool
		wantErr bool
	}{
		{"playbook2 board", args{playbook: playbook2, taskID: "board"}, true, false},
		{"playbook2 escalate", args{playbook: playbook2, taskID: "escalate"}, false, false},
		{"playbook2 aquire-mail", args{playbook: playbook2, taskID: "aquire-mail"}, false, false},
		{"playbook2 block-ioc", args{playbook: playbook2, taskID: "block-ioc"}, false, false},
		{"playbook3 board", args{playbook: playbook3, taskID: "board"}, false, false},
		{"playbook3 escalate", args{playbook: playbook3, taskID: "escalate"}, true, false},
		{"playbook3 aquire-mail", args{playbook: playbook3, taskID: "aquire-mail"}, false, false},
		{"playbook3 block-ioc", args{playbook: playbook3, taskID: "block-ioc"}, false, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := activePlaybook(tt.args.playbook, tt.args.taskID)
			if (err != nil) != tt.wantErr {
				t.Errorf("activePlaybook() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if got != tt.want {
				t.Errorf("activePlaybook() got = %v, want %v", got, tt.want)
			}
		})
	}
}
// Test_playbookOrder verifies that toPlaybookResponse assigns each task an
// Order matching the topological order of the playbook graph.
func Test_playbookOrder(t *testing.T) {
	type args struct {
		playbook *models.Playbook
	}
	tests := []struct {
		name    string
		args    args
		want    []string
		wantErr bool
	}{
		{"playbook4", args{playbook: playbook4}, []string{"file-or-hash", "enter-hash", "upload", "hash", "virustotal"}, false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := toPlaybookResponse(tt.args.playbook)
			if (err != nil) != tt.wantErr {
				// fixed: this test exercises toPlaybookResponse, not activePlaybook
				t.Errorf("toPlaybookResponse() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// Invert the mapping: names[order] = task name.
			names := make([]string, len(got.Tasks))
			for name, task := range got.Tasks {
				names[task.Order] = name
			}
			assert.Equal(t, tt.want, names)
		})
	}
}

48
database/relationships.go Normal file
View File

@@ -0,0 +1,48 @@
package database
import (
"context"
"errors"
"strconv"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
)
// RelatedCreate links two tickets with an edge in the related-tickets
// collection. Self-relations (id == id2) are rejected.
// NOTE(review): ctx is passed twice to CreateEdge — this matches the pattern
// used elsewhere in this codebase (e.g. UserDataCreate), so it presumably is
// the busdb wrapper signature (outer ctx, driver ctx); confirm against busdb.
func (db *Database) RelatedCreate(ctx context.Context, id, id2 int64) error {
	if id == id2 {
		return errors.New("tickets cannot relate to themself")
	}
	_, err := db.relatedCollection.CreateEdge(ctx, ctx, &driver.EdgeDocument{
		From: driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
		To:   driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
	})
	return err
}
// RelatedBatchCreate inserts multiple pre-built relation edges in one call.
// Unlike RelatedCreate, no self-relation validation is performed here; the
// caller is responsible for supplying valid edges.
func (db *Database) RelatedBatchCreate(ctx context.Context, edges []*driver.EdgeDocument) error {
	_, err := db.relatedCollection.CreateEdges(ctx, edges)
	return err
}
// RelatedRemove deletes the relation edge between two tickets regardless of
// edge direction (the AQL filter matches both from→to and to→from), and logs
// the removal as an update operation on both ticket documents.
func (db *Database) RelatedRemove(ctx context.Context, id, id2 int64) error {
	q := `
	FOR d in @@collection
	FILTER (d._from == @id && d._to == @id2) || (d._to == @id && d._from == @id2)
	REMOVE d in @@collection`
	_, _, err := db.Query(ctx, q, map[string]interface{}{
		"@collection": RelatedTicketsCollectionName,
		"id":          driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
		"id2":         driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
	}, &busdb.Operation{
		OperationType: busdb.Update,
		// Both tickets are notified/logged, since the relation touches both.
		Ids: []driver.DocumentID{
			driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id))),
			driver.DocumentID(TicketCollectionName + "/" + strconv.Itoa(int(id2))),
		},
		Msg: "Removed ticket/artifact relation",
	})
	return err
}

86
database/settings.go Normal file
View File

@@ -0,0 +1,86 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/gin-gonic/gin"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// toUserDataResponse converts a stored UserData document into its API
// response form, attaching the document key as the response ID.
func toUserDataResponse(key string, doc *models.UserData) *models.UserDataResponse {
	response := &models.UserDataResponse{
		ID:         key,
		Email:      doc.Email,
		Image:      doc.Image,
		Name:       doc.Name,
		Timeformat: doc.Timeformat,
	}
	return response
}
// UserDataCreate stores per-user settings under the given user id as the
// document key. Both a non-nil userdata and a non-empty id are required.
// NOTE(review): ctx is passed twice to CreateDocument — matches the busdb
// wrapper pattern used throughout this file; confirm against busdb.
func (db *Database) UserDataCreate(ctx context.Context, id string, userdata *models.UserData) error {
	if userdata == nil {
		return errors.New("requires setting")
	}
	if id == "" {
		return errors.New("requires username")
	}
	_, err := db.userdataCollection.CreateDocument(ctx, ctx, id, userdata)
	return err
}
// UserDataGetOrCreate returns the stored user data for id, creating it from
// newUserData when the read fails. NOTE(review): any read error — not only
// "document not found" — triggers the create path; a transient database error
// would therefore attempt (and likely fail) a create. Confirm this is
// intended or narrow the check to driver.IsNotFound.
func (db *Database) UserDataGetOrCreate(ctx *gin.Context, id string, newUserData *models.UserData) (*models.UserDataResponse, error) {
	setting, err := db.UserDataGet(ctx, id)
	if err != nil {
		return toUserDataResponse(id, newUserData), db.UserDataCreate(ctx, id, newUserData)
	}
	return setting, nil
}
// UserDataGet loads the user data document stored under the given id.
func (db *Database) UserDataGet(ctx context.Context, id string) (*models.UserDataResponse, error) {
	doc := models.UserData{}
	meta, readErr := db.userdataCollection.ReadDocument(ctx, id, &doc)
	if readErr != nil {
		return nil, readErr
	}
	// readErr is nil here, so the response is returned without error.
	return toUserDataResponse(meta.Key, &doc), nil
}
// UserDataList returns all stored user-data documents.
// NOTE(review): the query sorts on d.username, but the documents mapped here
// only show email/name/image/timeformat fields — confirm the stored documents
// actually carry a username field, otherwise the SORT is a no-op.
func (db *Database) UserDataList(ctx context.Context) ([]*models.UserDataResponse, error) {
	query := "FOR d IN @@collection SORT d.username ASC RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserDataCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*models.UserDataResponse
	// Drain the cursor until the driver signals no more documents.
	for {
		var doc models.UserData
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toUserDataResponse(meta.Key, &doc))
	}
	// The outer err is nil at this point (checked after Query above).
	return docs, err
}
// UserDataUpdate replaces the user-data document stored under id and returns
// the new document state (captured via the driver's WithReturnNew context).
func (db *Database) UserDataUpdate(ctx context.Context, id string, userdata *models.UserData) (*models.UserDataResponse, error) {
	var doc models.UserData
	// Ask the driver to write the post-replace document into doc.
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.userdataCollection.ReplaceDocument(ctx, id, userdata)
	if err != nil {
		return nil, err
	}
	return toUserDataResponse(meta.Key, &doc), nil
}

159
database/settings_test.go Normal file
View File

@@ -0,0 +1,159 @@
package database_test
import (
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
"github.com/SecurityBrewery/catalyst/test"
)
// init switches gin into test mode so the tests below run without gin's
// default debug logging.
func init() {
	gin.SetMode(gin.TestMode)
}
// bob is the user-data fixture used across the UserData tests.
var bob = &models.UserData{
	Email: pointer.String("bob@example.org"),
	Name:  pointer.String("Bob"),
}

// bobResponse is the expected API response for the bob fixture stored under
// the document key "bob".
var bobResponse = &models.UserDataResponse{
	ID:    "bob",
	Email: pointer.String("bob@example.org"),
	Name:  pointer.String("Bob"),
}
// TestDatabase_UserDataCreate exercises UserDataCreate validation: a proper
// id plus document succeeds; nil documents or empty ids are rejected.
func TestDatabase_UserDataCreate(t *testing.T) {
	type args struct {
		id      string
		setting *models.UserData
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal setting", args: args{id: "bob", setting: bob}, wantErr: false},
		{name: "Nil setting", args: args{id: "bob"}, wantErr: true},
		{name: "UserData without settingname", args: args{id: ""}, wantErr: true},
		{name: "Only settingname", args: args{id: "bob"}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Fixed: the failure message previously referenced the outdated
			// name "settingCreate".
			if err := db.UserDataCreate(test.Context(), tt.args.id, tt.args.setting); (err != nil) != tt.wantErr {
				t.Errorf("UserDataCreate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
// TestDatabase_UserDataGet seeds the bob fixture and verifies that existing
// user data is returned and missing ids yield an error.
func TestDatabase_UserDataGet(t *testing.T) {
	type args struct {
		id string
	}
	tests := []struct {
		name    string
		args    args
		want    *models.UserDataResponse
		wantErr bool
	}{
		{name: "Normal get", args: args{id: "bob"}, want: bobResponse},
		{name: "Not existing", args: args{id: "foo"}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Fixed: setup-failure message previously referenced the outdated
			// name "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			got, err := db.UserDataGet(test.Context(), tt.args.id)
			if (err != nil) != tt.wantErr {
				t.Errorf("UserDataGet() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// On an expected error there is no response to compare.
			if err != nil {
				return
			}
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_UserDataList seeds one user-data document and verifies the
// list endpoint returns exactly that document.
func TestDatabase_UserDataList(t *testing.T) {
	tests := []struct {
		name    string
		want    []*models.UserDataResponse
		wantErr bool
	}{
		{name: "Normal list", want: []*models.UserDataResponse{bobResponse}},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Fixed: setup-failure message previously referenced the outdated
			// name "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			got, err := db.UserDataList(test.Context())
			if (err != nil) != tt.wantErr {
				t.Errorf("UserDataList() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_UserDataUpdate verifies that replacing an existing document
// succeeds and replacing a missing id fails.
func TestDatabase_UserDataUpdate(t *testing.T) {
	type args struct {
		id      string
		setting *models.UserData
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal", args: args{id: "bob", setting: bob}},
		{name: "Not existing", args: args{id: "foo"}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Fixed: setup-failure message previously referenced the outdated
			// name "settingCreate".
			if err := db.UserDataCreate(test.Context(), "bob", bob); err != nil {
				t.Errorf("UserDataCreate() error = %v", err)
			}
			if _, err := db.UserDataUpdate(test.Context(), tt.args.id, tt.args.setting); (err != nil) != tt.wantErr {
				t.Errorf("UserDataUpdate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}

43
database/statistics.go Normal file
View File

@@ -0,0 +1,43 @@
package database
import (
"context"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// Statistics aggregates ticket metrics in a single AQL query: ticket counts
// per type, the number of unassigned open tickets, open tickets per owner,
// and tickets created per ISO week (keys formatted "YYYY-WW", zero-padded).
func (db *Database) Statistics(ctx context.Context) (*models.Statistics, error) {
	query := `RETURN {
	tickets_per_type: MERGE(FOR d in tickets
	COLLECT type = d.type WITH COUNT INTO typecount
	RETURN ZIP([type], [typecount])),
	unassigned: FIRST(FOR d in tickets
	FILTER d.status == "open" AND !d.owner
	COLLECT WITH COUNT INTO length
	RETURN length),
	open_tickets_per_user: MERGE(FOR d in tickets
	FILTER d.status == "open"
	COLLECT user = d.owner WITH COUNT INTO usercount
	RETURN ZIP([user], [usercount])),
	tickets_per_week: MERGE(FOR d in tickets
	COLLECT week = CONCAT(DATE_YEAR(d.created), "-", DATE_ISOWEEK(d.created) < 10 ? "0" : "", DATE_ISOWEEK(d.created)) WITH COUNT INTO weekcount
	RETURN ZIP([week], [weekcount])),
	}`
	cur, _, err := db.Query(ctx, query, nil, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cur.Close()
	// The query returns a single object matching models.Statistics.
	statistics := models.Statistics{}
	if _, err := cur.ReadDocument(ctx, &statistics); err != nil {
		return nil, err
	}
	return &statistics, nil
}

68
database/task.go Normal file
View File

@@ -0,0 +1,68 @@
package database
import (
"context"
"github.com/arangodb/go-driver"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// playbookResponse is the row shape produced by the TaskList AQL query: one
// playbook per row together with its owning ticket's id and name.
type playbookResponse struct {
	PlaybookId   string          `json:"playbook_id"`
	PlaybookName string          `json:"playbook_name"`
	Playbook     models.Playbook `json:"playbook"`
	TicketId     int64           `json:"ticket_id"`
	TicketName   string          `json:"ticket_name"`
}
// TaskList returns every active task of every playbook attached to an open
// ticket the caller may write to. The AQL query flattens tickets into one row
// per playbook; active tasks are then selected in Go after converting each
// playbook into its response form.
func (db *Database) TaskList(ctx context.Context) ([]*models.TaskWithContext, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `FOR d IN @@collection
	` + ticketFilterQuery + `
	FILTER d.status == 'open'
	FOR playbook IN NOT_NULL(VALUES(d.playbooks), [])
	RETURN { ticket_id: TO_NUMBER(d._key), ticket_name: d.name, playbook_id: POSITION(d.playbooks, playbook, true), playbook_name: playbook.name, playbook: playbook }`
	cursor, _, err := db.Query(ctx, query, mergeMaps(ticketFilterVars, map[string]interface{}{
		"@collection": TicketCollectionName,
	}), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	docs := []*models.TaskWithContext{}
	// Drain the cursor; each row is one (ticket, playbook) pair.
	for {
		var doc playbookResponse
		_, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		playbook, err := toPlaybookResponse(&doc.Playbook)
		if err != nil {
			return nil, err
		}
		// Only currently active tasks are surfaced in the task list.
		for _, task := range playbook.Tasks {
			if task.Active {
				docs = append(docs, &models.TaskWithContext{
					PlaybookId:   doc.PlaybookId,
					PlaybookName: doc.PlaybookName,
					Task:         *task,
					TicketId:     doc.TicketId,
					TicketName:   doc.TicketName,
				})
			}
		}
	}
	// The outer err is nil here (checked after Query above).
	return docs, err
}

88
database/template.go Normal file
View File

@@ -0,0 +1,88 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// toTicketTemplate converts a ticket-template form into the stored document
// model. Panics if doc is nil; callers must validate first.
func toTicketTemplate(doc *models.TicketTemplateForm) *models.TicketTemplate {
	return &models.TicketTemplate{Name: doc.Name, Schema: doc.Schema}
}
// toTicketTemplateResponse converts a stored ticket template into its API
// response form, attaching the document key as the ID.
func toTicketTemplateResponse(key string, doc *models.TicketTemplate) *models.TicketTemplateResponse {
	return &models.TicketTemplateResponse{ID: key, Name: doc.Name, Schema: doc.Schema}
}
// TemplateCreate stores a new ticket template. The document key is derived
// from the template name in kebab-case (e.g. "My Template" -> "my-template"),
// so duplicate names produce a key conflict. A nil template or empty name is
// rejected.
func (db *Database) TemplateCreate(ctx context.Context, template *models.TicketTemplateForm) (*models.TicketTemplateResponse, error) {
	if template == nil {
		return nil, errors.New("requires template")
	}
	if template.Name == "" {
		return nil, errors.New("requires template name")
	}
	var doc models.TicketTemplate
	// newctx captures the post-insert document via the driver.
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.templateCollection.CreateDocument(ctx, newctx, strcase.ToKebab(template.Name), toTicketTemplate(template))
	if err != nil {
		return nil, err
	}
	return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateGet loads a single ticket template by its document key.
func (db *Database) TemplateGet(ctx context.Context, id string) (*models.TicketTemplateResponse, error) {
	doc := models.TicketTemplate{}
	meta, readErr := db.templateCollection.ReadDocument(ctx, id, &doc)
	if readErr != nil {
		return nil, readErr
	}
	return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateUpdate replaces the template stored under id and returns the new
// document state.
//
// Fixed: a nil template is now rejected with an error, consistent with
// TemplateCreate — previously toTicketTemplate(nil) dereferenced the nil
// pointer and panicked.
func (db *Database) TemplateUpdate(ctx context.Context, id string, template *models.TicketTemplateForm) (*models.TicketTemplateResponse, error) {
	if template == nil {
		return nil, errors.New("requires template")
	}
	var doc models.TicketTemplate
	// Capture the post-replace document via the driver.
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.templateCollection.ReplaceDocument(ctx, id, toTicketTemplate(template))
	if err != nil {
		return nil, err
	}
	return toTicketTemplateResponse(meta.Key, &doc), nil
}
// TemplateDelete removes the template stored under id; the driver returns an
// error if no such document exists.
func (db *Database) TemplateDelete(ctx context.Context, id string) error {
	_, err := db.templateCollection.RemoveDocument(ctx, id)
	return err
}
// TemplateList returns all stored ticket templates, in collection order.
func (db *Database) TemplateList(ctx context.Context) ([]*models.TicketTemplateResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": TemplateCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*models.TicketTemplateResponse
	// Drain the cursor until the driver signals no more documents.
	for {
		var doc models.TicketTemplate
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toTicketTemplateResponse(meta.Key, &doc))
	}
	// The outer err is nil at this point (checked after Query above).
	return docs, err
}

182
database/template_test.go Normal file
View File

@@ -0,0 +1,182 @@
package database_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/database/migrations"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/test"
)
// template1 is a fixture with a unique name (key "template-1" after
// kebab-casing).
var template1 = &models.TicketTemplateForm{
	Schema: migrations.DefaultTemplateSchema,
	Name:   "Template 1",
}

// default1 collides with the migration-seeded "default" template and is used
// to provoke duplicate-key errors.
var default1 = &models.TicketTemplateForm{
	Schema: migrations.DefaultTemplateSchema,
	Name:   "Default",
}
// TestDatabase_TemplateCreate exercises TemplateCreate validation and the
// duplicate-key behavior against the migration-seeded "default" template.
func TestDatabase_TemplateCreate(t *testing.T) {
	type args struct {
		template *models.TicketTemplateForm
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal", args: args{template: template1}},
		// "Default" kebab-cases to the already-seeded key "default".
		{name: "Duplicate", args: args{template: default1}, wantErr: true},
		{name: "Nil template", args: args{}, wantErr: true},
		{name: "Template without fields", args: args{template: &models.TicketTemplateForm{}}, wantErr: true},
		{name: "Only name", args: args{template: &models.TicketTemplateForm{Name: "name"}}, wantErr: false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			if _, err := db.TemplateCreate(test.Context(), tt.args.template); (err != nil) != tt.wantErr {
				t.Errorf("TemplateCreate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
// TestDatabase_TemplateDelete verifies that deleting the seeded "default"
// template succeeds and deleting an unknown key fails.
func TestDatabase_TemplateDelete(t *testing.T) {
	type args struct {
		id string
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal", args: args{"default"}},
		{name: "Not existing", args: args{"foobar"}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Seed an extra template so the collection is non-trivial.
			if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
				t.Errorf("TemplateCreate() error = %v", err)
			}
			if err := db.TemplateDelete(test.Context(), tt.args.id); (err != nil) != tt.wantErr {
				t.Errorf("TemplateDelete() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
// TestDatabase_TemplateGet verifies that the seeded "default" template can be
// fetched and that unknown keys yield an error.
func TestDatabase_TemplateGet(t *testing.T) {
	type args struct {
		id string
	}
	tests := []struct {
		name    string
		args    args
		want    *models.TicketTemplateResponse
		wantErr bool
	}{
		{name: "Normal", args: args{id: "default"}, want: &models.TicketTemplateResponse{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}},
		{name: "Not existing", args: args{id: "foobar"}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
				t.Errorf("TemplateCreate() error = %v", err)
			}
			got, err := db.TemplateGet(test.Context(), tt.args.id)
			if (err != nil) != tt.wantErr {
				t.Errorf("TemplateGet() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			if err != nil {
				return
			}
			// Fixed: testify's assert.Equal takes (t, expected, actual); the
			// arguments were reversed, producing misleading failure output.
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_TemplateList verifies the template list contains both the
// migration-seeded "default" template and the freshly created template1.
func TestDatabase_TemplateList(t *testing.T) {
	tests := []struct {
		name    string
		want    []*models.TicketTemplateResponse
		wantErr bool
	}{
		{name: "Normal", want: []*models.TicketTemplateResponse{{ID: "default", Name: "Default", Schema: migrations.DefaultTemplateSchema}, {ID: "template-1", Name: template1.Name, Schema: template1.Schema}}},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
				t.Errorf("TemplateCreate() error = %v", err)
			}
			got, err := db.TemplateList(test.Context())
			if (err != nil) != tt.wantErr {
				t.Errorf("TemplateList() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// Fixed: testify's assert.Equal takes (t, expected, actual); the
			// arguments were reversed, producing misleading failure output.
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestDatabase_TemplateUpdate verifies that replacing the seeded "default"
// template succeeds and replacing an unknown key fails.
func TestDatabase_TemplateUpdate(t *testing.T) {
	type args struct {
		id       string
		template *models.TicketTemplateForm
	}
	tests := []struct {
		name    string
		args    args
		wantErr bool
	}{
		{name: "Normal", args: args{"default", template1}},
		{name: "Not existing", args: args{"foobar", template1}, wantErr: true},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			_, _, _, _, _, db, cleanup, err := test.DB(t)
			if err != nil {
				t.Fatal(err)
			}
			defer cleanup()
			// Seed an extra template so the collection is non-trivial.
			if _, err := db.TemplateCreate(test.Context(), template1); err != nil {
				t.Errorf("TemplateCreate() error = %v", err)
			}
			if _, err := db.TemplateUpdate(test.Context(), tt.args.id, tt.args.template); (err != nil) != tt.wantErr {
				t.Errorf("TemplateUpdate() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}

604
database/ticket.go Normal file
View File

@@ -0,0 +1,604 @@
package database
import (
"context"
"encoding/json"
"errors"
"fmt"
"sort"
"strconv"
"strings"
"sync"
"time"
"github.com/arangodb/go-driver"
"github.com/xeipuuv/gojsonschema"
"github.com/SecurityBrewery/catalyst/caql"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/index"
)
// toTicket converts a TicketForm into a storable ticket, applying defaults:
// Created/Modified fall back to the current UTC time, Schema to "{}", and
// Status to "open". When the form carries an explicit ID, the ticket is
// wrapped in a busdb.Keyed so the ID becomes the document key; otherwise a
// bare *models.Ticket is returned (hence the interface{} return type).
func toTicket(ticketForm *models.TicketForm) (interface{}, error) {
	playbooks, err := toPlaybooks(ticketForm.Playbooks)
	if err != nil {
		return nil, err
	}
	ticket := &models.Ticket{
		Artifacts:  ticketForm.Artifacts,
		Comments:   ticketForm.Comments,
		Details:    ticketForm.Details,
		Files:      ticketForm.Files,
		Name:       ticketForm.Name,
		Owner:      ticketForm.Owner,
		Playbooks:  playbooks,
		Read:       ticketForm.Read,
		References: ticketForm.References,
		Status:     ticketForm.Status,
		Type:       ticketForm.Type,
		Write:      ticketForm.Write,
		// ID: ticketForm.ID,
		// Created: ticketForm.Created,
		// Modified: ticketForm.Modified,
		// Schema: ticketForm.Schema,
	}
	// Optional fields default to sensible values when absent from the form.
	if ticketForm.Created != nil {
		ticket.Created = *ticketForm.Created
	} else {
		ticket.Created = time.Now().UTC()
	}
	if ticketForm.Modified != nil {
		ticket.Modified = *ticketForm.Modified
	} else {
		ticket.Modified = time.Now().UTC()
	}
	if ticketForm.Schema != nil {
		ticket.Schema = *ticketForm.Schema
	} else {
		ticket.Schema = "{}"
	}
	if ticketForm.Status == "" {
		ticket.Status = "open"
	}
	// An explicit ID pins the document key.
	if ticketForm.ID != nil {
		return &busdb.Keyed{Key: strconv.FormatInt(*ticketForm.ID, 10), Doc: ticket}, nil
	}
	return ticket, nil
}
// toTicketResponses converts a slice of simple ticket responses into full
// ticket responses, expanding each ticket's playbooks; it fails on the first
// conversion error.
func toTicketResponses(tickets []*models.TicketSimpleResponse) ([]*models.TicketResponse, error) {
	var responses []*models.TicketResponse
	for _, ticket := range tickets {
		converted, convErr := toTicketResponse(ticket)
		if convErr != nil {
			return nil, convErr
		}
		responses = append(responses, converted)
	}
	return responses, nil
}
// toTicketResponse converts a simple ticket response into the full response
// form by expanding its playbooks into playbook responses; all other fields
// are copied through unchanged.
func toTicketResponse(ticket *models.TicketSimpleResponse) (*models.TicketResponse, error) {
	playbooks, err := toPlaybookResponses(ticket.Playbooks)
	if err != nil {
		return nil, err
	}
	return &models.TicketResponse{
		ID:         ticket.ID,
		Artifacts:  ticket.Artifacts,
		Comments:   ticket.Comments,
		Created:    ticket.Created,
		Details:    ticket.Details,
		Files:      ticket.Files,
		Modified:   ticket.Modified,
		Name:       ticket.Name,
		Owner:      ticket.Owner,
		Playbooks:  playbooks,
		Read:       ticket.Read,
		References: ticket.References,
		Schema:     ticket.Schema,
		Status:     ticket.Status,
		Type:       ticket.Type,
		Write:      ticket.Write,
	}, nil
}
// toTicketSimpleResponse maps a stored ticket to its simple response form,
// parsing the document key into the numeric ticket ID.
func toTicketSimpleResponse(key string, ticket *models.Ticket) (*models.TicketSimpleResponse, error) {
	id, err := strconv.ParseInt(key, 10, 64)
	if err != nil {
		return nil, err
	}
	return &models.TicketSimpleResponse{
		Artifacts:  ticket.Artifacts,
		Comments:   ticket.Comments,
		Created:    ticket.Created,
		Details:    ticket.Details,
		Files:      ticket.Files,
		ID:         id,
		Modified:   ticket.Modified,
		Name:       ticket.Name,
		Owner:      ticket.Owner,
		Playbooks:  ticket.Playbooks,
		Read:       ticket.Read,
		References: ticket.References,
		Schema:     ticket.Schema,
		Status:     ticket.Status,
		Type:       ticket.Type,
		Write:      ticket.Write,
	}, nil
}
// toTicketWithTickets combines a ticket response with its list of related
// tickets into the TicketWithTickets response form.
func toTicketWithTickets(ticketResponse *models.TicketResponse, tickets []*models.TicketSimpleResponse) *models.TicketWithTickets {
	return &models.TicketWithTickets{
		Artifacts:  ticketResponse.Artifacts,
		Comments:   ticketResponse.Comments,
		Created:    ticketResponse.Created,
		Details:    ticketResponse.Details,
		Files:      ticketResponse.Files,
		ID:         ticketResponse.ID,
		Modified:   ticketResponse.Modified,
		Name:       ticketResponse.Name,
		Owner:      ticketResponse.Owner,
		Playbooks:  ticketResponse.Playbooks,
		Read:       ticketResponse.Read,
		References: ticketResponse.References,
		Schema:     ticketResponse.Schema,
		Status:     ticketResponse.Status,
		Type:       ticketResponse.Type,
		Write:      ticketResponse.Write,
		Tickets:    tickets,
	}
}
// toPlaybookResponses converts every playbook in the map into its response
// form, keeping the original map keys; it fails on the first conversion
// error.
func toPlaybookResponses(playbooks map[string]*models.Playbook) (map[string]*models.PlaybookResponse, error) {
	responses := map[string]*models.PlaybookResponse{}
	for key, playbook := range playbooks {
		response, convErr := toPlaybookResponse(playbook)
		if convErr != nil {
			return nil, convErr
		}
		responses[key] = response
	}
	return responses, nil
}
// toPlaybookResponse converts a playbook into its response form. Tasks are
// topologically sorted over the playbook graph, and each task's position in
// that order becomes its Order value.
func toPlaybookResponse(playbook *models.Playbook) (*models.PlaybookResponse, error) {
	graph, err := playbookGraph(playbook)
	if err != nil {
		return nil, err
	}
	re := &models.PlaybookResponse{
		Name:  playbook.Name,
		Tasks: map[string]*models.TaskResponse{},
	}
	results, err := graph.Toposort()
	if err != nil {
		return nil, err
	}
	// Idiom fix: use the range index directly instead of a manually
	// incremented counter — the topological position is the task's order.
	for i, taskID := range results {
		rootTask, err := toTaskResponse(playbook, taskID, i, graph)
		if err != nil {
			return nil, err
		}
		re.Tasks[taskID] = rootTask
	}
	return re, nil
}
// TicketBatchCreate validates, filters, and inserts a batch of tickets in a
// single AQL query, then indexes them, emits a bus notification, and starts
// the root task of every attached playbook. The ingestion-filter hook may
// rewrite incoming documents; the write-filter hook restricts which tickets
// the caller may create.
func (db *Database) TicketBatchCreate(ctx context.Context, ticketForms []*models.TicketForm) ([]*models.TicketResponse, error) {
	// `update` is an AQL expression produced by the ingestion hook that can
	// merge modifications into each incoming document.
	update, err := db.Hooks.IngestionFilter(ctx, db.Index)
	if err != nil {
		return nil, err
	}
	var dbTickets []interface{}
	for _, ticketForm := range ticketForms {
		ticket, err := toTicket(ticketForm)
		if err != nil {
			return nil, err
		}
		// Validate against the generated JSON schema before touching the DB.
		if err := validate(ticket, models.TicketSchema); err != nil {
			return nil, err
		}
		dbTickets = append(dbTickets, ticket)
	}
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	// Apply ingestion updates, move an explicit "id" into the document key,
	// and insert. Everything happens server-side in one query.
	query := `FOR d IN @tickets
	` + ticketFilterQuery + `
	LET updates = ` + update + `
	LET newdoc = LENGTH(updates) != 0 ? APPLY("MERGE_RECURSIVE", APPEND([d], updates)) : d
	LET keyeddoc = HAS(newdoc, "id") ? MERGE(newdoc, {"_key": TO_STRING(newdoc.id)}) : newdoc
	LET noiddoc = UNSET(keyeddoc, "id")
	INSERT noiddoc INTO @@collection
	RETURN NEW`
	apiTickets, _, err := db.ticketListQuery(ctx, query, mergeMaps(map[string]interface{}{
		"tickets": dbTickets,
	}, ticketFilterVars), busdb.CreateOperation)
	if err != nil {
		return nil, err
	}
	if err = batchIndex(db.Index, apiTickets); err != nil {
		return nil, err
	}
	var ids []driver.DocumentID
	for _, apiTicket := range apiTickets {
		ids = append(ids, driver.NewDocumentID(TicketCollectionName, fmt.Sprint(apiTicket.ID)))
	}
	if err := db.BusDatabase.LogAndNotify(ctx, ids, "Ticket created"); err != nil {
		return nil, err
	}
	ticketResponses, err := toTicketResponses(apiTickets)
	if err != nil {
		return nil, err
	}
	// Kick off the root task of every playbook attached to a new ticket.
	for _, ticketResponse := range ticketResponses {
		for playbookID := range ticketResponse.Playbooks {
			if err := runRootTask(ticketResponse, playbookID, db); err != nil {
				return nil, err
			}
		}
	}
	return ticketResponses, nil
}
// IndexRebuild truncates the search index and re-indexes every ticket in the
// collection.
func (db *Database) IndexRebuild(ctx context.Context) error {
	if err := db.Index.Truncate(); err != nil {
		return err
	}
	tickets, _, err := db.ticketListQuery(ctx, "FOR d IN @@collection RETURN d", nil, busdb.ReadOperation)
	if err != nil {
		return err
	}
	return batchIndex(db.Index, tickets)
}
// batchIndex indexes tickets in groups, dispatching each full group to its
// own goroutine and waiting for all of them to finish.
//
// Fixed: the original dropped the final partial batch — any trailing group of
// 100 or fewer tickets was silently never indexed. The remainder is now
// flushed after the loop.
func batchIndex(index *index.Index, tickets []*models.TicketSimpleResponse) error {
	var wg sync.WaitGroup
	// flush indexes one batch asynchronously.
	flush := func(docs []*models.TicketSimpleResponse) {
		wg.Add(1)
		go func() {
			index.Index(docs)
			wg.Done()
		}()
	}
	var batch []*models.TicketSimpleResponse
	for _, ticket := range tickets {
		batch = append(batch, ticket)
		if len(batch) > 100 {
			flush(batch)
			batch = []*models.TicketSimpleResponse{}
		}
	}
	// Flush the trailing partial batch (the original bug: this was missing).
	if len(batch) > 0 {
		flush(batch)
	}
	wg.Wait()
	return nil
}
// TicketGet loads one ticket (plus its related tickets) by numeric ID,
// honoring the read-filter hook.
func (db *Database) TicketGet(ctx context.Context, ticketID int64) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
	if err != nil {
		return nil, err
	}
	return db.ticketGetQuery(ctx, ticketID, `LET d = DOCUMENT(@@collection, @ID) `+ticketFilterQuery+` RETURN d`, ticketFilterVars, busdb.ReadOperation)
}
// ticketGetQuery runs the given single-ticket query, then enriches the result
// with related tickets from three sources: outbound graph edges, inbound
// graph edges, and tickets sharing at least one artifact name. The fetched
// ticket is also re-indexed asynchronously as a side effect.
func (db *Database) ticketGetQuery(ctx context.Context, ticketID int64, query string, bindVars map[string]interface{}, operation *busdb.Operation) (*models.TicketWithTickets, error) {
	if bindVars == nil {
		bindVars = map[string]interface{}{}
	}
	bindVars["@collection"] = TicketCollectionName
	// NOTE(review): ticketID 0 leaves @ID unbound — presumably queries used
	// with ID 0 don't reference @ID; confirm callers.
	if ticketID != 0 {
		bindVars["ID"] = fmt.Sprint(ticketID)
	}
	cur, _, err := db.Query(ctx, query, bindVars, operation)
	if err != nil {
		return nil, err
	}
	defer cur.Close()
	ticket := models.Ticket{}
	meta, err := cur.ReadDocument(ctx, &ticket)
	if err != nil {
		return nil, err
	}
	ticketSimpleResponse, err := toTicketSimpleResponse(meta.Key, &ticket)
	if err != nil {
		return nil, err
	}
	// index
	// Fire-and-forget re-index of the fetched ticket.
	go db.Index.Index([]*models.TicketSimpleResponse{ticketSimpleResponse})
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
	if err != nil {
		return nil, err
	}
	// tickets
	// 1) Tickets reachable via outbound edges in the ticket/artifact graph.
	ticketsQuery := `FOR vertex, edge IN OUTBOUND
	DOCUMENT(@@tickets, @ID)
	GRAPH @graph
	FILTER IS_SAME_COLLECTION(@@collection, vertex)
	FILTER vertex != null
	LET d = DOCUMENT(@@collection, edge["_to"])
	` + ticketFilterQuery + `
	RETURN d`
	outTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
		"ID":       fmt.Sprint(ticketID),
		"graph":    TicketArtifactsGraphName,
		"@tickets": TicketCollectionName,
	}, ticketFilterVars), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	// 2) Tickets reachable via inbound edges.
	ticketsQuery = `FOR vertex, edge IN INBOUND
	DOCUMENT(@@tickets, @ID)
	GRAPH @graph
	FILTER IS_SAME_COLLECTION(@@collection, vertex)
	FILTER vertex != null
	LET d = DOCUMENT(@@collection, edge["_from"])
	` + ticketFilterQuery + `
	RETURN d`
	inTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
		"ID":       fmt.Sprint(ticketID),
		"graph":    TicketArtifactsGraphName,
		"@tickets": TicketCollectionName,
	}, ticketFilterVars), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	// 3) Tickets sharing at least one artifact name with this ticket.
	var artifactNames []string
	for _, artifact := range ticketSimpleResponse.Artifacts {
		artifactNames = append(artifactNames, artifact.Name)
	}
	ticketsQuery = `FOR d IN @@collection
	FILTER d._key != @ID
	` + ticketFilterQuery + `
	FOR a IN NOT_NULL(d.artifacts, [])
	FILTER POSITION(@artifacts, a.name)
	RETURN d`
	sameArtifactTickets, _, err := db.ticketListQuery(ctx, ticketsQuery, mergeMaps(map[string]interface{}{
		"ID":        fmt.Sprint(ticketID),
		"artifacts": artifactNames,
	}, ticketFilterVars), busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	// Merge all related tickets and sort by ID for stable output.
	tickets := append(outTickets, inTickets...)
	tickets = append(tickets, sameArtifactTickets...)
	sort.Slice(tickets, func(i, j int) bool {
		return tickets[i].ID < tickets[j].ID
	})
	ticketResponse, err := toTicketResponse(ticketSimpleResponse)
	if err != nil {
		return nil, err
	}
	return toTicketWithTickets(ticketResponse, tickets), nil
}
// TicketUpdate replaces a ticket's document (subject to the write filter),
// stamps Modified with the current UTC time, and logs the change against the
// ticket's document ID.
func (db *Database) TicketUpdate(ctx context.Context, ticketID int64, ticket *models.Ticket) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	REPLACE d WITH @ticket IN @@collection
	RETURN NEW`
	ticket.Modified = time.Now().UTC() // TODO make setable?
	return db.ticketGetQuery(ctx, ticketID, query, mergeMaps(map[string]interface{}{"ticket": ticket}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update, Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, strconv.FormatInt(ticketID, 10)),
		},
		Msg: "Ticket updated",
	})
}
// TicketDelete removes a ticket, first confirming via TicketGet that it
// exists and is visible under the caller's access filters.
func (db *Database) TicketDelete(ctx context.Context, ticketID int64) error {
	if _, getErr := db.TicketGet(ctx, ticketID); getErr != nil {
		return getErr
	}
	_, removeErr := db.ticketCollection.RemoveDocument(ctx, strconv.FormatInt(ticketID, 10))
	return removeErr
}
// TicketList returns a page of tickets plus the total match count. The
// optional query string is parsed as a CAQL expression into an AQL filter;
// sorts/desc control ordering; offset/count paginate.
// NOTE(review): the final `SORT d._key ASC` appears after LIMIT, so it orders
// only the returned page, and the caller-supplied sort clause is emitted
// before the type/filter clauses — confirm this clause ordering is intended.
func (db *Database) TicketList(ctx context.Context, ticketType string, query string, sorts []string, desc []bool, offset, count int64) (*models.TicketList, error) {
	binVars := map[string]interface{}{}
	parser := &caql.Parser{Searcher: db.Index, Prefix: "d."}
	var typeString = ""
	if ticketType != "" {
		typeString = "FILTER d.type == @type "
		binVars["type"] = ticketType
	}
	var filterString = ""
	if query != "" {
		// Translate the user-facing CAQL query into an AQL filter expression.
		queryTree, err := parser.Parse(query)
		if err != nil {
			return nil, errors.New("invalid filter query: syntax error")
		}
		filterString, err = queryTree.String()
		if err != nil {
			return nil, fmt.Errorf("invalid filter query: %w", err)
		}
		filterString = "FILTER " + filterString
	}
	// Total number of matches, independent of pagination.
	documentCount, err := db.TicketCount(ctx, typeString, filterString, binVars)
	if err != nil {
		return nil, err
	}
	sortQ := sortQuery(sorts, desc, binVars)
	binVars["offset"] = offset
	binVars["count"] = count
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
	if err != nil {
		return nil, err
	}
	q := `FOR d IN @@collection
	` + ticketFilterQuery + `
	` + sortQ + `
	` + typeString + `
	` + filterString + `
	LIMIT @offset, @count
	SORT d._key ASC
	RETURN d`
	// RETURN KEEP(d, "_key", "id", "name", "type", "created")`
	ticketList, _, err := db.ticketListQuery(ctx, q, mergeMaps(binVars, ticketFilterVars), busdb.ReadOperation)
	return &models.TicketList{
		Count:   documentCount,
		Tickets: ticketList,
	}, err
	// return map[string]interface{}{"tickets": ticketList, "count": documentCount}, err
}
// ticketListQuery runs an AQL query expected to return ticket documents and
// maps each result into a simple ticket response. The @collection bind
// variable is always set to the ticket collection. The log entry from the
// underlying query is passed through to the caller.
func (db *Database) ticketListQuery(ctx context.Context, query string, bindVars map[string]interface{}, operation *busdb.Operation) ([]*models.TicketSimpleResponse, *models.LogEntry, error) {
	if bindVars == nil {
		bindVars = map[string]interface{}{}
	}
	bindVars["@collection"] = TicketCollectionName
	cursor, logEntry, err := db.Query(ctx, query, bindVars, operation)
	if err != nil {
		return nil, nil, err
	}
	defer cursor.Close()
	var docs []*models.TicketSimpleResponse
	// Drain the cursor until the driver signals no more documents.
	for {
		doc := models.Ticket{}
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, nil, err
		}
		resp, err := toTicketSimpleResponse(meta.Key, &doc)
		if err != nil {
			return nil, nil, err
		}
		docs = append(docs, resp)
	}
	return docs, logEntry, nil
}
// TicketCount counts tickets matching the given pre-rendered type and filter
// query fragments, honoring the read-filter hook.
//
// Fixed: the cursor is now closed via defer, so it is also released when
// ReadDocument fails — previously that error path returned without closing
// the cursor, leaking it.
func (db *Database) TicketCount(ctx context.Context, typequery, filterquery string, bindVars map[string]interface{}) (int, error) {
	if bindVars == nil {
		bindVars = map[string]interface{}{}
	}
	bindVars["@collection"] = TicketCollectionName
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketReadFilter(ctx)
	if err != nil {
		return 0, err
	}
	countQuery := `RETURN LENGTH(FOR d IN @@collection ` + ticketFilterQuery + " " + typequery + " " + filterquery + ` RETURN 1)`
	cursor, _, err := db.Query(ctx, countQuery, mergeMaps(bindVars, ticketFilterVars), busdb.ReadOperation)
	if err != nil {
		return 0, err
	}
	defer cursor.Close()
	documentCount := 0
	if _, err := cursor.ReadDocument(ctx, &documentCount); err != nil {
		return 0, err
	}
	return documentCount, nil
}
// sortQuery builds an AQL SORT clause from the requested columns and sort
// directions. Each column name is passed as a bind variable (column0,
// column1, ...) which this function adds to bindVars. A column is sorted
// descending when the matching paramsDesc entry is true. The returned
// string is empty when no sort columns are requested.
func sortQuery(paramsSort []string, paramsDesc []bool, bindVars map[string]interface{}) string {
	if len(paramsSort) == 0 {
		return ""
	}
	clauses := make([]string, 0, len(paramsSort))
	for i, column := range paramsSort {
		bindVars[fmt.Sprintf("column%d", i)] = column
		clause := fmt.Sprintf("d.@column%d", i)
		if i < len(paramsDesc) && paramsDesc[i] {
			clause += " DESC"
		}
		clauses = append(clauses, clause)
	}
	return "SORT " + strings.Join(clauses, ", ")
}
// mergeMaps returns a new map holding every entry of a and b.
// Keys present in both maps take the value from b. Neither input is
// modified; nil inputs are treated as empty maps.
func mergeMaps(a map[string]interface{}, b map[string]interface{}) map[string]interface{} {
	out := make(map[string]interface{}, len(a)+len(b))
	for _, src := range []map[string]interface{}{a, b} {
		for key, value := range src {
			out[key] = value
		}
	}
	return out
}
// validate marshals e to JSON and checks it against the given JSON schema.
// It returns nil when the document is valid and an error listing every
// schema violation otherwise.
func validate(e interface{}, schema *gojsonschema.Schema) error {
	raw, err := json.Marshal(e)
	if err != nil {
		return err
	}
	result, err := schema.Validate(gojsonschema.NewStringLoader(string(raw)))
	if err != nil {
		return err
	}
	violations := result.Errors()
	if len(violations) == 0 {
		return nil
	}
	msgs := make([]string, 0, len(violations))
	for _, violation := range violations {
		msgs = append(msgs, violation.String())
	}
	return fmt.Errorf("validation failed: %v", strings.Join(msgs, ", "))
}

291
database/ticket_field.go Normal file
View File

@@ -0,0 +1,291 @@
package database
import (
"context"
"errors"
"fmt"
"time"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/mingrammer/commonregex"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
"github.com/SecurityBrewery/catalyst/pointer"
)
// AddArtifact appends an artifact to a ticket and bumps the ticket's
// "modified" timestamp. A missing artifact status defaults to "unknown";
// a missing type is inferred from the artifact name (see inferType).
// The ticket write filter hook restricts which tickets may be updated.
func (db *Database) AddArtifact(ctx context.Context, id int64, artifact *models.Artifact) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	if artifact.Status == nil {
		artifact.Status = pointer.String("unknown")
	}
	if artifact.Type == nil {
		artifact.Type = pointer.String(inferType(artifact.Name))
	}
	// NOT_NULL keeps the PUSH working for tickets that have no artifact
	// array yet.
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "artifacts": PUSH(NOT_NULL(d.artifacts, []), @artifact) } IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"artifact": artifact}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Add artifact",
	})
}
// inferType guesses an artifact type from its value by matching it against
// a fixed set of indicator patterns. Patterns are tried in order — ip, url,
// email, md5, sha1, sha256 — and the first match wins; "unknown" is
// returned when nothing matches.
func inferType(name string) string {
	checks := []struct {
		matches func(string) bool
		label   string
	}{
		{commonregex.IPRegex.MatchString, "ip"},
		{commonregex.LinkRegex.MatchString, "url"},
		{commonregex.EmailRegex.MatchString, "email"},
		{commonregex.MD5HexRegex.MatchString, "md5"},
		{commonregex.SHA1HexRegex.MatchString, "sha1"},
		{commonregex.SHA256HexRegex.MatchString, "sha256"},
	}
	for _, check := range checks {
		if check.matches(name) {
			return check.label
		}
	}
	return "unknown"
}
// RemoveArtifact deletes the artifact with the given name from a ticket and
// bumps the ticket's "modified" timestamp. The write filter hook restricts
// which tickets may be updated. When no artifact matches the name, the AQL
// FOR body never executes and the ticket is left unchanged.
func (db *Database) RemoveArtifact(ctx context.Context, id int64, name string) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	FOR a IN NOT_NULL(d.artifacts, [])
		FILTER a.name == @name
		LET newartifacts = REMOVE_VALUE(d.artifacts, a)
		UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "artifacts": newartifacts } IN @@collection
		RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"name": name}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Remove artifact",
	})
}
// SetTemplate replaces the ticket's schema (the form template rendered for
// ticket details) and bumps the ticket's "modified" timestamp. The write
// filter hook restricts which tickets may be updated.
func (db *Database) SetTemplate(ctx context.Context, id int64, schema string) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	// Bump "modified" like every other ticket mutator (AddArtifact,
	// SetReferences, LinkFiles, ...); previously this update skipped it.
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "schema": @schema } IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"schema": schema}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Set Template",
	})
}
// AddComment appends a comment to a ticket and bumps the ticket's
// "modified" timestamp. A missing creator defaults to the user from the
// request context (error if none); a missing creation time defaults to the
// current UTC time. The write filter hook restricts which tickets may be
// updated.
func (db *Database) AddComment(ctx context.Context, id int64, comment *models.CommentForm) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	if comment.Creator == nil || *comment.Creator == "" {
		user, exists := busdb.UserFromContext(ctx)
		if !exists {
			return nil, errors.New("no user in context")
		}
		comment.Creator = pointer.String(user.ID)
	}
	if comment.Created == nil {
		comment.Created = pointer.Time(time.Now().UTC())
	}
	// NOT_NULL keeps the PUSH working for tickets without a comments array.
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "comments": PUSH(NOT_NULL(d.comments, []), @comment) } IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"comment": comment}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Add comment",
	})
}
// RemoveComment deletes one comment from a ticket and bumps the ticket's
// "modified" timestamp. The write filter hook restricts which tickets may
// be updated.
// NOTE(review): AQL REMOVE_NTH treats @commentID as a positional index into
// the comments array, not a stable comment identifier — confirm callers
// pass an index, and that concurrent comment removal cannot shift indices.
func (db *Database) RemoveComment(ctx context.Context, id int64, commentID int64) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "comments": REMOVE_NTH(d.comments, @commentID) } IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"commentID": commentID}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Remove comment",
	})
}
// SetReferences replaces the whole reference list of a ticket and bumps the
// ticket's "modified" timestamp. The write filter hook restricts which
// tickets may be updated.
func (db *Database) SetReferences(ctx context.Context, id int64, references []*models.Reference) (*models.TicketWithTickets, error) {
	filterQuery, filterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	aql := `LET d = DOCUMENT(@@collection, @ID)
	` + filterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "references": @references } IN @@collection
	RETURN NEW`

	bindVars := mergeMaps(map[string]interface{}{"references": references}, filterVars)
	operation := &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Changed references",
	}
	return db.ticketGetQuery(ctx, id, aql, bindVars, operation)
}
// LinkFiles sets the file list of a ticket and bumps the ticket's
// "modified" timestamp. Note that the AQL replaces the whole "files" array
// with @files rather than appending to it — callers must pass the complete
// list. The write filter hook restricts which tickets may be updated.
func (db *Database) LinkFiles(ctx context.Context, id int64, files []*models.File) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "files": @files } IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{"files": files}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.DocumentID(fmt.Sprintf("%s/%d", TicketCollectionName, id)),
		},
		Msg: "Linked files",
	})
}
// AddTicketPlaybook instantiates a playbook template on a ticket and then
// runs the playbook's root task. The playbook is stored under a kebab-case
// key derived from its name, or under the template's explicit ID when set;
// findName appends a numeric suffix when that key is already taken on the
// ticket.
func (db *Database) AddTicketPlaybook(ctx context.Context, id int64, playbookTemplate *models.PlaybookTemplateForm) (*models.TicketWithTickets, error) {
	pb, err := toPlaybook(playbookTemplate)
	if err != nil {
		return nil, err
	}
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	playbookID := strcase.ToKebab(pb.Name)
	if playbookTemplate.ID != nil {
		playbookID = *playbookTemplate.ID
	}
	// Loading the current ticket lets findName deduplicate the playbook key
	// against the playbooks already attached to it.
	parentTicket, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, err
	}
	query := `FOR d IN @@collection
	` + ticketFilterQuery + `
	FILTER d._key == @ID
	LET newplaybook = ZIP( [@playbookID], [@playbook] )
	LET newplaybooks = MERGE(NOT_NULL(d.playbooks, {}), newplaybook)
	LET newticket = MERGE(d, { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks })
	REPLACE d WITH newticket IN @@collection
	RETURN NEW`
	ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
		"playbook": pb,
		// findName returns playbookID itself, or a suffixed variant when
		// the key is already present on the ticket.
		"playbookID": findName(parentTicket.Playbooks, playbookID),
	}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
		Msg: "Added playbook",
	})
	if err != nil {
		return nil, err
	}
	// Kick off the playbook's entry task (and any automations it triggers).
	// NOTE(review): this uses the original playbookID, not the possibly
	// suffixed key the playbook was stored under — confirm that is intended.
	if err := runRootTask(extractTicketResponse(ticket), playbookID, db); err != nil {
		return nil, err
	}
	return ticket, nil
}
// findName returns name unchanged when it is not yet used as a key in
// playbooks; otherwise it tries numeric-suffixed variants (name0, name1,
// ...) and returns the first one that is free.
func findName(playbooks map[string]*models.PlaybookResponse, name string) string {
	candidate := name
	for i := 0; ; i++ {
		if _, taken := playbooks[candidate]; !taken {
			return candidate
		}
		candidate = fmt.Sprintf("%s%d", name, i)
	}
}
// runRootTask starts the entry task (the task with Order == 0) of the given
// playbook on a ticket, scheduling its automation follow-up tasks via
// runNextTasks.
func runRootTask(ticket *models.TicketResponse, playbookID string, db *Database) error {
	playbook, ok := ticket.Playbooks[playbookID]
	if !ok || playbook == nil {
		// Previously a missing playbook caused a nil pointer dereference
		// when accessing playbook.Tasks.
		return fmt.Errorf("playbook %q does not exist", playbookID)
	}
	var root *models.TaskResponse
	for _, task := range playbook.Tasks {
		if task.Order == 0 {
			root = task
		}
	}
	if root == nil {
		// Previously a playbook without an Order-0 task caused a nil
		// pointer dereference on root.Next below.
		return fmt.Errorf("playbook %q has no root task", playbookID)
	}
	runNextTasks(ticket.ID, playbookID, root.Next, root.Data, ticket, db)
	return nil
}
// RemoveTicketPlaybook detaches a playbook from a ticket by unsetting its
// key in the ticket's playbook map, bumping the "modified" timestamp. The
// write filter hook restricts which tickets may be updated.
func (db *Database) RemoveTicketPlaybook(ctx context.Context, id int64, playbookID string) (*models.TicketWithTickets, error) {
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `FOR d IN @@collection
	` + ticketFilterQuery + `
	FILTER d._key == @ID
	LET newplaybooks = UNSET(d.playbooks, @playbookID)
	REPLACE d WITH MERGE(d, { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks }) IN @@collection
	RETURN NEW`
	return db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
		"playbookID": playbookID,
	}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
		Msg: fmt.Sprintf("Removed playbook %s", playbookID),
	})
}

186
database/ticket_task.go Normal file
View File

@@ -0,0 +1,186 @@
package database
import (
"context"
"errors"
"fmt"
"log"
"time"
"github.com/arangodb/go-driver"
"github.com/google/uuid"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// TaskGet loads a ticket and resolves one task inside one of its playbooks.
// It returns the full ticket, the containing playbook, and the task wrapped
// together with its ticket/playbook context. An error is returned when the
// playbook or task does not exist.
func (db *Database) TaskGet(ctx context.Context, id int64, playbookID string, taskID string) (*models.TicketWithTickets, *models.PlaybookResponse, *models.TaskWithContext, error) {
	inc, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, nil, nil, err
	}
	playbook, ok := inc.Playbooks[playbookID]
	if !ok {
		return nil, nil, nil, errors.New("playbook does not exist")
	}
	task, ok := playbook.Tasks[taskID]
	if !ok {
		return nil, nil, nil, errors.New("task does not exist")
	}
	return inc, playbook, &models.TaskWithContext{
		PlaybookId:   playbookID,
		PlaybookName: playbook.Name,
		TaskId:       taskID,
		Task:         *task,
		TicketId:     id,
		TicketName:   inc.Name,
	}, nil
}
// TaskComplete marks an active task as done, stores its result data and
// close time, and then schedules the task's automation follow-ups. Only
// tasks whose Active flag is set can be completed.
func (db *Database) TaskComplete(ctx context.Context, id int64, playbookID string, taskID string, data interface{}) (*models.TicketWithTickets, error) {
	inc, err := db.TicketGet(ctx, id)
	if err != nil {
		return nil, err
	}
	// Guard the map lookups explicitly (mirroring TaskGet); previously an
	// unknown playbook or task ID caused a nil pointer dereference here.
	playbook, ok := inc.Playbooks[playbookID]
	if !ok {
		return nil, errors.New("playbook does not exist")
	}
	pendingTask, ok := playbook.Tasks[taskID]
	if !ok {
		return nil, errors.New("task does not exist")
	}
	if !pendingTask.Active {
		return nil, errors.New("cannot be completed")
	}
	ticketFilterQuery, ticketFilterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}
	query := `LET d = DOCUMENT(@@collection, @ID)
	` + ticketFilterQuery + `
	LET playbook = d.playbooks[@playbookID]
	LET task = playbook.tasks[@taskID]
	LET newtask = MERGE(task, {"data": NOT_NULL(@data, {}), "done": true, closed: @closed })
	LET newtasks = MERGE(playbook.tasks, { @taskID: newtask } )
	LET newplaybook = MERGE(playbook, {"tasks": newtasks})
	LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks } IN @@collection
	RETURN NEW`
	ticket, err := db.ticketGetQuery(ctx, id, query, mergeMaps(map[string]interface{}{
		"playbookID": playbookID,
		"taskID":     taskID,
		"data":       data,
		"closed":     time.Now().UTC(),
	}, ticketFilterVars), &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
		Msg: fmt.Sprintf("Completed task %s in playbook %s", taskID, playbookID),
	})
	if err != nil {
		return nil, err
	}
	// Re-read the task from the updated ticket so follow-ups see the stored
	// result data.
	updatedPlaybook := ticket.Playbooks[playbookID]
	updatedTask := updatedPlaybook.Tasks[taskID]
	runNextTasks(id, playbookID, updatedTask.Next, updatedTask.Data, extractTicketResponse(ticket), db)
	return ticket, nil
}
// extractTicketResponse converts a TicketWithTickets into a plain
// TicketResponse by copying the shared fields one-to-one; the extra
// related-ticket information is dropped.
func extractTicketResponse(ticket *models.TicketWithTickets) *models.TicketResponse {
	response := &models.TicketResponse{}
	response.Artifacts = ticket.Artifacts
	response.Comments = ticket.Comments
	response.Created = ticket.Created
	response.Details = ticket.Details
	response.Files = ticket.Files
	response.ID = ticket.ID
	response.Modified = ticket.Modified
	response.Name = ticket.Name
	response.Owner = ticket.Owner
	response.Playbooks = ticket.Playbooks
	response.Read = ticket.Read
	response.References = ticket.References
	response.Schema = ticket.Schema
	response.Status = ticket.Status
	response.Type = ticket.Type
	response.Write = ticket.Write
	return response
}
// TaskUpdate overwrites a single task inside one of a ticket's playbooks
// and bumps the ticket's "modified" timestamp. The write filter hook
// restricts which tickets may be updated.
func (db *Database) TaskUpdate(ctx context.Context, id int64, playbookID string, taskID string, task *models.Task) (*models.TicketWithTickets, error) {
	filterQuery, filterVars, err := db.Hooks.TicketWriteFilter(ctx)
	if err != nil {
		return nil, err
	}

	aql := `LET d = DOCUMENT(@@collection, @ID)
	` + filterQuery + `
	LET playbook = d.playbooks[@playbookID]
	LET newtasks = MERGE(playbook.tasks, { @taskID: @task } )
	LET newplaybook = MERGE(playbook, {"tasks": newtasks})
	LET newplaybooks = MERGE(d.playbooks, { @playbookID: newplaybook } )
	UPDATE d WITH { "modified": DATE_ISO8601(DATE_NOW()), "playbooks": newplaybooks } IN @@collection
	RETURN NEW`

	bindVars := mergeMaps(map[string]interface{}{
		"playbookID": playbookID,
		"taskID":     taskID,
		"task":       task,
	}, filterVars)
	operation := &busdb.Operation{
		OperationType: busdb.Update,
		Ids: []driver.DocumentID{
			driver.NewDocumentID(TicketCollectionName, fmt.Sprintf("%d", id)),
		},
		Msg: fmt.Sprintf("Saved task %s in playbook %s", taskID, playbookID),
	}

	ticket, err := db.ticketGetQuery(ctx, id, aql, bindVars, operation)
	if err != nil {
		return nil, err
	}
	return ticket, nil
}
// TaskRun executes one task of a ticket playbook if, and only if, it is an
// automation task; any other task type is a no-op.
func (db *Database) TaskRun(ctx context.Context, id int64, playbookID string, taskID string) error {
	ticket, _, taskCtx, err := db.TaskGet(ctx, id, playbookID, taskID)
	if err != nil {
		return err
	}
	if taskCtx.Task.Type != models.TaskTypeAutomation {
		return nil
	}
	return runTask(id, playbookID, taskID, &taskCtx.Task, extractTicketResponse(ticket), db)
}
// runNextTasks schedules the automation follow-ups of a completed task.
// next maps follow-up task IDs to requirement expressions that are
// evaluated against the completed task's result data; an automation
// follow-up runs only when its requirement evaluates to true. Requirement
// evaluation errors skip that follow-up (best effort, unchanged behavior);
// failures to start a task are logged but do not stop the loop.
func runNextTasks(id int64, playbookID string, next map[string]string, data interface{}, ticket *models.TicketResponse, db *Database) {
	playbook, ok := ticket.Playbooks[playbookID]
	if !ok || playbook == nil {
		// Previously a missing playbook caused a nil pointer dereference.
		log.Printf("playbook %q not found on ticket %d", playbookID, id)
		return
	}
	for nextTaskID, requirement := range next {
		nextTask, ok := playbook.Tasks[nextTaskID]
		if !ok || nextTask == nil {
			// Previously an unknown follow-up task ID caused a nil
			// pointer dereference on nextTask.Type.
			log.Printf("next task %q not found in playbook %q", nextTaskID, playbookID)
			continue
		}
		if nextTask.Type != models.TaskTypeAutomation {
			continue
		}
		run, err := evalRequirement(requirement, data)
		if err != nil {
			continue
		}
		if run {
			if err := runTask(id, playbookID, nextTaskID, nextTask, ticket, db); err != nil {
				log.Println(err)
			}
		}
	}
}
// runTask publishes an automation job for the given task. The job message
// carries the playbook, task and ticket as context, plus an origin pointing
// back at the task so results can be written to the right place.
func runTask(ticketID int64, playbookID string, taskID string, task *models.TaskResponse, ticket *models.TicketResponse, db *Database) error {
	if task == nil || task.Automation == nil {
		// Previously a task without an automation reference caused a nil
		// pointer dereference when dereferencing task.Automation below.
		return fmt.Errorf("task %s in playbook %s has no automation", taskID, playbookID)
	}
	playbook := ticket.Playbooks[playbookID]
	msgContext := &models.Context{Playbook: playbook, Task: task, Ticket: ticket}
	origin := &models.Origin{TaskOrigin: &models.TaskOrigin{TaskId: taskID, PlaybookId: playbookID, TicketId: ticketID}}
	jobID := uuid.NewString()
	return publishJobMapping(jobID, *task.Automation, msgContext, origin, task.Payload, db)
}

101
database/tickettype.go Normal file
View File

@@ -0,0 +1,101 @@
package database
import (
"context"
"errors"
"github.com/arangodb/go-driver"
"github.com/iancoleman/strcase"
"github.com/SecurityBrewery/catalyst/database/busdb"
"github.com/SecurityBrewery/catalyst/generated/models"
)
// toTicketType converts a TicketTypeForm into the TicketType document
// stored in the database. Every field is copied one-to-one.
func toTicketType(doc *models.TicketTypeForm) *models.TicketType {
	ticketType := &models.TicketType{}
	ticketType.Name = doc.Name
	ticketType.Icon = doc.Icon
	ticketType.DefaultPlaybooks = doc.DefaultPlaybooks
	ticketType.DefaultTemplate = doc.DefaultTemplate
	ticketType.DefaultGroups = doc.DefaultGroups
	return ticketType
}
// toTicketTypeResponse converts a stored TicketType document plus its
// document key into the API response representation.
func toTicketTypeResponse(key string, doc *models.TicketType) *models.TicketTypeResponse {
	response := &models.TicketTypeResponse{ID: key}
	response.Name = doc.Name
	response.Icon = doc.Icon
	response.DefaultPlaybooks = doc.DefaultPlaybooks
	response.DefaultTemplate = doc.DefaultTemplate
	response.DefaultGroups = doc.DefaultGroups
	return response
}
// TicketTypeCreate stores a new ticket type under a kebab-case key derived
// from its name. The form and its name are required.
func (db *Database) TicketTypeCreate(ctx context.Context, tickettype *models.TicketTypeForm) (*models.TicketTypeResponse, error) {
	if tickettype == nil {
		return nil, errors.New("requires ticket type")
	}
	if tickettype.Name == "" {
		return nil, errors.New("requires ticket type name")
	}
	var doc models.TicketType
	// newctx carries the ReturnNew target so the driver fills doc with the
	// document as stored.
	newctx := driver.WithReturnNew(ctx, &doc)
	// NOTE(review): this collection wrapper takes both the plain and the
	// ReturnNew context — confirm against the wrapper's signature.
	meta, err := db.tickettypeCollection.CreateDocument(ctx, newctx, strcase.ToKebab(tickettype.Name), toTicketType(tickettype))
	if err != nil {
		return nil, err
	}
	return toTicketTypeResponse(meta.Key, &doc), nil
}
// TicketTypeGet loads a single ticket type by its document key.
func (db *Database) TicketTypeGet(ctx context.Context, id string) (*models.TicketTypeResponse, error) {
	var ticketType models.TicketType
	meta, err := db.tickettypeCollection.ReadDocument(ctx, id, &ticketType)
	if err != nil {
		return nil, err
	}
	return toTicketTypeResponse(meta.Key, &ticketType), nil
}
// TicketTypeUpdate replaces an existing ticket type document and returns
// the stored replacement.
func (db *Database) TicketTypeUpdate(ctx context.Context, id string, tickettype *models.TicketTypeForm) (*models.TicketTypeResponse, error) {
	if tickettype == nil {
		// Mirrors the guard in TicketTypeCreate; previously a nil form
		// caused a nil pointer dereference inside toTicketType.
		return nil, errors.New("requires ticket type")
	}
	var doc models.TicketType
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.tickettypeCollection.ReplaceDocument(ctx, id, toTicketType(tickettype))
	if err != nil {
		return nil, err
	}
	return toTicketTypeResponse(meta.Key, &doc), nil
}
// TicketTypeDelete removes a ticket type document by its key.
func (db *Database) TicketTypeDelete(ctx context.Context, id string) error {
	if _, err := db.tickettypeCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}
	return nil
}
// TicketTypeList returns all ticket types in the collection.
func (db *Database) TicketTypeList(ctx context.Context) ([]*models.TicketTypeResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": TicketTypeCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*models.TicketTypeResponse
	for {
		var doc models.TicketType
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		docs = append(docs, toTicketTypeResponse(meta.Key, &doc))
	}
	// The loop shadows err, so the outer err here was always nil; return
	// nil explicitly instead of the misleading stale variable.
	return docs, nil
}

201
database/user.go Normal file
View File

@@ -0,0 +1,201 @@
package database
import (
	"context"
	crand "crypto/rand"
	"crypto/sha256"
	"errors"
	"fmt"
	"math/big"
	"math/rand"
	"time"

	"github.com/arangodb/go-driver"
	"github.com/gin-gonic/gin"
	"github.com/iancoleman/strcase"

	"github.com/SecurityBrewery/catalyst/database/busdb"
	"github.com/SecurityBrewery/catalyst/generated/models"
	"github.com/SecurityBrewery/catalyst/pointer"
	"github.com/SecurityBrewery/catalyst/role"
)
var letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-_")
func init() {
rand.Seed(time.Now().UnixNano())
}
func generateKey() string {
b := make([]rune, 32)
for i := range b {
b[i] = letters[rand.Intn(len(letters))]
}
return string(b)
}
// toUser converts a UserForm into the User document stored in the
// database. Roles are exploded (each role expands to the roles it implies)
// before being stored. sha256 is the hash of the user's API key, or nil
// for users without one.
func toUser(user *models.UserForm, sha256 *string) *models.User {
	roles := append([]string{}, role.Strings(role.Explodes(user.Roles))...)
	return &models.User{
		Blocked: user.Blocked,
		Roles:   roles,
		Sha256:  sha256,
		Apikey:  user.Apikey,
	}
}
// toUserResponse converts a stored User document into its API
// representation. The stored key hash is deliberately not exposed.
func toUserResponse(key string, user *models.User) *models.UserResponse {
	response := &models.UserResponse{ID: key}
	response.Roles = user.Roles
	response.Blocked = user.Blocked
	response.Apikey = user.Apikey
	return response
}
// toNewUserResponse is like toUserResponse but additionally carries the
// freshly generated API-key secret, which is revealed only once at
// creation time.
func toNewUserResponse(key string, user *models.User, secret *string) *models.NewUserResponse {
	response := &models.NewUserResponse{ID: key, Secret: secret}
	response.Roles = user.Roles
	response.Blocked = user.Blocked
	return response
}
// UserGetOrCreate returns the user with newUser's ID, creating it first
// when it does not exist yet.
func (db *Database) UserGetOrCreate(ctx *gin.Context, newUser *models.UserForm) (*models.UserResponse, error) {
	user, err := db.UserGet(ctx, newUser.ID)
	if err == nil {
		return user, nil
	}
	if !driver.IsNotFound(err) {
		// Previously any lookup error (e.g. a connection failure) fell
		// through to user creation; only a missing document should.
		return nil, err
	}
	created, err := db.UserCreate(ctx, newUser)
	if err != nil {
		return nil, err
	}
	return &models.UserResponse{ID: created.ID, Roles: created.Roles, Blocked: created.Blocked}, nil
}
// UserCreate stores a new user under a kebab-case key derived from its ID.
// When an API key is requested, a random key is generated and only its
// SHA-256 hash is persisted; the clear-text key is returned exactly once
// in the response.
func (db *Database) UserCreate(ctx context.Context, newUser *models.UserForm) (*models.NewUserResponse, error) {
	var key string
	var hash *string
	if newUser.Apikey {
		key = generateKey()
		hash = pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
	}
	var doc models.User
	// newctx carries the ReturnNew target so the driver fills doc with the
	// document as stored.
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, hash))
	if err != nil {
		return nil, err
	}
	// NOTE(review): when Apikey is false, Secret is a pointer to "" rather
	// than nil — confirm callers treat an empty secret as "no key".
	return toNewUserResponse(meta.Key, &doc, pointer.String(key)), nil
}
// UserCreateSetupAPIKey creates the special "setup" admin user whose API
// key is supplied by the caller instead of being generated. Only the
// SHA-256 hash of the key is stored.
func (db *Database) UserCreateSetupAPIKey(ctx context.Context, key string) (*models.UserResponse, error) {
	newUser := &models.UserForm{
		ID:      "setup",
		Roles:   []string{role.Admin},
		Apikey:  true,
		Blocked: false,
	}
	hash := pointer.String(fmt.Sprintf("%x", sha256.Sum256([]byte(key))))
	var doc models.User
	// newctx carries the ReturnNew target so the driver fills doc with the
	// document as stored.
	newctx := driver.WithReturnNew(ctx, &doc)
	meta, err := db.userCollection.CreateDocument(ctx, newctx, strcase.ToKebab(newUser.ID), toUser(newUser, hash))
	if err != nil {
		return nil, err
	}
	return toUserResponse(meta.Key, &doc), nil
}
// UserGet loads a single user by document key.
func (db *Database) UserGet(ctx context.Context, id string) (*models.UserResponse, error) {
	var user models.User
	meta, err := db.userCollection.ReadDocument(ctx, id, &user)
	if err != nil {
		return nil, err
	}
	return toUserResponse(meta.Key, &user), nil
}
// UserDelete removes a user document by its key.
func (db *Database) UserDelete(ctx context.Context, id string) error {
	if _, err := db.userCollection.RemoveDocument(ctx, id); err != nil {
		return err
	}
	return nil
}
// UserList returns all users. The stored API-key hash is scrubbed from
// every document before conversion so it can never leak into a response.
func (db *Database) UserList(ctx context.Context) ([]*models.UserResponse, error) {
	query := "FOR d IN @@collection RETURN d"
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserCollectionName}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var docs []*models.UserResponse
	for {
		var doc models.User
		meta, err := cursor.ReadDocument(ctx, &doc)
		if driver.IsNoMoreDocuments(err) {
			break
		} else if err != nil {
			return nil, err
		}
		doc.Sha256 = nil // never expose the API-key hash
		docs = append(docs, toUserResponse(meta.Key, &doc))
	}
	// The loop shadows err, so the outer err here was always nil; return
	// nil explicitly instead of the misleading stale variable.
	return docs, nil
}
// UserByHash finds the user whose stored API-key hash equals sha256
// (hex-encoded). When no user matches, ReadDocument on the empty cursor
// returns the driver's "no more documents" error, which is passed through
// to the caller.
func (db *Database) UserByHash(ctx context.Context, sha256 string) (*models.UserResponse, error) {
	query := `FOR d in @@collection
	FILTER d.sha256 == @sha256
	RETURN d`
	cursor, _, err := db.Query(ctx, query, map[string]interface{}{"@collection": UserCollectionName, "sha256": sha256}, busdb.ReadOperation)
	if err != nil {
		return nil, err
	}
	defer cursor.Close()
	var doc models.User
	meta, err := cursor.ReadDocument(ctx, &doc)
	if err != nil {
		return nil, err
	}
	return toUserResponse(meta.Key, &doc), err
}
// UserUpdate replaces a user document. Users that carry an API-key hash
// cannot be updated through this path. The replacement is built with a nil
// hash (toUser(user, nil)), so a regular user can never be turned into an
// API key here.
func (db *Database) UserUpdate(ctx context.Context, id string, user *models.UserForm) (*models.UserResponse, error) {
	var doc models.User
	_, err := db.userCollection.ReadDocument(ctx, id, &doc)
	if err != nil {
		return nil, err
	}
	if doc.Sha256 != nil {
		return nil, errors.New("cannot update an API key")
	}
	// doc is reused as the ReturnNew target, so the response reflects the
	// document as stored after the replace.
	ctx = driver.WithReturnNew(ctx, &doc)
	meta, err := db.userCollection.ReplaceDocument(ctx, id, toUser(user, nil))
	if err != nil {
		return nil, err
	}
	return toUserResponse(meta.Key, &doc), nil
}

160
definition/CAQLLexer.g4 Normal file
View File

@@ -0,0 +1,160 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2020 by Martin Mirchev
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser
* Developed by : Bart Kiers, bart@big-o.nl
*/
// $antlr-format alignTrailingComments on, columnLimit 150, maxEmptyLinesToKeep 1, reflowComments off, useTab off
// $antlr-format allowShortRulesOnASingleLine on, alignSemicolons ownLine
lexer grammar CAQLLexer;
channels { ERRORCHANNEL }
DOT: '.';
// https://github.com/arangodb/arangodb/blob/devel/arangod/Aql/grammar.y
T_REGEX_MATCH: '=~'; // "=~ operator"
T_REGEX_NON_MATCH: '!~'; // "!~ operator"
T_EQ: '=='; // "== operator";
T_NE: '!='; // "!= operator";
T_LT: '<'; // "< operator";
T_GT: '>'; // "> operator";
T_LE: '<='; // "<= operator";
T_GE: '>='; // ">= operator";
T_PLUS: '+'; // "+ operator"
T_MINUS: '-'; // "- operator"
T_TIMES: '*'; // "* operator"
T_DIV: '/'; // "/ operator"
T_MOD: '%'; // "% operator"
T_QUESTION: '?'; // "?"
T_COLON: ':'; // ":"
T_SCOPE: '::'; // "::"
T_RANGE: '..'; // ".."
T_COMMA: ','; // ","
T_OPEN: '('; // "("
T_CLOSE: ')'; // ")"
T_OBJECT_OPEN: '{'; // "{"
T_OBJECT_CLOSE: '}'; // "}"
T_ARRAY_OPEN: '['; // "["
T_ARRAY_CLOSE: ']'; // "]"
// https://www.arangodb.com/docs/stable/aql/fundamentals-syntax.html#keywords
T_AGGREGATE: A G G R E G A T E;
T_ALL: A L L;
T_AND: (A N D | '&&');
T_ANY: A N Y;
T_ASC: A S C;
T_COLLECT: C O L L E C T;
T_DESC: D E S C;
T_DISTINCT: D I S T I N C T;
T_FALSE: F A L S E;
T_FILTER: F I L T E R;
T_FOR: F O R;
T_GRAPH: G R A P H;
T_IN: I N;
T_INBOUND: I N B O U N D;
T_INSERT: I N S E R T;
T_INTO: I N T O;
T_K_SHORTEST_PATHS: K '_' S H O R T E S T '_' P A T H S;
T_LET: L E T;
T_LIKE: L I K E;
T_LIMIT: L I M I T;
T_NONE: N O N E;
T_NOT: (N O T | '!');
T_NULL: N U L L;
T_OR: (O R | '||');
T_OUTBOUND: O U T B O U N D;
T_REMOVE: R E M O V E;
T_REPLACE: R E P L A C E;
T_RETURN: R E T U R N;
T_SHORTEST_PATH: S H O R T E S T '_' P A T H;
T_SORT: S O R T;
T_TRUE: T R U E;
T_UPDATE: U P D A T E;
T_UPSERT: U P S E R T;
T_WITH: W I T H;
T_KEEP: K E E P;
T_COUNT: C O U N T;
T_OPTIONS: O P T I O N S;
T_PRUNE: P R U N E;
T_SEARCH: S E A R C H;
T_TO: T O;
T_CURRENT: C U R R E N T;
T_NEW: N E W;
T_OLD: O L D;
T_STRING: [a-zA-Z_] [a-zA-Z_0-9]*;
T_INT: [1-9] DIGIT* | '0' | '0x' HEX_DIGIT+ | '0b' [0-1]+;
T_FLOAT: ( [1-9] DIGIT* | '0' )? '.' DIGIT+ (E [-+]? DIGIT+)?;
T_PARAMETER: '@' T_STRING;
T_QUOTED_STRING: ('\'' ('\\'. | '\'\'' | ~('\'' | '\\'))* '\'' | '"' ( '\\'. | '""' | ~('"'| '\\') )* '"');
SINGLE_LINE_COMMENT: '//' ~[\r\n]* (('\r'? '\n') | EOF) -> channel(HIDDEN);
MULTILINE_COMMENT: '/*' .*? '*/' -> channel(HIDDEN);
SPACES: [ \u000B\t\r\n] -> channel(HIDDEN);
UNEXPECTED_CHAR: .;
fragment HEX_DIGIT: [0-9a-fA-F];
fragment DIGIT: [0-9];
fragment A: [aA];
fragment B: [bB];
fragment C: [cC];
fragment D: [dD];
fragment E: [eE];
fragment F: [fF];
fragment G: [gG];
fragment H: [hH];
fragment I: [iI];
fragment J: [jJ];
fragment K: [kK];
fragment L: [lL];
fragment M: [mM];
fragment N: [nN];
fragment O: [oO];
fragment P: [pP];
fragment Q: [qQ];
fragment R: [rR];
fragment S: [sS];
fragment T: [tT];
fragment U: [uU];
fragment V: [vV];
fragment W: [wW];
fragment X: [xX];
fragment Y: [yY];
fragment Z: [zZ];
ERROR_RECONGNIGION: . -> channel(ERRORCHANNEL);

109
definition/CAQLParser.g4 Normal file
View File

@@ -0,0 +1,109 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 by Bart Kiers
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
* associated documentation files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify, merge, publish, distribute,
* sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or
* substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
* NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
* DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Project : sqlite-parser; an ANTLR4 grammar for SQLite https://github.com/bkiers/sqlite-parser
* Developed by:
* Bart Kiers, bart@big-o.nl
* Martin Mirchev, marti_2203@abv.bg
* Mike Lische, mike@lischke-online.de
*/
// $antlr-format alignTrailingComments on, columnLimit 130, minEmptyLines 1, maxEmptyLinesToKeep 1, reflowComments off
// $antlr-format useTab off, allowShortRulesOnASingleLine off, allowShortBlocksOnASingleLine on, alignSemicolons ownLine
parser grammar CAQLParser;
options {
tokenVocab = CAQLLexer;
}
parse: expression EOF
;
expression:
value_literal
| reference
| operator_unary
| expression (T_PLUS|T_MINUS) expression
| expression (T_TIMES|T_DIV|T_MOD) expression
| expression T_RANGE expression
| expression (T_LT|T_GT|T_LE|T_GE) expression
| expression T_NOT? T_IN expression
| expression (T_EQ|T_NE) expression
| expression (T_ALL|T_ANY|T_NONE) eq_op=(T_EQ|T_NE|T_LT|T_GT|T_LE|T_GE|T_IN) expression
| expression (T_ALL|T_ANY|T_NONE) T_NOT T_IN expression
| expression T_NOT? (T_LIKE|T_REGEX_MATCH|T_REGEX_NON_MATCH) expression
| expression T_AND expression
| expression T_OR expression
| expression T_QUESTION expression T_COLON expression
| expression T_QUESTION T_COLON expression
;
operator_unary: (
T_PLUS expression
| T_MINUS expression
| T_NOT expression
);
reference:
T_STRING
| compound_value
| function_call
| T_OPEN expression T_CLOSE
| reference DOT T_STRING
| reference T_ARRAY_OPEN expression T_ARRAY_CLOSE
;
compound_value: (
array
| object
);
function_call: (
T_STRING T_OPEN expression? (T_COMMA expression)*? T_COMMA? T_CLOSE
);
value_literal: (
T_QUOTED_STRING
| T_INT
| T_FLOAT
| T_NULL
| T_TRUE
| T_FALSE
);
array:(
T_ARRAY_OPEN expression? (T_COMMA expression)*? T_COMMA? T_ARRAY_CLOSE
);
object:
T_OBJECT_OPEN object_element? (T_COMMA object_element)* T_COMMA? T_OBJECT_CLOSE
;
object_element:(
T_STRING
| object_element_name T_COLON expression
| T_ARRAY_OPEN expression T_ARRAY_CLOSE T_COLON expression
);
object_element_name:(
T_STRING
| T_QUOTED_STRING
);

29
definition/artifacts.yaml Normal file
View File

@@ -0,0 +1,29 @@
swagger: "2.0"
info: { version: "", title: "" }
paths: { }
definitions:
Artifact:
type: object
required: [ name ]
properties:
name: { type: string, example: "2.2.2.2" }
type: { type: string }
status: { type: string, example: "Unknown" }
enrichments: { type: object, additionalProperties: { $ref: "#/definitions/Enrichment" } }
EnrichmentForm:
type: object
required: [ name, data ]
properties:
name: { type: string, example: "hash.sha1" }
data: { type: object, example: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } }
Enrichment:
type: object
required: [ name, data, created ]
properties:
name: { type: string, example: "hash.sha1" }
data: { type: object, example: { "hash": "b7a067a742c20d07a7456646de89bc2d408a1153" } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }

165
definition/automation.yaml Normal file
View File

@@ -0,0 +1,165 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/automations:
get:
tags: [ "automations" ]
summary: "List automations"
operationId: "listAutomations"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/AutomationResponse" } }
examples:
test:
- id: comment
image: "docker.io/python:3"
script: ""
type: [ playbook ]
- id: hash.sha1
image: "docker.io/python:3"
script: ""
type: [ global, artifact, playbook ]
schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
- id: thehive
image: "docker.io/python:3"
script: ""
type: [ global ]
schema: "{\"title\":\"TheHive credentials\",\"type\":\"object\",\"properties\":{\"thehiveurl\":{\"type\":\"string\",\"title\":\"TheHive URL (e.g. 'https://thehive.example.org')\"},\"thehivekey\":{\"type\":\"string\",\"title\":\"TheHive API Key\"},\"skip_files\":{\"type\":\"boolean\", \"default\": true, \"title\":\"Skip Files (much faster)\"},\"keep_ids\":{\"type\":\"boolean\", \"default\": true, \"title\":\"Keep IDs and overwrite existing IDs\"}},\"required\":[\"thehiveurl\", \"thehivekey\", \"skip_files\", \"keep_ids\"]}"
- id: vt.hash
image: "docker.io/python:3"
script: ""
type: [ global, artifact, playbook ]
schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
security: [ { roles: [ "automation:read" ] } ]
post:
tags: [ "automations" ]
summary: "Create a new automation"
operationId: "createAutomation"
parameters:
- { name: "automation", in: "body", description: "New automation", required: true, schema: { $ref: "#/definitions/AutomationForm" }, x-example: { id: "hash-sha-256", image: "docker.io/python:3", script: "import sys\nimport json\nimport hashlib\n\n\ndef run(msg):\n sha256 = hashlib.sha256(msg['payload']['default'].encode('utf-8'))\n return {'hash': sha256.hexdigest()}\n\n\nprint(json.dumps(run(json.loads(sys.argv[1]))))\n", type: [ global ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/AutomationResponse" }
examples:
test:
id: "hash-sha-256"
image: "docker.io/python:3"
type: [ global ]
script: |
import sys
import json
import hashlib
def run(msg):
sha256 = hashlib.sha256(msg['payload']['default'].encode('utf-8'))
return {'hash': sha256.hexdigest()}
print(json.dumps(run(json.loads(sys.argv[1]))))
security: [ { roles: [ "automation:write" ] } ]
/automations/{id}:
get:
tags: [ "automations" ]
summary: "Get a single automation"
operationId: "getAutomation"
parameters:
- { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/AutomationResponse" }
examples:
test:
id: hash.sha1
image: "docker.io/python:3"
type: [ global, artifact, playbook ]
schema: "{\"title\":\"Input\",\"type\":\"object\",\"properties\":{\"default\":{\"type\":\"string\",\"title\":\"Value\"}},\"required\":[\"default\"]}"
script: |
#!/usr/bin/env python
import sys
import json
import hashlib
def run(msg):
sha1 = hashlib.sha1(msg['payload']['default'].encode('utf-8'))
return {"hash": sha1.hexdigest()}
print(json.dumps(run(json.loads(sys.argv[1]))))
security: [ { roles: [ "automation:read" ] } ]
put:
tags: [ "automations" ]
summary: "Update an existing automation"
operationId: "updateAutomation"
parameters:
- { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
- { name: "automation", in: "body", description: "Automation object that needs to be added", required: true, schema: { $ref: "#/definitions/AutomationForm" }, x-example: { id: hash.sha1, image: "docker.io/python:3", script: "import sys\nimport json\nimport hashlib\n\n\ndef run(msg):\n sha1 = hashlib.sha1(msg['payload'].encode('utf-8'))\n return {'hash': sha1.hexdigest()}\n\n\nprint(json.dumps(run(json.loads(sys.argv[1]))))\n", type: [ global, artifact, playbook ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/AutomationResponse" }
examples:
test:
id: hash.sha1
image: "docker.io/python:3"
type: [ global, artifact, playbook ]
script: |
import sys
import json
import hashlib
def run(msg):
sha1 = hashlib.sha1(msg['payload'].encode('utf-8'))
return {'hash': sha1.hexdigest()}
print(json.dumps(run(json.loads(sys.argv[1]))))
security: [ { roles: [ "automation:write" ] } ]
delete:
tags: [ "automations" ]
      summary: "Delete an automation"
operationId: "deleteAutomation"
parameters:
- { name: "id", in: "path", description: "Automation ID", required: true, type: string, x-example: "hash.sha1" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "automation:write" ] } ]
definitions:
AutomationForm:
type: object
required: [ id, image, script, type ]
properties:
id: { type: string }
image: { type: string }
script: { type: string }
type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
schema: { type: string, example: "{}" }
Automation:
type: object
required: [ image, script, type ]
properties:
image: { type: string }
script: { type: string }
type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
schema: { type: string, example: "{}" }
AutomationResponse:
type: object
required: [ id, image, script, type ]
properties:
id: { type: string }
image: { type: string }
script: { type: string }
type: { type: array, items: { type: string, enum: [ artifact, playbook, global ] } }
schema: { type: string, example: "{}" }

View File

@@ -0,0 +1,52 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/graph/{col}/{id}:
get:
tags: [ "graph" ]
summary: "Graph"
operationId: "graph"
parameters:
        - { name: "col", in: "path", description: "Collection of the start document", required: true, type: string, x-example: "tickets" }
        - { name: "id", in: "path", description: "ID of the start document", required: true, type: string, x-example: "88" }
        - { name: "depth", in: "query", description: "Traversal depth", required: true, type: integer, x-example: 1 }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/Graph" }
examples:
test:
nodes:
- { id: "artifacts/94d5cab6f5fe3422a447ab15436e7a672bc0c09a", name: "94d5cab6f5fe3422a447ab15436e7a672bc0c09a" }
- { id: "artifacts/http%3A%2F%2Fwww.customerviral.io%2Fscalable%2Fvertical%2Fkiller", name: "http://www.customerviral.io/scalable/vertical/killer" }
- { id: "artifacts/leadreintermediate.io", name: "leadreintermediate.io" }
- { id: "tickets/88", name: "live zebra" }
links:
- { id: "296239", sid: "tickets/88", tid: "artifacts/http%3A%2F%2Fwww.customerviral.io%2Fscalable%2Fvertical%2Fkiller" }
- { id: "296240", sid: "tickets/88", tid: "artifacts/leadreintermediate.io" }
- { id: "296242", sid: "tickets/88", tid: "artifacts/94d5cab6f5fe3422a447ab15436e7a672bc0c09a" }
security: [ { roles: [ "ticket:read" ] } ]
definitions:
Graph:
type: object
properties:
nodes: { type: array, items: { $ref: "#/definitions/Node" } }
links: { type: array, items: { $ref: "#/definitions/Link" } }
Node:
type: object
required: [ id, name ]
properties:
id: { type: string }
name: { type: string }
Link:
type: object
required: [ id, tid, sid ]
properties:
id: { type: string }
# name: { type: string }
tid: { type: string }
sid: { type: string }

View File

@@ -0,0 +1,84 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/groups:
get:
tags: [ "groups" ]
summary: "List groups"
operationId: "listGroups"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/Group" } }
security: [ { roles: [ "group:read" ] } ]
post:
tags: [ "groups" ]
summary: "Create a new group"
operationId: "createGroup"
parameters:
- { name: "group", in: "body", description: "New group", required: true, schema: { $ref: "#/definitions/GroupForm" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/GroupResponse" }
security: [ { roles: [ "group:write" ] } ]
/groups/{id}:
get:
tags: [ "groups" ]
summary: "Get a single group"
operationId: "getGroup"
parameters:
- { name: "id", in: "path", description: "Group ID", required: true, type: string }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/GroupResponse" }
security: [ { roles: [ "group:read" ] } ]
put:
tags: [ "groups" ]
summary: "Update an existing group"
operationId: "updateGroup"
parameters:
- { name: "id", in: "path", description: "Group ID", required: true, type: string }
- { name: "group", in: "body", description: "Group object that needs to be added", required: true, schema: { $ref: "#/definitions/Group" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/Group" }
security: [ { roles: [ "group:write" ] } ]
delete:
tags: [ "groups" ]
summary: "Delete a group"
operationId: "deleteGroup"
parameters:
- { name: "id", in: "path", description: "Group ID", required: true, type: string }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "group:write" ] } ]
definitions:
GroupForm:
type: object
required: [ name, users ]
properties:
id: { type: string }
name: { type: string }
users: { type: array, items: { type: string } }
Group:
type: object
required: [ name, users ]
properties:
name: { type: string }
users: { type: array, items: { type: string } }
GroupResponse:
type: object
required: [ id, name, users ]
properties:
id: { type: string }
name: { type: string }
users: { type: array, items: { type: string } }

View File

@@ -0,0 +1,110 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/rules:
get:
tags: [ "rules" ]
summary: "List rules"
operationId: "listRules"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/RuleResponse" } }
examples:
test:
- id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:read" ] } ]
post:
tags: [ "rules" ]
summary: "Create a rule"
operationId: "createRule"
parameters:
- { name: "rule", in: "body", description: "New rule", required: true, schema: { $ref: "#/definitions/RuleForm" }, x-example: { name: "Ignore all Alerts", condition: "type == 'alert'", update: { "status": "closed" } } }
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/RuleResponse" } }
examples:
test:
id: ignore-all-alerts
name: Ignore all Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:write" ] } ]
/rules/{id}:
get:
tags: [ "rules" ]
summary: "Get a single rule"
operationId: "getRule"
parameters:
- { name: "id", in: "path", description: "Rule name", required: true, type: string, x-example: "ignore-alerts" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/RuleResponse" }
examples:
test:
id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "closed" }
security: [ { roles: [ "rule:read" ] } ]
put:
tags: [ "rules" ]
      summary: "Update an existing rule"
operationId: "updateRule"
parameters:
- { name: "id", in: "path", description: "Rule ID", required: true, type: string, x-example: "ignore-alerts" }
- { name: "rule", in: "body", description: "Updated rule", required: true, schema: { $ref: "#/definitions/RuleForm" }, x-example: { name: "Ignore Alerts", condition: "type == 'alert'", update: { "status": "invalid" } } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/RuleResponse" }
examples:
test:
id: ignore-alerts
name: Ignore Alerts
condition: "type == 'alert'"
update: { "status": "invalid" }
security: [ { roles: [ "rule:write" ] } ]
delete:
tags: [ "rules" ]
summary: "Delete a rule"
operationId: "deleteRule"
parameters:
- { name: "id", in: "path", description: "Rule name", required: true, type: string, x-example: "ignore-alerts" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "rule:write" ] } ]
definitions:
RuleForm:
type: object
required: [ name, condition, update ]
properties:
id: { type: string }
name: { type: string }
condition: { type: string }
update: { type: object }
Rule:
type: object
required: [ name, condition, update ]
properties:
name: { type: string }
condition: { type: string }
update: { type: object }
RuleResponse:
type: object
required: [ id, name, condition, update ]
properties:
id: { type: string }
name: { type: string }
condition: { type: string }
update: { type: object }

130
definition/jobs.yaml Normal file
View File

@@ -0,0 +1,130 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/jobs:
get:
tags: [ "jobs" ]
summary: "List jobs"
operationId: "listJobs"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/JobResponse" } }
examples:
test:
- id: "99cd67131b48"
automation: "hash.sha1"
payload: "test"
status: "created"
security: [ { roles: [ "job:read" ] } ]
post:
tags: [ "jobs" ]
summary: "Start a new job"
operationId: "runJob"
parameters:
- { name: "job", in: "body", description: "New job", required: true, schema: { $ref: "#/definitions/JobForm" }, x-example: { automation: "hash.sha1", message: { payload: "test" } } }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "job:write" ] } ]
/jobs/{id}:
get:
tags: [ "jobs" ]
summary: "Get a single job"
operationId: "getJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "99cd67131b48" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "created" }
security: [ { roles: [ "job:read" ] } ]
put:
tags: [ "jobs" ]
summary: "Update an existing job"
operationId: "updateJob"
parameters:
- { name: "id", in: "path", description: "Job ID", required: true, type: string, x-example: "99cd67131b48" }
- { name: "job", in: "body", description: "Job object that needs to be added", required: true, schema: { $ref: "#/definitions/Job" }, x-example: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "failed" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/JobResponse" }
examples:
test: { id: "99cd67131b48", automation: "hash.sha1", payload: "test", status: "failed" }
security: [ { roles: [ "job:write" ] } ]
definitions:
Message:
type: object
properties:
payload: { type: object }
secrets: { type: object, additionalProperties: { type: string } }
context: { $ref: "#/definitions/Context" }
Context:
type: object
properties:
artifact: { $ref: "#/definitions/Artifact" }
playbook: { $ref: "#/definitions/PlaybookResponse" }
task: { $ref: "#/definitions/TaskResponse" }
ticket: { $ref: "#/definitions/TicketResponse" }
Origin:
type: object
properties:
task_origin: { $ref: "#/definitions/TaskOrigin" }
artifact_origin: { $ref: "#/definitions/ArtifactOrigin" }
TaskOrigin:
type: object
required: [ ticket_id, playbook_id, task_id ]
properties:
ticket_id: { type: integer, format: int64 }
playbook_id: { type: string }
task_id: { type: string }
ArtifactOrigin:
type: object
required: [ ticket_id, artifact ]
properties:
ticket_id: { type: integer, format: int64 }
artifact: { type: string }
JobForm:
type: object
required: [ automation ]
properties:
automation: { type: string }
payload: { }
origin: { $ref: "#/definitions/Origin" }
Job:
type: object
required: [ automation, running, status ]
properties:
automation: { type: string }
container: { type: string }
payload: { }
running: { type: boolean }
status: { type: string }
log: { type: string }
output: { type: object }
origin: { $ref: "#/definitions/Origin" }
JobResponse:
type: object
required: [ id, automation, status ]
properties:
id: { type: string }
automation: { type: string }
container: { type: string }
status: { type: string }
payload: { }
log: { type: string }
output: { type: object }
origin: { $ref: "#/definitions/Origin" }

30
definition/logs.yaml Normal file
View File

@@ -0,0 +1,30 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/logs/{reference}:
get:
tags: [ "logs" ]
summary: "Get log entries"
operationId: "getLogs"
parameters:
- { name: "reference", in: "path", description: "Reference", required: true, type: string, x-example: "tickets%2F294511" }
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/LogEntry" } }
examples:
test:
- { "created": "2021-10-02T18:05:00.333535+02:00","creator": "bob","reference": "tickets/294511","message": "Fail run account resist lend solve incident centre priority temperature. Cause change distribution examine location technique shape partner milk customer. Rail tea plate soil report cook railway interpretation breath action. Exercise dream accept park conclusion addition shoot assistance may answer. Gold writer link stop combine hear power name commitment operation. Determine lifespan support grow degree henry exclude detail set religion. Direct library policy convention chain retain discover ride walk student. Gather proposal select march aspect play noise avoid encourage employ. Assessment preserve transport combine wish influence income guess run stand. Charge limit crime ignore statement foundation study issue stop claim." }
security: [ { roles: [ "log:read" ] } ]
definitions:
LogEntry:
type: object
required: [ reference, creator, created, message ]
properties:
reference: { type: string }
creator: { type: string }
created: { type: string, format: "date-time" }
message: { type: string }

202
definition/playbooks.yaml Normal file
View File

@@ -0,0 +1,202 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/playbooks:
get:
tags: [ "playbooks" ]
summary: "List playbooks"
operationId: "listPlaybooks"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/PlaybookTemplateResponse" } }
examples:
test:
- id: malware
name: Malware
yaml: "name: Malware\ntasks:\n file-or-hash:\n name: Do you have the file or the hash?\n type: input\n schema:\n title: Malware\n type: object\n properties:\n file:\n type: string\n title: \"I have the\"\n enum: [ \"File\", \"Hash\" ]\n next:\n enter-hash: \"file == 'Hash'\"\n upload: \"file == 'File'\"\n\n enter-hash:\n name: Please enter the hash\n type: input\n schema:\n title: Malware\n type: object\n properties:\n hash:\n type: string\n title: Please enter the hash value\n minlength: 32\n next:\n virustotal: \"hash != ''\"\n\n upload:\n name: Upload the malware\n type: input\n schema:\n title: Malware\n type: object\n properties:\n malware:\n type: object\n x-display: file\n title: Please upload the malware\n next:\n hash: \"malware\"\n\n hash:\n name: Hash the malware\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['upload'].data['malware']\"\n next:\n virustotal:\n\n virustotal:\n name: Send hash to VirusTotal\n type: automation\n automation: vt.hash\n args:\n hash: \"playbook.tasks['enter-hash'].data['hash'] || playbook.tasks['hash'].data['hash']\"\n # next:\n # known-malware: \"score > 5\"\n # sandbox: \"score < 6\" # unknown-malware\n"
- id: phishing
name: Phishing
yaml: "name: Phishing\ntasks:\n board:\n name: Board Involvement?\n description: Is a board member involved?\n type: input\n schema:\n properties:\n boardInvolved:\n default: false\n title: A board member is involved.\n type: boolean\n required:\n - boardInvolved\n title: Board Involvement?\n type: object\n next:\n escalate: \"boardInvolved == true\"\n mail-available: \"boardInvolved == false\"\n\n escalate:\n name: Escalate to CISO\n description: Please escalate the task to the CISO\n type: task\n\n mail-available:\n name: Mail available\n type: input\n schema:\n oneOf:\n - properties:\n mail:\n title: Mail\n type: string\n x-display: textarea\n schemaKey:\n const: 'yes'\n type: string\n required:\n - mail\n title: 'Yes'\n - properties:\n schemaKey:\n const: 'no'\n type: string\n title: 'No'\n title: Mail available\n type: object\n next:\n block-sender: \"schemaKey == 'yes'\"\n extract-iocs: \"schemaKey == 'yes'\"\n search-email-gateway: \"schemaKey == 'no'\"\n\n search-email-gateway:\n name: Search email gateway\n description: Please search email-gateway for the phishing mail.\n type: task\n next:\n extract-iocs:\n\n block-sender:\n name: Block sender\n type: task\n next:\n extract-iocs:\n\n extract-iocs:\n name: Extract IOCs\n description: Please insert the IOCs\n type: input\n schema:\n properties:\n iocs:\n items:\n type: string\n title: IOCs\n type: array\n title: Extract IOCs\n type: object\n next:\n block-iocs:\n\n block-iocs:\n name: Block IOCs\n type: task\n"
- id: simple
name: Simple
yaml: "name: Simple\ntasks:\n input:\n name: Enter something to hash\n type: input\n schema:\n title: Something\n type: object\n properties:\n something:\n type: string\n title: Something\n default: \"\"\n next:\n hash: \"something != ''\"\n\n hash:\n name: Hash the something\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['something']\"\n next:\n comment: \"hash != ''\"\n\n comment:\n name: Comment the hash\n type: automation\n automation: comment\n payload:\n default: \"playbook.tasks['hash'].data['hash']\"\n next:\n done: \"done\"\n\n done:\n name: You can close this case now\n type: task\n"
security: [ { roles: [ "playbook:read" ] } ]
post:
tags: [ "playbooks" ]
summary: "Create a playbook"
operationId: "createPlaybook"
parameters:
- { name: "playbook", in: "body", description: "New playbook", required: true, schema: { $ref: "#/definitions/PlaybookTemplateForm" }, x-example: { yaml: "name: Simple2\ntasks:\n input:\n name: Upload malware if possible\n type: input\n schema:\n title: Malware\n type: object\n properties:\n malware:\n type: string\n title: Select malware\n default: \"\"\n next:\n hash: \"malware != ''\"\n\n hash:\n name: Hash the malware\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['malware']\"\n next:\n escalate:\n\n escalate:\n name: Escalate to malware team\n type: task\n" } }
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/PlaybookTemplateResponse" } }
examples:
test:
id: simple-2
name: Simple2
yaml: |
name: Simple2
tasks:
input:
name: Upload malware if possible
type: input
schema:
title: Malware
type: object
properties:
malware:
type: string
title: Select malware
default: ""
next:
hash: "malware != ''"
hash:
name: Hash the malware
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['malware']"
next:
escalate:
escalate:
name: Escalate to malware team
type: task
security: [ { roles: [ "playbook:write" ] } ]
/playbooks/{id}:
get:
tags: [ "playbooks" ]
summary: "Get a single playbook"
operationId: "getPlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook name", required: true, type: string, x-example: "simple" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple
name: Simple
yaml: |
name: Simple
tasks:
input:
name: Enter something to hash
type: input
schema:
title: Something
type: object
properties:
something:
type: string
title: Something
default: ""
next:
hash: "something != ''"
hash:
name: Hash the something
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['something']"
next:
comment: "hash != ''"
comment:
name: Comment the hash
type: automation
automation: comment
payload:
default: "playbook.tasks['hash'].data['hash']"
next:
done: "done"
done:
name: You can close this case now
type: task
security: [ { roles: [ "playbook:read" ] } ]
put:
tags: [ "playbooks" ]
      summary: "Update an existing playbook"
operationId: "updatePlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook ID", required: true, type: string, x-example: "simple" }
- { name: "playbook", in: "body", description: "Updated playbook", required: true, schema: { $ref: "#/definitions/PlaybookTemplateForm" }, x-example: { yaml: "name: Simple\ntasks:\n input:\n name: Upload malware if possible\n type: input\n schema:\n title: Malware\n type: object\n properties:\n malware:\n type: string\n title: Select malware\n default: \"\"\n next:\n hash: \"malware != ''\"\n\n hash:\n name: Hash the malware\n type: automation\n automation: hash.sha1\n payload:\n default: \"playbook.tasks['input'].data['malware']\"\n next:\n escalate:\n\n escalate:\n name: Escalate to malware team\n type: task\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/PlaybookTemplateResponse" }
examples:
test:
id: simple
name: Simple
yaml: |
name: Simple
tasks:
input:
name: Upload malware if possible
type: input
schema:
title: Malware
type: object
properties:
malware:
type: string
title: Select malware
default: ""
next:
hash: "malware != ''"
hash:
name: Hash the malware
type: automation
automation: hash.sha1
payload:
default: "playbook.tasks['input'].data['malware']"
next:
escalate:
escalate:
name: Escalate to malware team
type: task
security: [ { roles: [ "playbook:write" ] } ]
delete:
tags: [ "playbooks" ]
summary: "Delete a playbook"
operationId: "deletePlaybook"
parameters:
- { name: "id", in: "path", description: "Playbook name", required: true, type: string, x-example: "simple" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "playbook:write" ] } ]
definitions:
PlaybookTemplateForm:
type: object
required: [ yaml ]
properties:
id: { type: string }
yaml: { type: string }
PlaybookTemplate:
type: object
required: [ name, yaml ]
properties:
name: { type: string }
yaml: { type: string }
PlaybookTemplateResponse:
type: object
required: [ id, name, yaml ]
properties:
id: { type: string }
name: { type: string }
yaml: { type: string }

59
definition/settings.yaml Normal file
View File

@@ -0,0 +1,59 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/settings:
get:
tags: [ "settings" ]
summary: "Get settings"
operationId: "getSettings"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/Settings" }
examples:
test:
version: "0.0.0-test"
tier: community
timeformat: "YYYY-MM-DDThh:mm:ss"
ticketTypes:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
artifactStates:
- { icon: "mdi-help-circle-outline", id: "unknown", name: "Unknown", color: "info" }
- { icon: "mdi-skull", id: "malicious", name: "Malicious", color: "error" }
- { icon: "mdi-check", id: "clean", name: "Clean", color: "success" }
roles: [
"admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write",
"admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read",
"admin:userdata:write", "analyst:automation:read",
"analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read",
"analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read",
"analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write",
"analyst:tickettype:read", "analyst:user:read", "engineer:automation:write",
"engineer:playbook:write", "engineer:rule:write", "engineer:template:write",
"engineer:tickettype:write" ]
security: [ { roles: [ "settings:read" ] } ]
definitions:
Settings:
type: object
required: [ version, tier, timeformat, ticketTypes, artifactStates ]
properties:
version: { title: "Version", type: string }
tier: { title: "Tier", type: string, enum: [ "community", "enterprise" ] }
timeformat: { title: "Time Format", type: string }
ticketTypes: { title: "Ticket Types", type: array, items: { $ref: "#/definitions/TicketTypeResponse" } }
artifactStates: { title: "Artifact States", type: array, items: { $ref: "#/definitions/Type" } }
roles: { title: "Roles", type: array, items: { type: string } }
Type:
type: object
required: [ id, name, icon ]
properties:
id: { title: ID, type: string, x-cols: 3, x-class: pr-2 }
name: { title: Name, type: string, x-cols: 3, x-class: pr-2 }
icon: { title: "Icon (https://materialdesignicons.com)", type: string, x-cols: 3, x-class: pr-2 }
color: { title: Color, type: string, x-cols: 3, enum: [ error, info, success, warning ] }

View File

@@ -0,0 +1,31 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/statistics:
get:
tags: [ "statistics" ]
summary: "Get statistics"
operationId: "getStatistics"
responses:
"200":
description: "successful operation"
schema: { $ref: '#/definitions/Statistics' }
examples:
test:
unassigned: 0
open_tickets_per_user: { }
tickets_per_week: { "2021-39": 3 }
tickets_per_type: { "alert": 2, "incident": 1 }
security: [ { roles: [ "ticket:read" ] } ]
definitions:
Statistics:
type: object
required: [ unassigned, open_tickets_per_user, tickets_per_week, tickets_per_type ]
properties:
unassigned: { type: integer }
open_tickets_per_user: { type: object, additionalProperties: { type: integer } }
tickets_per_week: { type: object, additionalProperties: { type: integer } }
tickets_per_type: { type: object, additionalProperties: { type: integer } }

18
definition/swagger.yaml Normal file
View File

@@ -0,0 +1,18 @@
swagger: "2.0"
info:
version: "0.0.3"
title: "Catalyst"
description: API for the catalyst incident response platform.
host: "."
basePath: "/api"
schemes:
# - "https"
- "http"
consumes: [ "application/json" ]
produces: [ "application/json" ]
paths: {}
definitions: {}

107
definition/tasks.yaml Normal file
View File

@@ -0,0 +1,107 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/tasks:
get:
tags: [ "tasks" ]
summary: "List tasks"
operationId: "listTasks"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TaskResponse" } }
examples:
test: [ ]
security: [ { roles: [ "ticket:read" ] } ]
definitions:
TaskForm:
type: object
required: [ name, type ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
Task:
type: object
required: [ name, type, done, created ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
TaskResponse:
type: object
required: [ name, type, done, created, order, active ]
properties:
name: { type: string, example: "Inform user" }
type: { type: string, enum: [ task, input, automation ], example: "task" }
done: { type: boolean }
owner: { type: string }
data: { type: object }
# automation
automation: { type: string }
payload: { type: object, additionalProperties: { type: string } }
# input
schema: { type: object }
# workflow
join: { type: boolean, example: false }
next: { type: object, additionalProperties: { type: string } }
created: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
closed: { type: string, format: "date-time", example: "1985-04-12T23:20:50.52Z" }
# helper
      order: { type: integer, format: "int64", example: 2 }
active: { type: boolean, example: false }
TaskWithContext:
type: object
required: [ ticket_id, ticket_name, playbook_id, playbook_name, task_id, task ]
properties:
      ticket_id: { type: integer, format: "int64" }
ticket_name: { type: string }
playbook_id: { type: string }
playbook_name: { type: string }
task_id: { type: string }
task: { $ref: '#/definitions/TaskResponse' }

104
definition/templates.yaml Normal file
View File

@@ -0,0 +1,104 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/templates:
get:
tags: [ "templates" ]
summary: "List templates"
operationId: "listTemplates"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TicketTemplateResponse" } }
examples:
test:
- id: default
name: Default
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Default\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:read" ] } ]
post:
tags: [ "templates" ]
summary: "Create a new template"
operationId: "createTemplate"
parameters:
- { name: "template", in: "body", description: "New template", required: true, schema: { $ref: "#/definitions/TicketTemplateForm" }, x-example: { name: "My Template", schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: "my-template"
name: "My Template"
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:write" ] } ]
/templates/{id}:
get:
tags: [ "templates" ]
summary: "Get a single template"
operationId: "getTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: default
name: Default
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Default\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:read" ] } ]
put:
tags: [ "templates" ]
summary: "Update an existing template"
operationId: "updateTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
- { name: "template", in: "body", description: "Template object that needs to be added", required: true, schema: { $ref: "#/definitions/TicketTemplateForm" }, x-example: { name: "My Template", schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTemplateResponse" }
examples:
test:
id: default
name: "My Template"
schema: "{\n \"definitions\": {},\n \"$schema\": \"http://json-schema.org/draft-07/schema#\",\n \"$id\": \"https://example.com/object1618746510.json\",\n \"title\": \"Event\",\n \"type\": \"object\",\n \"required\": [\n \"severity\",\n \"description\",\n \"tlp\"\n ],\n \"properties\": {\n \"severity\": {\n \"$id\": \"#root/severity\",\n \"title\": \"Severity\",\n \"type\": \"string\",\n \"default\": \"Medium\",\n \"nx-enum\": [\n \"Low\",\n \"Medium\",\n \"High\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"Low\",\n \"title\": \"Low\",\n \"icon\": \"mdi-chevron-up\"\n },\n {\n \"const\": \"Medium\",\n \"title\": \"Medium\",\n \"icon\": \"mdi-chevron-double-up\"\n },\n {\n \"const\": \"High\",\n \"title\": \"High\",\n \"icon\": \"mdi-chevron-triple-up\"\n }\n ]\n },\n \"tlp\": {\n \"$id\": \"#root/tlp\",\n \"title\": \"TLP\",\n \"type\": \"string\",\n \"nx-enum\": [\n \"White\",\n \"Green\",\n \"Amber\",\n \"Red\"\n ],\n \"x-cols\": 6,\n \"x-class\": \"pr-2\",\n \"x-display\": \"icon\",\n \"x-itemIcon\": \"icon\",\n \"oneOf\": [\n {\n \"const\": \"White\",\n \"title\": \"White\",\n \"icon\": \"mdi-alpha-w\"\n },\n {\n \"const\": \"Green\",\n \"title\": \"Green\",\n \"icon\": \"mdi-alpha-g\"\n },\n {\n \"const\": \"Amber\",\n \"title\": \"Amber\",\n \"icon\": \"mdi-alpha-a\"\n },\n {\n \"const\": \"Red\",\n \"title\": \"Red\",\n \"icon\": \"mdi-alpha-r\"\n }\n ]\n },\n \"description\": {\n \"$id\": \"#root/description\",\n \"title\": \"Description\",\n \"type\": \"string\",\n \"x-display\": \"textarea\",\n \"x-class\": \"pr-2\"\n }\n }\n}\n"
security: [ { roles: [ "template:write" ] } ]
delete:
tags: [ "templates" ]
summary: "Delete a template"
operationId: "deleteTemplate"
parameters:
- { name: "id", in: "path", description: "Template ID", required: true, type: string, x-example: "default" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "template:write" ] } ]
definitions:
TicketTemplateForm:
type: object
required: [ name, schema ]
properties:
id: { type: string }
name: { type: string }
schema: { type: string }
TicketTemplate:
type: object
required: [ name, schema ]
properties:
name: { type: string }
schema: { type: string }
TicketTemplateResponse:
type: object
required: [ id, name, schema ]
properties:
id: { type: string }
name: { type: string }
schema: { type: string }

1094
definition/tickets.yaml Normal file

File diff suppressed because it is too large Load Diff

121
definition/tickettype.yaml Normal file
View File

@@ -0,0 +1,121 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/tickettypes:
get:
tags: [ "tickettypes" ]
summary: "List tickettypes"
operationId: "listTicketTypes"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/TicketTypeResponse" } }
examples:
test:
- { icon: "mdi-alert", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-radioactive", id: "incident", name: "Incidents", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-fingerprint", id: "investigation", name: "Forensic Investigations", default_template: "default", default_playbooks: [ ] }
- { icon: "mdi-target", id: "hunt", name: "Threat Hunting", default_template: "default", default_playbooks: [ ] }
security: [ { roles: [ "tickettype:read" ] } ]
post:
tags: [ "tickettypes" ]
summary: "Create a new tickettype"
operationId: "createTicketType"
parameters:
- { name: "tickettype", in: "body", description: "New tickettype", required: true, schema: { $ref: "#/definitions/TicketTypeForm" }, x-example: { name: "TI Tickets", icon: "mdi-newspaper-variant-outline", default_template: "default", default_playbooks: [ ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
id: "ti-tickets"
name: "TI Tickets"
icon: "mdi-newspaper-variant-outline"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:write" ] } ]
/tickettypes/{id}:
get:
tags: [ "tickettypes" ]
summary: "Get a single tickettype"
operationId: "getTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
icon: "mdi-alert"
id: "alert"
name: "Alerts"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:read" ] } ]
put:
tags: [ "tickettypes" ]
summary: "Update an existing tickettype"
operationId: "updateTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
- { name: "tickettype", in: "body", description: "TicketType object that needs to be added", required: true, schema: { $ref: "#/definitions/TicketTypeForm" }, x-example: { icon: "mdi-bell", id: "alert", name: "Alerts", default_template: "default", default_playbooks: [ ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/TicketTypeResponse" }
examples:
test:
icon: "mdi-bell"
id: "alert"
name: "Alerts"
default_template: "default"
default_playbooks: [ ]
security: [ { roles: [ "tickettype:write" ] } ]
delete:
tags: [ "tickettypes" ]
summary: "Delete a tickettype"
operationId: "deleteTicketType"
parameters:
- { name: "id", in: "path", description: "TicketType ID", required: true, type: string, x-example: "alert" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "tickettype:write" ] } ]
definitions:
TicketTypeForm:
type: object
required: [ name, icon, default_template, default_playbooks ]
properties:
id: { type: string }
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }
TicketType:
type: object
required: [ name, icon, default_template, default_playbooks ]
properties:
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }
TicketTypeResponse:
type: object
required: [ id, name, icon, default_template, default_playbooks ]
properties:
id: { type: string }
icon: { type: string }
name: { type: string }
default_template: { type: string }
default_playbooks: { type: array, items: { type: string } }
default_groups: { type: array, items: { type: string } }

94
definition/userdata.yaml Normal file
View File

@@ -0,0 +1,94 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/currentuserdata:
get:
tags: [ "userdata" ]
summary: "Get current user data"
operationId: "currentUserData"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "currentuserdata:read" ] } ]
put:
tags: [ "userdata" ]
summary: "Update current user data"
operationId: "updateCurrentUserData"
parameters:
- { name: "userdata", in: "body", description: "User data object that needs to be added", required: true, schema: { $ref: "#/definitions/UserData" }, x-example: { name: "Bob Bad", email: "bob@example.org" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "currentuserdata:write" ] } ]
/userdata:
get:
tags: [ "userdata" ]
summary: "List userdata"
operationId: "listUserData"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/UserDataResponse" } }
examples:
test:
- { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "userdata:read" ] } ]
/userdata/{id}:
get:
tags: [ "userdata" ]
summary: "Get a single user data"
operationId: "getUserData"
parameters:
- { name: "id", in: "path", description: "User Data ID", required: true, type: string, x-example: "bob" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test:
id: bob
name: "Bob Bad"
email: "bob@example.org"
security: [ { roles: [ "userdata:read" ] } ]
put:
tags: [ "userdata" ]
summary: "Update an existing user data"
operationId: "updateUserData"
parameters:
- { name: "id", in: "path", description: "User Data ID", required: true, type: string, x-example: "bob" }
        - { name: "userdata", in: "body", description: "User data object that needs to be added", required: true, schema: { $ref: "#/definitions/UserData" }, x-example: { name: "Bob Bad", email: "bob@example.org" } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserDataResponse" }
examples:
test: { id: bob, name: "Bob Bad", email: "bob@example.org" }
security: [ { roles: [ "userdata:write" ] } ]
definitions:
UserData:
type: object
properties:
name: { type: string, x-example: "Robert Smith" }
email: { type: string, x-example: "bob@example.org" }
image: { type: string, x-display: "custom-avatar" }
timeformat: { title: "Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)", type: string }
UserDataResponse:
type: object
required: [ id ]
properties:
id: { type: string }
name: { type: string, x-example: "Robert Smith" }
email: { type: string, x-example: "bob@example.org" }
image: { type: string, x-display: "custom-avatar" }
timeformat: { title: "Time Format (https://moment.github.io/luxon/docs/manual/formatting.html#table-of-tokens)", type: string }

122
definition/users.yaml Normal file
View File

@@ -0,0 +1,122 @@
swagger: "2.0"
info: { version: "", title: "" }
paths:
/currentuser:
get:
tags: [ "users" ]
summary: "Get current user"
operationId: "currentUser"
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: bob, roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: false }
security: [ { roles: [ "currentuser:read" ] } ]
/users:
get:
tags: [ "users" ]
summary: "List users"
operationId: "listUsers"
responses:
"200":
description: "successful operation"
schema: { type: array, items: { $ref: "#/definitions/UserResponse" } }
examples:
test:
- { id: bob, blocked: false, roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], apikey: false }
- { id: script, roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
post:
tags: [ "users" ]
summary: "Create user"
operationId: "createUser"
parameters:
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { id: "syncscript", roles: [ "analyst" ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/NewUserResponse" }
examples:
test: { id: "syncscript", roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read" ], secret: "v39bOuobnlEljfWzjAgoKzhmnh1xSMxH", blocked: false }
security: [ { roles: [ "user:write" ] } ]
/users/{id}:
get:
tags: [ "users" ]
summary: "Get a single user"
operationId: "getUser"
parameters:
- { name: "id", in: "path", description: "user ID", required: true, type: string, x-example: "script" }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test: { id: "script", roles: [ "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ], blocked: false, apikey: true }
security: [ { roles: [ "user:read" ] } ]
put:
tags: [ "users" ]
summary: "Update user"
operationId: "updateUser"
parameters:
        - { name: "id", in: "path", description: "user ID", required: true, type: string, x-example: "bob" }
- { name: "user", in: "body", description: "user object that needs to be added", required: true, schema: { $ref: "#/definitions/UserForm" }, x-example: { roles: [ "analyst", "admin" ] } }
responses:
"200":
description: "successful operation"
schema: { $ref: "#/definitions/UserResponse" }
examples:
test:
id: bob
roles: [ "admin:backup:read", "admin:backup:restore", "admin:group:write", "admin:job:read", "admin:job:write", "admin:log:read", "admin:ticket:delete", "admin:user:write", "admin:userdata:read", "admin:userdata:write", "analyst:automation:read", "analyst:currentsettings:write", "analyst:currentuser:read", "analyst:currentuserdata:read", "analyst:file", "analyst:group:read", "analyst:playbook:read", "analyst:rule:read", "analyst:settings:read", "analyst:template:read", "analyst:ticket:read", "analyst:ticket:write", "analyst:tickettype:read", "analyst:user:read", "engineer:automation:write", "engineer:playbook:write", "engineer:rule:write", "engineer:template:write", "engineer:tickettype:write" ]
apikey: false
blocked: false
security: [ { roles: [ "user:write" ] } ]
delete:
tags: [ "users" ]
summary: "Delete user"
operationId: "deleteUser"
parameters:
- { name: "id", in: "path", description: "user ID", required: true, type: string, x-example: "script" }
responses:
"204": { description: "successful operation" }
security: [ { roles: [ "user:write" ] } ]
definitions:
UserForm:
type: object
required: [ id, blocked, roles, apikey ]
properties:
id: { type: string }
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
User:
type: object
required: [ blocked, apikey, roles ]
properties:
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
sha256: { type: string } # for api keys
UserResponse:
type: object
required: [ id, blocked, roles, apikey ]
properties:
id: { type: string }
blocked: { type: boolean }
apikey: { type: boolean }
roles: { type: array, items: { type: string } }
NewUserResponse:
type: object
required: [ id, blocked, roles ]
properties:
id: { type: string }
blocked: { type: boolean }
roles: { type: array, items: { type: string } }
secret: { type: string }

49
dev/docker-compose.yml Normal file
View File

@@ -0,0 +1,49 @@
version: '2.2'
services:
nginx:
image: nginx:1.21
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
ports: [ "80:80", "8529:8529", "9000:9000", "9001:9001", "9002:9002" ]
arangodb:
image: arangodb/arangodb:3.8.1
environment:
ARANGO_ROOT_PASSWORD: foobar
emitter:
image: emitter/server
environment:
- EMITTER_LICENSE=PfA8ID8izeSlDUlNZgNXo77DQV9QzlNtxTk64WreCXKfDZsREAVXUXwh20UKOZdkALbLTmOytO_iC6mc_twKAQ:3
# A9RysEsPJni8RaHeg_K0FKXQNfBrUyw-
minio:
image: minio/minio
environment:
MINIO_ROOT_USER: minio
MINIO_ROOT_PASSWORD: minio123
command: server /data -console-address ":9003"
postgres:
image: postgres
environment:
POSTGRES_DB: keycloak
POSTGRES_USER: keycloak
POSTGRES_PASSWORD: password
keycloak:
image: quay.io/keycloak/keycloak:14.0.0
environment:
DB_VENDOR: POSTGRES
DB_ADDR: postgres
DB_DATABASE: keycloak
DB_USER: keycloak
DB_SCHEMA: public
DB_PASSWORD: password
KEYCLOAK_USER: admin
KEYCLOAK_PASSWORD: admin
KEYCLOAK_IMPORT: /tmp/realm.json
PROXY_ADDRESS_FORWARDING: "true"
volumes:
- ./keycloak/realm.json:/tmp/realm.json
depends_on: [ postgres ]

1997
dev/keycloak/realm.json Normal file

File diff suppressed because it is too large Load Diff

89
dev/nginx.conf Normal file
View File

@@ -0,0 +1,89 @@
user www-data;
worker_processes 5;
error_log /var/log/nginx/error.log;
events {
worker_connections 4096;
}
http {
include mime.types;
index index.html index.htm;
log_format main '$remote_addr - $remote_user [$time_local] $status '
'"$request" $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
access_log /var/log/nginx/access.log main;
server {
listen 80 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
}
location /wss {
resolver 127.0.0.11 valid=30s;
set $upstream_catalyst host.docker.internal;
proxy_pass http://$upstream_catalyst:8000;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_read_timeout 86400;
}
}
server {
listen 8529 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_arangodb arangodb;
proxy_pass http://$upstream_arangodb:8529;
}
}
server {
listen 9000 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_minio minio;
proxy_pass http://$upstream_minio:9000;
}
}
server {
listen 9002 default_server;
server_name _;
location / {
resolver 127.0.0.11 valid=30s;
set $upstream_keycloak keycloak;
proxy_pass http://$upstream_keycloak:8080;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Server $host;
}
}
}
stream {
server {
listen 9001;
resolver 127.0.0.11 valid=30s;
set $upstream_emitter emitter;
proxy_pass $upstream_emitter:8080;
}
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 248 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 199 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 198 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 25 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 197 KiB

Some files were not shown because too many files have changed in this diff Show More