Compare commits

...

31 Commits

Author SHA1 Message Date
Jonas Plum
fedda9daaf chore: add more screenshots (#1155) 2025-09-21 14:50:28 +02:00
Jonas Plum
4d844c567c fix: multiple minor fixes (#1154) 2025-09-21 12:08:28 +00:00
Jonas Plum
9da90e7cc8 refactor: add root store (#1153) 2025-09-21 09:47:29 +00:00
Jonas Plum
d9f759c879 fix: ui basePath (#1152) 2025-09-21 09:02:00 +00:00
dependabot[bot]
1e3f2f24dc build(deps): bump github.com/go-chi/chi/v5 from 5.2.1 to 5.2.2 (#1149)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-09-04 07:02:08 +02:00
Jonas Plum
3cb097126c fix: rename binary (#1150) 2025-09-04 04:52:55 +00:00
Jonas Plum
df96362c3c fix: release docker ids (#1148) 2025-09-02 23:58:54 +00:00
Jonas Plum
377d2dad5f fix: cross-compile (#1147) 2025-09-02 23:27:23 +00:00
Jonas Plum
87fc0e6567 fix: working directory (#1146) 2025-09-02 22:02:52 +00:00
Jonas Plum
06fdae4ab9 fix: adapt goreleaser for cross compilation (#1145) 2025-09-02 21:43:33 +00:00
Jonas Plum
27129f24d5 fix: recreate http flag (#1144) 2025-09-02 20:54:47 +00:00
Jonas Plum
de105f19c1 fix: release CI (#1143) 2025-09-02 20:23:30 +00:00
Jonas Plum
eba2615ec0 refactor: remove pocketbase (#1138) 2025-09-02 21:58:08 +02:00
dependabot[bot]
f28c238135 build(deps): bump github.com/golang-jwt/jwt/v4 from 4.5.1 to 4.5.2 (#1132)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-03-31 06:39:18 +02:00
Jonas Plum
7de89a752c test: add upgrade tests (#1126) 2025-02-02 13:40:33 +01:00
Jonas Plum
b31f90c3ea feat: support integer custom field (#1123) 2025-01-02 23:37:26 +01:00
dependabot[bot]
87175f80a2 build(deps): bump github.com/golang-jwt/jwt/v4 from 4.5.0 to 4.5.1 (#1119)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-21 21:11:32 +01:00
dependabot[bot]
9a8125635b build(deps): bump golang.org/x/crypto from 0.24.0 to 0.31.0 (#1118)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2024-12-21 06:28:06 +01:00
Jonas Plum
86f4aa1d28 fix: relative day display (#1117) 2024-11-29 22:28:07 +00:00
Jonas Plum
7b92d59dff fix: relative day display (#1116) 2024-11-29 23:11:11 +01:00
Jonas Plum
6a8c92f1f6 fix: server setup (#1115) 2024-11-08 21:56:32 +01:00
Jonas Plum
9285aec468 fix: docker entrypoint permissions (#1114) 2024-11-06 02:12:09 +01:00
Jonas Plum
97d0cd3428 fix: goreleaser docker (#1113) 2024-11-06 02:02:47 +01:00
Jonas Plum
baba5b7a45 feat: docker entrypoint with environment variables (#1112) 2024-11-06 01:52:48 +01:00
Jonas Plum
d1cf75ab79 refactor: subcommands (#1111) 2024-11-06 01:21:31 +01:00
Jonas Plum
38a89f2c94 fix: docker latest image (#1110) 2024-11-04 23:08:28 +01:00
Jonas Plum
8c36ea5243 feat: scheduler example (#1109) 2024-11-04 23:07:17 +01:00
Jonas Plum
a2bdeecb0d feat: scheduler (#1108) 2024-11-04 22:30:20 +01:00
Jonas Plum
42797509f7 fix: set-app-url (#1107) 2024-11-04 20:50:18 +00:00
Jonas Plum
70ba16a6bd feat: docker healthcheck (#1106) 2024-11-04 20:47:55 +00:00
Jonas Plum
f42de34780 fix: ci docker tags 2024-09-30 03:55:39 +02:00
443 changed files with 43670 additions and 4284 deletions


@@ -5,68 +5,125 @@ on:
release: { types: [ published ] }
jobs:
fmt:
name: Fmt
generate-go:
name: Generate Go
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.22' }
- uses: oven-sh/setup-bun@v1
- run: |
bun install
mkdir -p dist
touch dist/index.html
working-directory: ui
- run: make install
- run: make fmt
with: { go-version: '1.25' }
- run: make install-golangci-lint generate-go
- run: git diff --exit-code
lint:
name: Lint
generate-ui:
name: Generate UI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.22' }
- run: |
mkdir -p ui/dist
touch ui/dist/index.html
- uses: golangci/golangci-lint-action@v6
with: { version: 'v1.59' }
build:
name: Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.22' }
- uses: oven-sh/setup-bun@v1
- run: make install-ui generate-ui
- run: git diff --exit-code
- run: make build-ui
test:
name: Test
fmt-go:
name: Fmt Go
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.22' }
with: { go-version: '1.25' }
- run: make install-golangci-lint fmt-go
- run: git diff --exit-code
fmt-ui:
name: Fmt UI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v1
- run: make install-ui fmt-ui
- run: |
mkdir -p ui/dist
touch ui/dist/index.html
lint-go:
name: Lint Go
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.25' }
- run: make install-golangci-lint lint-go
lint-ui:
name: Lint UI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v1
- run: make install-ui lint-ui
build-ui:
name: Build UI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: oven-sh/setup-bun@v1
- run: make install-ui build-ui
test-go:
name: Test Go
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.25' }
- run: make test-coverage
- uses: codecov/codecov-action@v4
with:
files: ./coverage.out
token: ${{ secrets.CODECOV_TOKEN }}
test-ui:
name: Test UI
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.25' }
- uses: oven-sh/setup-bun@v1
- run: make install-ui test-ui
test-playwright:
name: Test Playwright
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.25' }
- uses: oven-sh/setup-bun@v1
- run: make install-ui build-ui install-playwright test-playwright
list-upgrade-test-folders:
name: List Upgrade Test Folders
runs-on: ubuntu-latest
outputs:
folders: ${{ steps.set-dirs.outputs.matrix }}
steps:
- uses: actions/checkout@v4
- id: set-dirs
run: |
FOLDERS=$(ls -d ./testing/data/*/ | xargs -n 1 basename | jq -R . | jq -c -s .)
echo "matrix=$FOLDERS" >> $GITHUB_OUTPUT
test-upgrade-playwright:
name: Test Playwright Upgrade
needs: list-upgrade-test-folders
runs-on: ubuntu-latest
strategy:
matrix:
folder: ${{ fromJson(needs.list-upgrade-test-folders.outputs.folders) }}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-go@v5
with: { go-version: '1.25' }
- uses: oven-sh/setup-bun@v1
- run: mkdir -p catalyst_data
- run: cp testing/data/${{ matrix.folder }}/data.db catalyst_data/data.db
- run: make install-ui build-ui install-playwright test-playwright


@@ -18,20 +18,23 @@ jobs:
with:
fetch-depth: 0
- uses: actions/setup-go@v5
with: { go-version: '1.22' }
with: { go-version: '1.25' }
- uses: oven-sh/setup-bun@v1
- run: make build-ui
- run: make install-ui build-ui
- uses: docker/login-action@v3
with:
registry: ghcr.io
username: "securitybrewery"
password: ${{ secrets.GITHUB_TOKEN }}
- uses: goreleaser/goreleaser-action@v6
with:
distribution: goreleaser
version: '~> v2'
args: release --clean
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Run GoReleaser
run: |
docker run --rm --privileged \
-v `pwd`:/go/src/github.com/SecurityBrewery/catalyst \
-v /var/run/docker.sock:/var/run/docker.sock \
-w /go/src/github.com/SecurityBrewery/catalyst \
-e CGO_ENABLED=1 \
-e GITHUB_TOKEN=${{ secrets.GITHUB_TOKEN }} \
ghcr.io/goreleaser/goreleaser-cross:latest \
release --clean

.gitignore (9 changed lines)

@@ -35,4 +35,13 @@ pb_data
catalyst
catalyst_data
# ignore changes, needs to be disabled when adding new upgrade tests
testing/**/*.db
testing/**/*.db-shm
testing/**/*.db-wal
coverage.out
test-results
playwright/playwright-report
openapitools.json


@@ -1,27 +1,59 @@
version: "2"
run:
go: "1.22"
timeout: 5m
linters:
enable-all: true
default: all
disable:
# complexity
- maintidx
- funlen
# disable
- depguard
- dupl
- err113
- exhaustruct
- gomnd
- funcorder
- funlen
- gochecknoglobals
- godox
- gomoddirectives
- ireturn
- lll
- maintidx
- mnd
- noinlineerr
- nonamedreturns
- perfsprint
- prealloc
- tagalign
- tagliatelle
- testpackage
- unparam
- varnamelen
- wrapcheck
linters-settings:
gci:
sections:
- standard
- default
- prefix(github.com/SecurityBrewery/catalyst)
- wsl
exclusions:
generated: lax
presets:
- comments
- common-false-positives
- legacy
- std-error-handling
paths:
- third_party$
- builtin$
- examples$
formatters:
enable:
- gci
- gofmt
- gofumpt
- goimports
settings:
gci:
sections:
- standard
- default
- prefix(github.com/SecurityBrewery/catalyst)
exclusions:
generated: lax
paths:
- third_party$
- builtin$
- examples$


@@ -5,23 +5,94 @@ before:
- go mod tidy
builds:
- env:
- CGO_ENABLED=0
- id: darwin-amd64
main: ./
binary: catalyst
goos:
- darwin
goarch:
- amd64
env:
- CGO_ENABLED=1
- CC=o64-clang
- CXX=o64-clang++
flags:
- -mod=readonly
ldflags:
- -s -w -X main.version={{.Version}}
- id: linux-arm64
main: ./
binary: catalyst
goos:
- linux
- darwin
goarch:
- arm64
env:
- CGO_ENABLED=1
- CC=aarch64-linux-gnu-gcc
- CXX=aarch64-linux-gnu-g++
flags:
- -mod=readonly
ldflags:
- -s -w -X main.version={{.Version}}
- id: linux-amd64
main: ./
binary: catalyst
goos:
- linux
goarch:
- amd64
env:
- CGO_ENABLED=1
- CC=x86_64-linux-gnu-gcc
- CXX=x86_64-linux-gnu-g++
flags:
- -mod=readonly
ldflags:
- -s -w -X main.version={{.Version}}
- id: windows-amd64
main: ./
binary: catalyst
goos:
- windows
goarch:
- amd64
env:
- CGO_ENABLED=1
- CC=x86_64-w64-mingw32-gcc
- CXX=x86_64-w64-mingw32-g++
flags:
- -mod=readonly
ldflags:
- -s -w -X main.version={{.Version}}
- id: windows-arm64
main: ./
binary: catalyst
goos:
- windows
goarch:
- arm64
env:
- CGO_ENABLED=1
- CC=/llvm-mingw/bin/aarch64-w64-mingw32-gcc
- CXX=/llvm-mingw/bin/aarch64-w64-mingw32-g++
flags:
- -mod=readonly
ldflags:
- -s -w -X main.version={{.Version}}
dockers:
- ids: [ catalyst ]
dockerfile: docker/goreleaser.Dockerfile
- ids: [ linux-amd64 ]
dockerfile: docker/Dockerfile
image_templates:
- "ghcr.io/securitybrewery/catalyst:latest"
- "ghcr.io/securitybrewery/catalyst:main"
- "{{if not .Prerelease}}ghcr.io/securitybrewery/catalyst:latest{{end}}"
- "ghcr.io/securitybrewery/catalyst:{{.Tag}}"
- "ghcr.io/securitybrewery/catalyst:v{{.Major}}"
- "ghcr.io/securitybrewery/catalyst:v{{.Major}}.{{.Minor}}"
extra_files:
- docker/entrypoint.sh
archives:
- format: tar.gz
- formats: tar.gz
# this name template makes the OS and Arch compatible with the results of `uname`.
name_template: >-
{{ .ProjectName }}_
@@ -33,7 +104,7 @@ archives:
# use zip for windows archives
format_overrides:
- goos: windows
format: zip
formats: zip
changelog:
sort: asc

Makefile (234 changed lines)

@@ -1,62 +1,228 @@
.PHONY: install
install:
@echo "Installing..."
go install github.com/bombsimon/wsl/v4/cmd...@v4.4.1
go install mvdan.cc/gofumpt@v0.6.0
go install github.com/daixiang0/gci@v0.13.4
#########
## install
#########
.PHONY: fmt
fmt:
@echo "Formatting..."
.PHONY: install-golangci-lint
install-golangci-lint:
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/HEAD/install.sh | sh -s -- -b $(shell go env GOPATH)/bin v2.4.0
.PHONY: install-ui
install-ui:
cd ui && bun install
.PHONY: install-playwright
install-playwright:
cd ui && bun install && bun install:e2e
#########
## fmt
#########
.PHONY: fmt-go
fmt-go:
go mod tidy
go fmt ./...
gci write -s standard -s default -s "prefix(github.com/SecurityBrewery/catalyst)" .
gofumpt -l -w .
wsl -fix ./... || true
gofmt -r 'interface{} -> any' -w **/*.go
golangci-lint fmt ./...
.PHONY: fmt-ui
fmt-ui:
cd ui && bun format
.PHONY: lint
lint:
.PHONY: fmt
fmt: fmt-go fmt-ui
#########
## fix
#########
.PHONY: fix-go
fix-go:
golangci-lint run --fix ./...
.PHONY: fix-ui
fix-ui:
cd ui && bun lint --fix
.PHONY: fix
fix: fix-go fix-ui
#########
## lint
#########
.PHONY: lint-go
lint-go:
golangci-lint version
golangci-lint run ./...
golangci-lint run ./...
.PHONY: lint-ui
lint-ui:
cd ui && bun lint --max-warnings 0
.PHONY: lint
lint: lint-go lint-ui
#########
## test
#########
.PHONY: test-go
test-go:
go test ./...
.PHONY: test-ui
test-ui:
cd ui && bun test src
.PHONY: test-short
test-short: test-go test-ui
.PHONY: test-playwright
test-playwright:
cd ui && bun test:e2e
.PHONY: test-demo-playwright
test-demo-playwright:
cd ui && bun test:e2e:demo
.PHONY: test-playwright-ui
test-playwright-ui:
cd ui && bun test:e2e:ui
.PHONY: test-upgrade-playwright
test-upgrade-playwright:
./testing/test_all.sh
.PHONY: test
test:
@echo "Testing..."
go test -v ./...
cd ui && bun test
test: test-short test-playwright test-upgrade-playwright
.PHONY: test-coverage
test-coverage:
@echo "Testing with coverage..."
go test -coverpkg=./... -coverprofile=coverage.out -count 1 ./...
go tool cover -func=coverage.out
go tool cover -html=coverage.out
##########
## build
##########
.PHONY: build-ui
build-ui:
@echo "Building..."
cd ui && bun install
cd ui && bun build-only
touch ui/dist/.keep
.PHONY: build
build: build-ui
go build -o catalyst .
.PHONY: build-linux
build-linux: build-ui
GOOS=linux GOARCH=amd64 go build -o catalyst .
.PHONY: docker
docker: build-linux
docker build -f docker/Dockerfile -t catalyst .
############
## run
############
.PHONY: reset_data
reset_data:
rm -rf catalyst_data
.PHONY: copy_existing_data
copy_existing_data: reset_data
mkdir -p catalyst_data
cp testing/data/v0.14.1/data.db catalyst_data/data.db
.PHONY: dev
dev:
@echo "Running..."
rm -rf catalyst_data
dev: reset_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . set-feature-flags dev
go run . fake-data
go run . serve
go run . serve --app-url http://localhost:8090 --flags dev
.PHONY: dev-proxy-ui
dev-proxy-ui: reset_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . fake-data
UI_DEVSERVER=http://localhost:3000 go run . serve --app-url http://localhost:8090 --flags dev --flags demo
.PHONY: dev-upgrade-proxy-ui
dev-upgrade-proxy-ui: copy_existing_data
go run . admin create admin@catalyst-soar.com 1234567890
UI_DEVSERVER=http://localhost:3000 go run . serve --app-url http://localhost:8090 --flags dev
.PHONY: dev-10000-proxy-ui
dev-10000-proxy-ui: reset_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . fake-data --users 87 --tickets 12425
UI_DEVSERVER=http://localhost:3000 go run . serve --app-url http://localhost:8090 --flags dev
.PHONY: dev-upgrade
dev-upgrade: copy_existing_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . serve --app-url http://localhost:8090 --flags dev
.PHONY: dev-demo
dev-demo: copy_existing_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . serve --app-url http://localhost:8090 --flags demo
.PHONY: dev-10000
dev-10000:
@echo "Running..."
rm -rf catalyst_data
dev-10000: reset_data
go run . admin create admin@catalyst-soar.com 1234567890
go run . set-feature-flags dev
go run . fake-data --users 100 --tickets 10000
go run . serve
go run . fake-data --users 87 --tickets 12425
go run . serve --app-url http://localhost:8090 --flags dev
.PHONY: default-data
default-data:
rm -rf catalyst_data
go run . default-data
.PHONY: serve-ui
serve-ui:
cd ui && bun dev --port 3000
#########
## generate
#########
.PHONY: sqlc
sqlc:
rm -rf app/database/sqlc
cd app/database && go tool sqlc generate
sed -i.bak 's/Queries/ReadQueries/g' app/database/sqlc/read.sql.go
rm -f app/database/sqlc/read.sql.go.bak
sed -i.bak 's/Queries/WriteQueries/g' app/database/sqlc/write.sql.go
rm -f app/database/sqlc/write.sql.go.bak
cp app/database/sqlc.db.go.tmpl app/database/sqlc/db.go
.PHONY: openapi-go
openapi-go:
go tool oapi-codegen --config=app/openapi/config.yml openapi.yml
.PHONY: openapi-ui
openapi-ui:
rm -rf ui/src/client
cd ui && bun generate
.PHONY: openapi
openapi: openapi-go openapi-ui
.PHONY: generate-go
generate-go: openapi-go sqlc fmt-go
.PHONY: generate-ui
generate-ui: openapi-ui fmt-ui
.PHONY: generate
generate: generate-go generate-ui
#########
## screenshots
#########
.PHONY: screenshots
screenshots:
bash ui/screenshot.sh

admin.go (new file, 122 lines)

@@ -0,0 +1,122 @@
package main
import (
"context"
"errors"
"fmt"
"log/slog"
"github.com/urfave/cli/v3"
"github.com/SecurityBrewery/catalyst/app/auth/password"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
func adminCreate(ctx context.Context, command *cli.Command) error {
catalyst, cleanup, err := setup(ctx, command)
if err != nil {
return fmt.Errorf("failed to setup catalyst: %w", err)
}
defer cleanup()
if command.Args().Len() != 2 {
return errors.New("usage: catalyst admin create <email> <password>")
}
name, email := command.Args().Get(0), command.Args().Get(0)
pw := command.Args().Get(1)
passwordHash, tokenKey, err := password.Hash(pw)
if err != nil {
return errors.New("failed to hash password: " + err.Error())
}
user, err := catalyst.Queries.CreateUser(ctx, sqlc.CreateUserParams{
Name: &name,
Email: &email,
Username: "admin",
PasswordHash: passwordHash,
TokenKey: tokenKey,
Active: true,
})
if err != nil {
return err
}
if err := catalyst.Queries.AssignGroupToUser(ctx, sqlc.AssignGroupToUserParams{
UserID: user.ID,
GroupID: "admin",
}); err != nil {
return err
}
slog.InfoContext(ctx, "Creating admin", "id", user.ID, "email", user.Email)
return nil
}
func adminSetPassword(ctx context.Context, command *cli.Command) error {
catalyst, cleanup, err := setup(ctx, command)
if err != nil {
return fmt.Errorf("failed to setup catalyst: %w", err)
}
defer cleanup()
if command.Args().Len() != 2 {
return errors.New("usage: catalyst admin set-password <email> <password>")
}
mail := command.Args().Get(0)
user, err := catalyst.Queries.UserByEmail(ctx, &mail)
if err != nil {
return err
}
passwordHash, tokenKey, err := password.Hash(command.Args().Get(1))
if err != nil {
return errors.New("failed to hash password: " + err.Error())
}
if _, err := catalyst.Queries.UpdateUser(ctx, sqlc.UpdateUserParams{
ID: user.ID,
PasswordHash: &passwordHash,
TokenKey: &tokenKey,
}); err != nil {
return err
}
slog.InfoContext(ctx, "Setting password for admin", "id", user.ID, "email", user.Email)
return nil
}
func adminDelete(ctx context.Context, command *cli.Command) error {
catalyst, cleanup, err := setup(ctx, command)
if err != nil {
return fmt.Errorf("failed to setup catalyst: %w", err)
}
defer cleanup()
if command.Args().Len() != 1 {
return errors.New("usage: catalyst admin delete <email>")
}
mail := command.Args().Get(0)
user, err := catalyst.Queries.UserByEmail(ctx, &mail)
if err != nil {
return err
}
if err := catalyst.Queries.DeleteUser(ctx, user.ID); err != nil {
return err
}
slog.InfoContext(ctx, "Deleted admin", "id", user.ID, "email", mail)
return nil
}


@@ -1,69 +1,75 @@
package app
import (
"context"
"fmt"
"os"
"strings"
"net/http"
"github.com/pocketbase/pocketbase"
"github.com/pocketbase/pocketbase/core"
"github.com/SecurityBrewery/catalyst/migrations"
"github.com/SecurityBrewery/catalyst/reaction"
"github.com/SecurityBrewery/catalyst/webhook"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/hook"
"github.com/SecurityBrewery/catalyst/app/mail"
"github.com/SecurityBrewery/catalyst/app/migration"
"github.com/SecurityBrewery/catalyst/app/reaction"
"github.com/SecurityBrewery/catalyst/app/reaction/schedule"
"github.com/SecurityBrewery/catalyst/app/router"
"github.com/SecurityBrewery/catalyst/app/service"
"github.com/SecurityBrewery/catalyst/app/upload"
"github.com/SecurityBrewery/catalyst/app/webhook"
)
func init() { //nolint:gochecknoinits
migrations.Register()
type App struct {
Queries *sqlc.Queries
Hooks *hook.Hooks
router http.Handler
}
func App(dir string, test bool) (*pocketbase.PocketBase, error) {
app := pocketbase.NewWithConfig(pocketbase.Config{
DefaultDev: test || dev(),
DefaultDataDir: dir,
})
webhook.BindHooks(app)
reaction.BindHooks(app, test)
app.OnBeforeServe().Add(addRoutes())
app.OnAfterBootstrap().Add(func(e *core.BootstrapEvent) error {
if HasFlag(e.App, "demo") {
bindDemoHooks(e.App)
}
return nil
})
// Register additional commands
app.RootCmd.AddCommand(fakeDataCmd(app))
app.RootCmd.AddCommand(setFeatureFlagsCmd(app))
app.RootCmd.AddCommand(setAppURL(app))
if err := app.Bootstrap(); err != nil {
return nil, err
func New(ctx context.Context, dir string) (*App, func(), error) {
uploader, err := upload.New(dir)
if err != nil {
return nil, nil, fmt.Errorf("failed to create uploader: %w", err)
}
if err := MigrateDBs(app); err != nil {
return nil, err
queries, cleanup, err := database.DB(ctx, dir)
if err != nil {
return nil, nil, fmt.Errorf("failed to connect to database: %w", err)
}
return app, nil
if err := migration.Apply(ctx, queries, dir, uploader); err != nil {
return nil, nil, fmt.Errorf("failed to migrate database: %w", err)
}
mailer := mail.New(queries)
scheduler, err := schedule.New(ctx, queries)
if err != nil {
return nil, cleanup, fmt.Errorf("failed to create scheduler: %w", err)
}
hooks := hook.NewHooks()
service := service.New(queries, hooks, uploader, scheduler)
router, err := router.New(service, queries, uploader, mailer)
if err != nil {
return nil, nil, fmt.Errorf("failed to create router: %w", err)
}
if err := reaction.BindHooks(hooks, router, queries, false); err != nil {
return nil, nil, err
}
webhook.BindHooks(hooks, queries)
app := &App{
Queries: queries,
Hooks: hooks,
router: router,
}
return app, cleanup, nil
}
func bindDemoHooks(app core.App) {
app.OnRecordBeforeCreateRequest("files", "reactions").Add(func(e *core.RecordCreateEvent) error {
return fmt.Errorf("cannot create %s in demo mode", e.Record.Collection().Name)
})
app.OnRecordBeforeUpdateRequest("files", "reactions").Add(func(e *core.RecordUpdateEvent) error {
return fmt.Errorf("cannot update %s in demo mode", e.Record.Collection().Name)
})
app.OnRecordBeforeDeleteRequest("files", "reactions").Add(func(e *core.RecordDeleteEvent) error {
return fmt.Errorf("cannot delete %s in demo mode", e.Record.Collection().Name)
})
}
func dev() bool {
return strings.HasPrefix(os.Args[0], os.TempDir())
func (a *App) ServeHTTP(w http.ResponseWriter, r *http.Request) {
a.router.ServeHTTP(w, r)
}

app/auth/errorjson.go (new file, 16 lines)

@@ -0,0 +1,16 @@
package auth
import (
"fmt"
"net/http"
)
func unauthorizedJSON(w http.ResponseWriter, msg string) {
errorJSON(w, http.StatusUnauthorized, msg)
}
func errorJSON(w http.ResponseWriter, status int, msg string) {
w.WriteHeader(status)
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
_, _ = fmt.Fprintf(w, `{"status": %d, "error": %q, "message": %q}`, status, http.StatusText(status), msg)
}

app/auth/middleware.go (new file, 158 lines)

@@ -0,0 +1,158 @@
package auth
import (
"context"
"errors"
"fmt"
"log/slog"
"net/http"
"slices"
"strings"
strictnethttp "github.com/oapi-codegen/runtime/strictmiddleware/nethttp"
"github.com/SecurityBrewery/catalyst/app/auth/usercontext"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/openapi"
)
const bearerPrefix = "Bearer "
func Middleware(queries *sqlc.Queries) func(next http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/api/config" {
next.ServeHTTP(w, r)
return
}
authorizationHeader := r.Header.Get("Authorization")
bearerToken := strings.TrimPrefix(authorizationHeader, bearerPrefix)
user, claims, err := verifyAccessToken(r.Context(), bearerToken, queries)
if err != nil {
slog.ErrorContext(r.Context(), "invalid bearer token", "error", err)
unauthorizedJSON(w, "invalid bearer token")
return
}
scopes, err := scopes(claims)
if err != nil {
slog.ErrorContext(r.Context(), "failed to get scopes from token", "error", err)
unauthorizedJSON(w, "failed to get scopes")
return
}
// Set the user in the context
r = usercontext.UserRequest(r, user)
r = usercontext.PermissionRequest(r, scopes)
next.ServeHTTP(w, r)
})
}
}
func ValidateFileScopes(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
requiredScopes := []string{"file:read"}
if slices.Contains([]string{http.MethodPost, http.MethodPatch, http.MethodPut, http.MethodDelete}, r.Method) {
requiredScopes = []string{"file:write"}
}
if err := validateScopes(r.Context(), requiredScopes); err != nil {
slog.ErrorContext(r.Context(), "failed to validate scopes", "error", err)
unauthorizedJSON(w, "missing required scopes")
return
}
next.ServeHTTP(w, r)
})
}
func ValidateScopesStrict(next strictnethttp.StrictHTTPHandlerFunc, _ string) strictnethttp.StrictHTTPHandlerFunc {
return func(ctx context.Context, w http.ResponseWriter, r *http.Request, request any) (response any, err error) {
requiredScopes, err := requiredScopes(ctx)
if err != nil {
slog.ErrorContext(ctx, "failed to get required scopes", "error", err)
unauthorizedJSON(w, "failed to get required scopes")
return nil, fmt.Errorf("failed to get required scopes: %w", err)
}
if err := validateScopes(ctx, requiredScopes); err != nil {
slog.ErrorContext(ctx, "failed to validate scopes", "error", err)
unauthorizedJSON(w, "missing required scopes")
return nil, fmt.Errorf("missing required scopes: %w", err)
}
return next(ctx, w, r, request)
}
}
func LogError(next strictnethttp.StrictHTTPHandlerFunc, _ string) strictnethttp.StrictHTTPHandlerFunc {
return func(ctx context.Context, w http.ResponseWriter, r *http.Request, request any) (response any, err error) {
re, err := next(ctx, w, r, request)
if err != nil {
if err.Error() == "context canceled" {
// This is a common error when the request is canceled, e.g., by the client.
// We can ignore this error as it does not indicate a problem with the handler.
return re, nil
}
slog.ErrorContext(ctx, "handler error", "error", err, "method", r.Method, "path", r.URL.Path)
}
return re, err
}
}
func validateScopes(ctx context.Context, requiredScopes []string) error {
if len(requiredScopes) > 0 {
permissions, ok := usercontext.PermissionFromContext(ctx)
if !ok {
return errors.New("missing permissions")
}
if !hasScope(permissions, requiredScopes) {
return fmt.Errorf("missing required scopes: %v", requiredScopes)
}
}
return nil
}
func requiredScopes(ctx context.Context) ([]string, error) {
requiredScopesValue := ctx.Value(openapi.OAuth2Scopes)
if requiredScopesValue == nil {
return nil, nil
}
requiredScopes, ok := requiredScopesValue.([]string)
if !ok {
return nil, fmt.Errorf("invalid required scopes type: %T", requiredScopesValue)
}
return requiredScopes, nil
}
func hasScope(scopes []string, requiredScopes []string) bool {
if slices.Contains(scopes, "admin") {
// If the user has admin scope, they can access everything
return true
}
for _, s := range requiredScopes {
if !slices.Contains(scopes, s) {
return false
}
}
return true
}
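
Middleware above has the standard func(http.Handler) http.Handler shape, so it can be attached to a chi router with Use. The following is a minimal, self-contained sketch of that wiring; the inline authMiddleware is only a placeholder standing in for auth.Middleware(queries), which needs a real *sqlc.Queries handle, and the route and port are illustrative.

package main

import (
    "net/http"

    "github.com/go-chi/chi/v5"
)

func main() {
    // Placeholder with the same shape as auth.Middleware(queries).
    authMiddleware := func(next http.Handler) http.Handler {
        return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
            // Bearer-token verification would happen here before the
            // request reaches the wrapped handler.
            next.ServeHTTP(w, r)
        })
    }

    r := chi.NewRouter()
    r.Use(authMiddleware)
    r.Get("/api/tickets", func(w http.ResponseWriter, _ *http.Request) {
        w.Header().Set("Content-Type", "application/json")
        _, _ = w.Write([]byte(`{"tickets":[]}`))
    })

    _ = http.ListenAndServe(":8090", r)
}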

app/auth/middleware_test.go (new file, 188 lines)

@@ -0,0 +1,188 @@
package auth
import (
"bytes"
"context"
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/app/auth/usercontext"
"github.com/SecurityBrewery/catalyst/app/openapi"
)
func mockHandler(w http.ResponseWriter, _ *http.Request) {
_, _ = w.Write([]byte(`{"message":"OK"}`))
}
func TestService_ValidateScopes(t *testing.T) {
t.Parallel()
type args struct {
requiredScopes []string
permissions []string
next http.HandlerFunc
}
tests := []struct {
name string
args args
want httptest.ResponseRecorder
}{
{
name: "no scopes",
args: args{
requiredScopes: []string{"user:read"},
permissions: []string{},
next: mockHandler,
},
want: httptest.ResponseRecorder{
Code: http.StatusUnauthorized,
Body: bytes.NewBufferString(`{"error": "Unauthorized", "message": "missing required scopes", "status": 401}`),
},
},
{
name: "insufficient scopes",
args: args{
requiredScopes: []string{"user:write"},
permissions: []string{"user:read"},
next: mockHandler,
},
want: httptest.ResponseRecorder{
Code: http.StatusUnauthorized,
Body: bytes.NewBufferString(`{"error": "Unauthorized", "message": "missing required scopes", "status": 401}`),
},
},
{
name: "sufficient scopes",
args: args{
requiredScopes: []string{"user:read"},
permissions: []string{"user:read", "user:write"},
next: mockHandler,
},
want: httptest.ResponseRecorder{
Code: http.StatusOK,
Body: bytes.NewBufferString(`{"message":"OK"}`),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
handler := ValidateScopesStrict(func(_ context.Context, w http.ResponseWriter, r *http.Request, _ any) (response any, err error) {
tt.args.next(w, r)
return w, nil
}, "")
w := httptest.NewRecorder()
r := httptest.NewRequest(http.MethodGet, "/", nil)
//nolint: staticcheck
r = r.WithContext(context.WithValue(r.Context(), openapi.OAuth2Scopes, tt.args.requiredScopes))
r = usercontext.PermissionRequest(r, tt.args.permissions)
if _, err := handler(r.Context(), w, r, r); err != nil {
return
}
assert.Equal(t, tt.want.Code, w.Code, "response code should match expected value")
assert.JSONEq(t, tt.want.Body.String(), w.Body.String(), "response body should match expected value")
})
}
}
func Test_hasScope(t *testing.T) {
t.Parallel()
type args struct {
scopes []string
requiredScopes []string
}
tests := []struct {
name string
args args
want bool
}{
{
name: "no scopes",
args: args{
scopes: []string{},
requiredScopes: []string{"user:read"},
},
want: false,
},
{
name: "missing required scope",
args: args{
scopes: []string{"user:read"},
requiredScopes: []string{"user:write"},
},
},
{
name: "has required scope",
args: args{
scopes: []string{"user:read", "user:write"},
requiredScopes: []string{"user:read"},
},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
assert.Equalf(t, tt.want, hasScope(tt.args.scopes, tt.args.requiredScopes), "hasScope(%v, %v)", tt.args.scopes, tt.args.requiredScopes)
})
}
}
func Test_requiredScopes(t *testing.T) {
t.Parallel()
type args struct {
r *http.Request
}
tests := []struct {
name string
args args
want []string
wantErr assert.ErrorAssertionFunc
}{
{
name: "no required scopes",
args: args{
r: httptest.NewRequest(http.MethodGet, "/", nil),
},
want: nil,
wantErr: assert.NoError,
},
{
name: "valid required scopes",
args: args{
//nolint: staticcheck
r: httptest.NewRequest(http.MethodGet, "/", nil).WithContext(context.WithValue(t.Context(), openapi.OAuth2Scopes, []string{"user:read", "user:write"})),
},
want: []string{"user:read", "user:write"},
wantErr: assert.NoError,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := requiredScopes(tt.args.r.Context())
if !tt.wantErr(t, err, fmt.Sprintf("requiredScopes(%v)", tt.args.r)) {
return
}
assert.Equalf(t, tt.want, got, "requiredScopes(%v)", tt.args.r)
})
}
}


@@ -0,0 +1,32 @@
package password
import (
"crypto/rand"
"encoding/base64"
"fmt"
"golang.org/x/crypto/bcrypt"
)
func Hash(password string) (hashedPassword, tokenKey string, err error) {
hashedPasswordB, err := bcrypt.GenerateFromPassword([]byte(password), bcrypt.DefaultCost)
if err != nil {
return "", "", fmt.Errorf("failed to hash password: %w", err)
}
tokenKey, err = GenerateTokenKey()
if err != nil {
return "", "", err
}
return string(hashedPasswordB), tokenKey, nil
}
func GenerateTokenKey() (string, error) {
b := make([]byte, 32)
if _, err := rand.Read(b); err != nil {
return "", err
}
return base64.URLEncoding.EncodeToString(b), nil
}


@@ -0,0 +1,67 @@
package password
import (
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/crypto/bcrypt"
)
func TestHash(t *testing.T) {
t.Parallel()
type args struct {
password string
}
tests := []struct {
name string
args args
wantErr require.ErrorAssertionFunc
}{
{
name: "Hash valid password",
args: args{
password: "securePassword123!",
},
wantErr: require.NoError,
},
{
name: "Long password",
args: args{
password: strings.Repeat("a", 75),
},
wantErr: require.Error,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
gotHashedPassword, gotTokenKey, err := Hash(tt.args.password)
tt.wantErr(t, err, "Hash() should not return an error")
if err != nil {
return
}
assert.NotEmpty(t, gotHashedPassword, "Hash() gotHashedPassword should not be empty")
assert.NotEmpty(t, gotTokenKey, "Hash() gotTokenKey should not be empty")
require.NoError(t, bcrypt.CompareHashAndPassword([]byte(gotHashedPassword), []byte(tt.args.password)), "Hash() hashed password does not match original password")
assert.GreaterOrEqual(t, len(gotTokenKey), 43, "Hash() gotTokenKey should be at least 43 characters long")
})
}
}
func TestGenerateTokenKey(t *testing.T) {
t.Parallel()
tokenKey, err := GenerateTokenKey()
require.NoError(t, err)
assert.NotEmpty(t, tokenKey, "GenerateTokenKey() tokenKey should not be empty")
assert.GreaterOrEqual(t, len(tokenKey), 43, "GenerateTokenKey() tokenKey should be at least 43 characters long")
}

app/auth/permission.go (new file, 73 lines)

@@ -0,0 +1,73 @@
package auth
import (
"context"
"encoding/json"
"log/slog"
)
var (
TicketReadPermission = "ticket:read"
TicketWritePermission = "ticket:write"
FileReadPermission = "file:read"
FileWritePermission = "file:write"
TypeReadPermission = "type:read"
TypeWritePermission = "type:write"
UserReadPermission = "user:read"
UserWritePermission = "user:write"
GroupReadPermission = "group:read"
GroupWritePermission = "group:write"
ReactionReadPermission = "reaction:read"
ReactionWritePermission = "reaction:write"
WebhookReadPermission = "webhook:read"
WebhookWritePermission = "webhook:write"
SettingsReadPermission = "settings:read"
SettingsWritePermission = "settings:write"
)
func All() []string {
return []string{
TicketReadPermission,
TicketWritePermission,
FileReadPermission,
FileWritePermission,
TypeReadPermission,
TypeWritePermission,
UserReadPermission,
UserWritePermission,
GroupReadPermission,
GroupWritePermission,
ReactionReadPermission,
ReactionWritePermission,
WebhookReadPermission,
WebhookWritePermission,
SettingsReadPermission,
SettingsWritePermission,
}
}
func FromJSONArray(ctx context.Context, permissions string) []string {
var result []string
if err := json.Unmarshal([]byte(permissions), &result); err != nil {
slog.ErrorContext(ctx, "Failed to unmarshal permissions", "error", err)
return nil
}
return result
}
func ToJSONArray(ctx context.Context, permissions []string) string {
if len(permissions) == 0 {
return "[]"
}
data, err := json.Marshal(permissions)
if err != nil {
slog.ErrorContext(ctx, "Failed to marshal permissions", "error", err)
return "[]"
}
return string(data)
}


@@ -0,0 +1,84 @@
package auth
import (
"reflect"
"testing"
)
func TestFromJSONArray(t *testing.T) {
t.Parallel()
tests := []struct {
name string
input string
want []string
shouldError bool
}{
{
name: "Valid JSON array",
input: `["ticket:read", "ticket:write"]`,
want: []string{"ticket:read", "ticket:write"},
shouldError: false,
},
{
name: "Empty array",
input: "[]",
want: []string{},
shouldError: false,
},
{
name: "Invalid JSON",
input: "not json",
want: nil,
shouldError: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got := FromJSONArray(t.Context(), tt.input)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("FromJSONArray() = %v, want %v", got, tt.want)
}
})
}
}
func TestToJSONArray(t *testing.T) {
t.Parallel()
tests := []struct {
name string
input []string
want string
}{
{
name: "Valid permissions array",
input: []string{"ticket:read", "ticket:write"},
want: `["ticket:read","ticket:write"]`,
},
{
name: "Empty array",
input: []string{},
want: "[]",
},
{
name: "Nil array",
input: nil,
want: "[]",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got := ToJSONArray(t.Context(), tt.input)
if got != tt.want {
t.Errorf("ToJSONArray() = %v, want %v", got, tt.want)
}
})
}
}

app/auth/resetpassword.go (new file, 178 lines)

@@ -0,0 +1,178 @@
package auth
import (
"database/sql"
"encoding/json"
"errors"
"net/http"
"strings"
"time"
"github.com/SecurityBrewery/catalyst/app/auth/password"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/mail"
"github.com/SecurityBrewery/catalyst/app/settings"
)
func handleResetPasswordMail(queries *sqlc.Queries, mailer *mail.Mailer) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
type passwordResetData struct {
Email string `json:"email"`
}
b, err := json.Marshal(map[string]any{
"message": "Password reset email sent when the user exists",
})
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to create response: "+err.Error())
return
}
var data passwordResetData
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
errorJSON(w, http.StatusBadRequest, "Invalid request, missing email field")
return
}
user, err := queries.UserByEmail(r.Context(), &data.Email)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
// Do not reveal whether the user exists or not
w.WriteHeader(http.StatusOK)
_, _ = w.Write(b)
return
}
errorJSON(w, http.StatusInternalServerError, "Failed to get user: "+err.Error())
return
}
settings, err := settings.Load(r.Context(), queries)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to load settings: "+err.Error())
return
}
resetToken, err := createResetToken(&user, settings)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to create reset token: "+err.Error())
return
}
link := settings.Meta.AppURL + "/ui/password-reset?mail=" + data.Email + "&token=" + resetToken
subject := settings.Meta.ResetPasswordTemplate.Subject
subject = strings.ReplaceAll(subject, "{APP_NAME}", settings.Meta.AppName)
plainTextBody := `Hello,
Thank you for joining us at {APP_NAME}.
Click on the link below to verify your email address or copy the token into the app:
{ACTION_URL}
Thanks, {APP_NAME} team`
plainTextBody = strings.ReplaceAll(plainTextBody, "{ACTION_URL}", link)
plainTextBody = strings.ReplaceAll(plainTextBody, "{APP_NAME}", settings.Meta.AppName)
htmlBody := settings.Meta.ResetPasswordTemplate.Body
htmlBody = strings.ReplaceAll(htmlBody, "{ACTION_URL}", link)
htmlBody = strings.ReplaceAll(htmlBody, "{APP_NAME}", settings.Meta.AppName)
if err := mailer.Send(r.Context(), data.Email, subject, plainTextBody, htmlBody); err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to send password reset email: "+err.Error())
return
}
w.WriteHeader(http.StatusOK)
_, _ = w.Write(b)
}
}
func handlePassword(queries *sqlc.Queries) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
type passwordResetData struct {
Token string `json:"token"`
Email string `json:"email"`
Password string `json:"password"`
PasswordConfirm string `json:"password_confirm"`
}
var data passwordResetData
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
errorJSON(w, http.StatusBadRequest, "Invalid request, missing email or password fields")
return
}
if data.Password != data.PasswordConfirm {
errorJSON(w, http.StatusBadRequest, "Passwords do not match")
return
}
user, err := queries.UserByEmail(r.Context(), &data.Email)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
errorJSON(w, http.StatusBadRequest, "Invalid or expired reset token")
return
}
errorJSON(w, http.StatusInternalServerError, "Failed to get user: "+err.Error())
return
}
settings, err := settings.Load(r.Context(), queries)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to load settings: "+err.Error())
return
}
if err := verifyResetToken(data.Token, &user, settings.Meta.AppURL, settings.RecordPasswordResetToken.Secret); err != nil {
errorJSON(w, http.StatusBadRequest, "Invalid or expired reset token: "+err.Error())
return
}
passwordHash, tokenKey, err := password.Hash(data.Password)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to hash password: "+err.Error())
return
}
now := time.Now().UTC()
if _, err := queries.UpdateUser(r.Context(), sqlc.UpdateUserParams{
ID: user.ID,
PasswordHash: &passwordHash,
TokenKey: &tokenKey,
LastResetSentAt: &now,
}); err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to update password: "+err.Error())
return
}
b, err := json.Marshal(map[string]any{
"message": "Password reset successfully",
})
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to create response: "+err.Error())
return
}
w.WriteHeader(http.StatusOK)
_, _ = w.Write(b)
}
}


@@ -0,0 +1,94 @@
package auth
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
func TestService_createResetToken(t *testing.T) {
t.Parallel()
type args struct {
createUser *sqlc.User
tokenDuration time.Duration
waitDuration time.Duration
verifyUser *sqlc.User
}
tests := []struct {
name string
args args
wantErr assert.ErrorAssertionFunc
}{
{
name: "valid token",
args: args{
createUser: &sqlc.User{ID: "testuser", Tokenkey: "testtoken"},
tokenDuration: time.Hour,
waitDuration: 0,
verifyUser: &sqlc.User{
ID: "testuser",
Tokenkey: "testtoken",
Updated: mustParse(t, "2006-01-02 15:04:05Z", "2025-06-02 19:18:06.292Z"),
},
},
wantErr: assert.NoError,
},
{
name: "expired token",
args: args{
createUser: &sqlc.User{ID: "testuser", Tokenkey: "testtoken"},
tokenDuration: 0,
waitDuration: time.Second,
verifyUser: &sqlc.User{
ID: "testuser",
Tokenkey: "testtoken",
Updated: mustParse(t, "2006-01-02 15:04:05Z", "2025-06-02 19:18:06.292Z"),
},
},
wantErr: assert.Error,
},
{
name: "invalid token",
args: args{
createUser: &sqlc.User{ID: "testuser", Tokenkey: "testtoken"},
tokenDuration: time.Hour,
waitDuration: 0,
verifyUser: &sqlc.User{
ID: "invaliduser",
Tokenkey: "invalidtoken",
Updated: mustParse(t, "2006-01-02 15:04:05Z", "2025-06-02 19:18:06.292Z"),
},
},
wantErr: assert.Error,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := createResetTokenWithDuration(tt.args.createUser, "", "", tt.args.tokenDuration)
require.NoError(t, err, "createResetToken()")
time.Sleep(tt.args.waitDuration)
err = verifyResetToken(got, tt.args.verifyUser, "", "")
tt.wantErr(t, err, "verifyResetToken()")
})
}
}
func mustParse(t *testing.T, layout, value string) time.Time {
t.Helper()
parsed, err := time.Parse(layout, value)
require.NoError(t, err, "mustParse()")
return parsed
}

app/auth/server.go (new file, 58 lines)

@@ -0,0 +1,58 @@
package auth
import (
"encoding/json"
"net/http"
"strings"
"github.com/go-chi/chi/v5"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/mail"
)
func Server(queries *sqlc.Queries, mailer *mail.Mailer) http.Handler {
router := chi.NewRouter()
router.Get("/user", handleUser(queries))
router.Post("/local/login", handleLogin(queries))
router.Post("/local/reset-password-mail", handleResetPasswordMail(queries, mailer))
router.Post("/local/reset-password", handlePassword(queries))
return router
}
func handleUser(queries *sqlc.Queries) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
authorizationHeader := r.Header.Get("Authorization")
bearerToken := strings.TrimPrefix(authorizationHeader, bearerPrefix)
user, _, err := verifyAccessToken(r.Context(), bearerToken, queries)
if err != nil {
_, _ = w.Write([]byte("null"))
return
}
permissions, err := queries.ListUserPermissions(r.Context(), user.ID)
if err != nil {
errorJSON(w, http.StatusInternalServerError, err.Error())
return
}
b, err := json.Marshal(map[string]any{
"user": user,
"permissions": permissions,
})
if err != nil {
errorJSON(w, http.StatusInternalServerError, err.Error())
return
}
r.Header.Set("Content-Type", "application/json")
_, _ = w.Write(b)
}
}

app/auth/server_local.go (new file, 99 lines)

@@ -0,0 +1,99 @@
package auth
import (
"context"
"encoding/json"
"errors"
"fmt"
"net/http"
"time"
"golang.org/x/crypto/bcrypt"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/settings"
)
var ErrUserInactive = errors.New("user is inactive")
func handleLogin(queries *sqlc.Queries) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
type loginData struct {
Email string `json:"email"`
Password string `json:"password"`
}
var data loginData
if err := json.NewDecoder(r.Body).Decode(&data); err != nil {
unauthorizedJSON(w, "Invalid request")
return
}
user, err := loginWithMail(r.Context(), data.Email, data.Password, queries)
if err != nil {
if errors.Is(err, ErrUserInactive) {
unauthorizedJSON(w, "User is inactive")
return
}
unauthorizedJSON(w, "Login failed")
return
}
permissions, err := queries.ListUserPermissions(r.Context(), user.ID)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to get user permissions")
return
}
settings, err := settings.Load(r.Context(), queries)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to load settings")
return
}
duration := time.Duration(settings.RecordAuthToken.Duration) * time.Second
token, err := CreateAccessToken(r.Context(), user, permissions, duration, queries)
if err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to create login token")
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
response := map[string]string{
"token": token,
}
if err := json.NewEncoder(w).Encode(response); err != nil {
errorJSON(w, http.StatusInternalServerError, "Failed to encode response")
return
}
}
}
func loginWithMail(ctx context.Context, mail, password string, queries *sqlc.Queries) (*sqlc.User, error) {
user, err := queries.UserByEmail(ctx, &mail)
if err != nil {
return nil, fmt.Errorf("failed to find user by email %q: %w", mail, err)
}
if !user.Active {
return nil, ErrUserInactive
}
if err := bcrypt.CompareHashAndPassword([]byte(user.Passwordhash), []byte(password)); err != nil {
return nil, fmt.Errorf("invalid credentials: %w", err)
}
return &user, nil
}

app/auth/token.go (new file, 225 lines)

@@ -0,0 +1,225 @@
package auth
import (
"context"
"fmt"
"time"
"github.com/golang-jwt/jwt/v5"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/settings"
)
const (
purposeAccess = "access"
purposeReset = "reset"
scopeReset = "reset"
)
func CreateAccessToken(ctx context.Context, user *sqlc.User, permissions []string, duration time.Duration, queries *sqlc.Queries) (string, error) {
settings, err := settings.Load(ctx, queries)
if err != nil {
return "", fmt.Errorf("failed to load settings: %w", err)
}
return createToken(user, duration, purposeAccess, permissions, settings.Meta.AppURL, settings.RecordAuthToken.Secret)
}
func createResetToken(user *sqlc.User, settings *settings.Settings) (string, error) {
duration := time.Duration(settings.RecordPasswordResetToken.Duration) * time.Second
return createResetTokenWithDuration(user, settings.Meta.AppURL, settings.RecordPasswordResetToken.Secret, duration)
}
func createResetTokenWithDuration(user *sqlc.User, url, appToken string, duration time.Duration) (string, error) {
return createToken(user, duration, purposeReset, []string{scopeReset}, url, appToken)
}
func createToken(user *sqlc.User, duration time.Duration, purpose string, scopes []string, url, appToken string) (string, error) {
if scopes == nil {
scopes = []string{}
}
claims := jwt.MapClaims{
"sub": user.ID,
"exp": time.Now().Add(duration).Unix(),
"iat": time.Now().Unix(),
"iss": url,
"purpose": purpose,
"scopes": scopes,
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
signingKey := user.Tokenkey + appToken
return token.SignedString([]byte(signingKey))
}
func verifyToken(tokenStr string, user *sqlc.User, url, appToken string) (jwt.MapClaims, error) { //nolint:cyclop
signingKey := user.Tokenkey + appToken
token, err := jwt.Parse(tokenStr, func(t *jwt.Token) (any, error) {
if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
return nil, fmt.Errorf("unexpected algorithm: %v", t.Header["alg"])
}
return []byte(signingKey), nil
})
if err != nil {
return nil, fmt.Errorf("failed to verify token: %w", err)
}
if !token.Valid {
return nil, fmt.Errorf("token invalid")
}
claims, ok := token.Claims.(jwt.MapClaims)
if !ok {
return nil, fmt.Errorf("invalid token claims")
}
iss, err := claims.GetIssuer()
if err != nil {
return nil, fmt.Errorf("failed to get issuer: %w", err)
}
if iss != url {
return nil, fmt.Errorf("token issued by a different server")
}
sub, err := claims.GetSubject()
if err != nil {
return nil, fmt.Errorf("failed to get subject: %w", err)
}
if sub != user.ID {
return nil, fmt.Errorf("token belongs to a different user")
}
iat, err := claims.GetExpirationTime()
if err != nil {
return nil, fmt.Errorf("failed to get expiration time: %w", err)
}
if iat.Before(time.Now()) {
return nil, fmt.Errorf("token expired")
}
return claims, nil
}
func verifyAccessToken(ctx context.Context, bearerToken string, queries *sqlc.Queries) (*sqlc.User, jwt.MapClaims, error) {
token, _, err := jwt.NewParser().ParseUnverified(bearerToken, jwt.MapClaims{})
if err != nil {
return nil, nil, fmt.Errorf("failed to parse token: %w", err)
}
claims, ok := token.Claims.(jwt.MapClaims)
if !ok {
return nil, nil, fmt.Errorf("failed to parse token claims")
}
sub, err := claims.GetSubject()
if err != nil {
return nil, nil, fmt.Errorf("token invalid: %w", err)
}
user, err := queries.GetUser(ctx, sub)
if err != nil {
return nil, nil, fmt.Errorf("failed to retrieve user for subject %s: %w", sub, err)
}
settings, err := settings.Load(ctx, queries)
if err != nil {
return nil, nil, fmt.Errorf("failed to load settings: %w", err)
}
claims, err = verifyToken(bearerToken, &user, settings.Meta.AppURL, settings.RecordAuthToken.Secret)
if err != nil {
return nil, nil, fmt.Errorf("failed to verify token: %w", err)
}
if err := hasPurpose(claims, purposeAccess); err != nil {
return nil, nil, fmt.Errorf("failed to check scopes: %w", err)
}
return &user, claims, nil
}
func verifyResetToken(tokenStr string, user *sqlc.User, url, appToken string) error {
claims, err := verifyToken(tokenStr, user, url, appToken)
if err != nil {
return err
}
iat, err := claims.GetIssuedAt()
if err != nil {
return fmt.Errorf("failed to get issued at: %w", err)
}
lastUpdated := user.Updated // TODO: create a last reset at column
if iat.Before(lastUpdated) {
return fmt.Errorf("token already used")
}
if err := hasPurpose(claims, purposeReset); err != nil {
return fmt.Errorf("failed to check scopes: %w", err)
}
return nil
}
func hasPurpose(claim jwt.MapClaims, expectedPurpose string) error {
purpose, err := purpose(claim)
if err != nil {
return fmt.Errorf("failed to get purposes: %w", err)
}
if purpose != expectedPurpose {
return fmt.Errorf("token has wrong purpose: %s, expected: %s", purpose, expectedPurpose)
}
return nil
}
func purpose(claim jwt.MapClaims) (string, error) {
purposeClaim, ok := claim["purpose"]
if !ok {
return "", fmt.Errorf("no purpose found")
}
purpose, ok := purposeClaim.(string)
if !ok {
return "", fmt.Errorf("invalid purpose type")
}
return purpose, nil
}
func scopes(claim jwt.MapClaims) ([]string, error) {
scopesClaim, ok := claim["scopes"]
if !ok {
return nil, fmt.Errorf("no scopes found")
}
scopesSlice, ok := scopesClaim.([]any)
if !ok {
return nil, fmt.Errorf("invalid scopes claim type: %T", scopesClaim)
}
scopes := make([]string, 0, len(scopesSlice))
for _, scope := range scopesSlice {
scopeStr, ok := scope.(string)
if !ok {
return nil, fmt.Errorf("invalid scope claim element type: %T", scope)
}
scopes = append(scopes, scopeStr)
}
return scopes, nil
}
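
The token helpers above derive the HMAC signing key by concatenating the per-user token key with the application secret. Below is a minimal, self-contained sketch of that scheme using the same github.com/golang-jwt/jwt/v5 library; the user ID, issuer URL, and key values are illustrative placeholders rather than values from the repository.

package main

import (
    "fmt"
    "time"

    "github.com/golang-jwt/jwt/v5"
)

// signAndVerify sketches the scheme used above: the HMAC key is the
// per-user token key concatenated with the application secret, so
// rotating either value invalidates previously issued tokens.
func signAndVerify(userTokenKey, appSecret string) error {
    signingKey := []byte(userTokenKey + appSecret)

    claims := jwt.MapClaims{
        "sub":     "u_demo",                // illustrative user ID
        "iss":     "http://localhost:8090", // illustrative issuer / app URL
        "iat":     time.Now().Unix(),
        "exp":     time.Now().Add(time.Hour).Unix(),
        "purpose": "access",
        "scopes":  []string{"ticket:read"},
    }

    signed, err := jwt.NewWithClaims(jwt.SigningMethodHS256, claims).SignedString(signingKey)
    if err != nil {
        return fmt.Errorf("failed to sign token: %w", err)
    }

    parsed, err := jwt.Parse(signed, func(t *jwt.Token) (any, error) {
        if _, ok := t.Method.(*jwt.SigningMethodHMAC); !ok {
            return nil, fmt.Errorf("unexpected algorithm: %v", t.Header["alg"])
        }

        return signingKey, nil
    })
    if err != nil {
        return fmt.Errorf("failed to verify token: %w", err)
    }

    if !parsed.Valid {
        return fmt.Errorf("token invalid")
    }

    return nil
}

func main() {
    if err := signAndVerify("per-user-token-key", "app-secret"); err != nil {
        panic(err)
    }

    fmt.Println("token verified")
}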


@@ -0,0 +1,46 @@
package usercontext
import (
"context"
"net/http"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
type userKey struct{}
func UserRequest(r *http.Request, user *sqlc.User) *http.Request {
return r.WithContext(UserContext(r.Context(), user))
}
func UserContext(ctx context.Context, user *sqlc.User) context.Context {
return context.WithValue(ctx, userKey{}, user)
}
func UserFromContext(ctx context.Context) (*sqlc.User, bool) {
user, ok := ctx.Value(userKey{}).(*sqlc.User)
if !ok {
return nil, false
}
return user, true
}
type permissionKey struct{}
func PermissionRequest(r *http.Request, permissions []string) *http.Request {
return r.WithContext(PermissionContext(r.Context(), permissions))
}
func PermissionContext(ctx context.Context, permissions []string) context.Context {
return context.WithValue(ctx, permissionKey{}, permissions)
}
func PermissionFromContext(ctx context.Context) ([]string, bool) {
permissions, ok := ctx.Value(permissionKey{}).([]string)
if !ok {
return nil, false
}
return permissions, true
}


@@ -0,0 +1,116 @@
package usercontext
import (
"net/http"
"reflect"
"testing"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
func TestPermissionContext(t *testing.T) {
t.Parallel()
tests := []struct {
name string
user *sqlc.User
permissions []string
wantPerms []string
wantOk bool
}{
{
name: "Set and get permissions",
permissions: []string{"ticket:read", "ticket:write"},
wantPerms: []string{"ticket:read", "ticket:write"},
wantOk: true,
},
{
name: "No permissions set",
wantPerms: nil,
wantOk: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Test context functions
ctx := PermissionContext(t.Context(), tt.permissions)
gotPerms, gotOk := PermissionFromContext(ctx)
if !reflect.DeepEqual(gotPerms, tt.wantPerms) {
t.Errorf("PermissionFromContext() got perms = %v, want %v", gotPerms, tt.wantPerms)
}
if gotOk != tt.wantOk {
t.Errorf("PermissionFromContext() got ok = %v, want %v", gotOk, tt.wantOk)
}
// Test request functions
req := &http.Request{}
req = PermissionRequest(req, tt.permissions)
gotPerms, gotOk = PermissionFromContext(req.Context())
if !reflect.DeepEqual(gotPerms, tt.wantPerms) {
t.Errorf("PermissionFromContext() got perms = %v, want %v", gotPerms, tt.wantPerms)
}
if gotOk != tt.wantOk {
t.Errorf("PermissionFromContext() got ok = %v, want %v", gotOk, tt.wantOk)
}
})
}
}
func TestUserContext(t *testing.T) {
t.Parallel()
tests := []struct {
name string
user *sqlc.User
wantOk bool
}{
{
name: "Set and get user",
user: &sqlc.User{ID: "test-user"},
wantOk: true,
},
{
name: "No user set",
user: nil,
wantOk: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
// Test context functions
ctx := UserContext(t.Context(), tt.user)
gotUser, gotOk := UserFromContext(ctx)
if !reflect.DeepEqual(gotUser, tt.user) {
t.Errorf("UserFromContext() got user = %v, want %v", gotUser, tt.user)
}
if gotOk != tt.wantOk {
t.Errorf("UserFromContext() got ok = %v, want %v", gotOk, tt.wantOk)
}
// Test request functions
req := &http.Request{}
req = UserRequest(req, tt.user)
gotUser, gotOk = UserFromContext(req.Context())
if !reflect.DeepEqual(gotUser, tt.user) {
t.Errorf("UserFromContext() got user = %v, want %v", gotUser, tt.user)
}
if gotOk != tt.wantOk {
t.Errorf("UserFromContext() got ok = %v, want %v", gotOk, tt.wantOk)
}
})
}
}


@@ -1,4 +1,4 @@
package testing
package counter
import "sync"


@@ -1,4 +1,4 @@
package testing
package counter
import (
"testing"

app/data/demo.go (new file, 442 lines)

@@ -0,0 +1,442 @@
package data
import (
"context"
_ "embed"
"fmt"
"time"
"github.com/brianvoe/gofakeit/v7"
"github.com/SecurityBrewery/catalyst/app/auth"
"github.com/SecurityBrewery/catalyst/app/auth/password"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
const (
minimumUserCount = 1
minimumTicketCount = 1
)
var (
//go:embed scripts/createticket.py
createTicketPy string
//go:embed scripts/alertingest.py
alertIngestPy string
//go:embed scripts/assigntickets.py
assignTicketsPy string
)
func GenerateDemoData(ctx context.Context, queries *sqlc.Queries, userCount, ticketCount int) error {
if userCount < minimumUserCount {
userCount = minimumUserCount
}
if ticketCount < minimumTicketCount {
ticketCount = minimumTicketCount
}
types, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListTypesRow, error) {
return queries.ListTypes(ctx, sqlc.ListTypesParams{Limit: limit, Offset: offset})
})
if err != nil {
return fmt.Errorf("failed to list types: %w", err)
}
users, err := generateDemoUsers(ctx, queries, userCount, ticketCount)
if err != nil {
return fmt.Errorf("failed to create user records: %w", err)
}
if len(types) == 0 {
return fmt.Errorf("no types found")
}
if len(users) == 0 {
return fmt.Errorf("no users found")
}
if err := generateDemoTickets(ctx, queries, users, types, ticketCount); err != nil {
return fmt.Errorf("failed to create ticket records: %w", err)
}
if err := generateDemoReactions(ctx, queries, ticketCount); err != nil {
return fmt.Errorf("failed to create reaction records: %w", err)
}
if err := generateDemoGroups(ctx, queries, users, ticketCount); err != nil {
return fmt.Errorf("failed to create group records: %w", err)
}
return nil
}
func generateDemoUsers(ctx context.Context, queries *sqlc.Queries, count, ticketCount int) ([]sqlc.User, error) {
users := make([]sqlc.User, 0, count)
// create the test user
user, err := queries.GetUser(ctx, "u_test")
if err != nil {
newUser, err := createTestUser(ctx, queries)
if err != nil {
return nil, err
}
users = append(users, newUser)
} else {
users = append(users, user)
}
for range count - 1 {
newUser, err := createDemoUser(ctx, queries, ticketCount)
if err != nil {
return nil, err
}
users = append(users, newUser)
}
return users, nil
}
func createDemoUser(ctx context.Context, queries *sqlc.Queries, ticketCount int) (sqlc.User, error) {
username := gofakeit.Username()
passwordHash, tokenKey, err := password.Hash(gofakeit.Password(true, true, true, true, false, 16))
if err != nil {
return sqlc.User{}, fmt.Errorf("failed to hash password: %w", err)
}
created, updated := dates(ticketCount)
return queries.InsertUser(ctx, sqlc.InsertUserParams{
ID: database.GenerateID("u"),
Name: pointer.Pointer(gofakeit.Name()),
Email: pointer.Pointer(username + "@catalyst-soar.com"),
Username: username,
PasswordHash: passwordHash,
TokenKey: tokenKey,
Active: gofakeit.Bool(),
Created: created,
Updated: updated,
})
}
var ticketCreated = time.Date(2025, 2, 1, 11, 29, 35, 0, time.UTC)
func generateDemoTickets(ctx context.Context, queries *sqlc.Queries, users []sqlc.User, types []sqlc.ListTypesRow, count int) error { //nolint:cyclop
for range count {
newTicket, err := createDemoTicket(ctx, queries, random(types), random(users).ID, fakeTicketTitle(), fakeTicketDescription(), count)
if err != nil {
return fmt.Errorf("failed to create ticket: %w", err)
}
for range gofakeit.IntRange(1, 5) {
_, err := createDemoComment(ctx, queries, newTicket.ID, random(users).ID, fakeTicketComment(), count)
if err != nil {
return fmt.Errorf("failed to create comment for ticket %s: %w", newTicket.ID, err)
}
}
for range gofakeit.IntRange(1, 5) {
_, err := createDemoTimeline(ctx, queries, newTicket.ID, fakeTicketTimelineMessage(), count)
if err != nil {
return fmt.Errorf("failed to create timeline for ticket %s: %w", newTicket.ID, err)
}
}
for range gofakeit.IntRange(1, 5) {
_, err := createDemoTask(ctx, queries, newTicket.ID, random(users).ID, fakeTicketTask(), count)
if err != nil {
return fmt.Errorf("failed to create task for ticket %s: %w", newTicket.ID, err)
}
}
for range gofakeit.IntRange(1, 5) {
_, err := createDemoLink(ctx, queries, newTicket.ID, random([]string{"Blog", "Forum", "Wiki", "Documentation"}), gofakeit.URL(), count)
if err != nil {
return fmt.Errorf("failed to create link for ticket %s: %w", newTicket.ID, err)
}
}
}
return nil
}
func createDemoTicket(ctx context.Context, queries *sqlc.Queries, ticketType sqlc.ListTypesRow, userID, name, description string, ticketCount int) (sqlc.Ticket, error) {
created, updated := dates(ticketCount)
ticket, err := queries.InsertTicket(
ctx,
sqlc.InsertTicketParams{
ID: database.GenerateID(ticketType.Singular),
Name: name,
Description: description,
Open: gofakeit.Bool(),
Owner: &userID,
Schema: marshal(map[string]any{"type": "object", "properties": map[string]any{"tlp": map[string]any{"title": "TLP", "type": "string"}}}),
State: marshal(map[string]any{"severity": "Medium"}),
Type: ticketType.ID,
Created: created,
Updated: updated,
},
)
if err != nil {
return sqlc.Ticket{}, fmt.Errorf("failed to create ticket for user %s: %w", userID, err)
}
return ticket, nil
}
func createDemoComment(ctx context.Context, queries *sqlc.Queries, ticketID, userID, message string, ticketCount int) (*sqlc.Comment, error) {
created, updated := dates(ticketCount)
comment, err := queries.InsertComment(ctx, sqlc.InsertCommentParams{
ID: database.GenerateID("c"),
Ticket: ticketID,
Author: userID,
Message: message,
Created: created,
Updated: updated,
})
if err != nil {
return nil, fmt.Errorf("failed to create comment for ticket %s: %w", ticketID, err)
}
return &comment, nil
}
func createDemoTimeline(ctx context.Context, queries *sqlc.Queries, ticketID, message string, ticketCount int) (*sqlc.Timeline, error) {
created, updated := dates(ticketCount)
timeline, err := queries.InsertTimeline(ctx, sqlc.InsertTimelineParams{
ID: database.GenerateID("tl"),
Ticket: ticketID,
Message: message,
Time: ticketCreated,
Created: created,
Updated: updated,
})
if err != nil {
return nil, fmt.Errorf("failed to create timeline for ticket %s: %w", ticketID, err)
}
return &timeline, nil
}
func createDemoTask(ctx context.Context, queries *sqlc.Queries, ticketID, userID, name string, ticketCount int) (*sqlc.Task, error) {
created, updated := dates(ticketCount)
task, err := queries.InsertTask(ctx, sqlc.InsertTaskParams{
ID: database.GenerateID("t"),
Ticket: ticketID,
Owner: &userID,
Name: name,
Open: gofakeit.Bool(),
Created: created,
Updated: updated,
})
if err != nil {
return nil, fmt.Errorf("failed to create task for ticket %s: %w", ticketID, err)
}
return &task, nil
}
func createDemoLink(ctx context.Context, queries *sqlc.Queries, ticketID, name, url string, ticketCount int) (*sqlc.Link, error) {
created, updated := dates(ticketCount)
link, err := queries.InsertLink(ctx, sqlc.InsertLinkParams{
ID: database.GenerateID("l"),
Ticket: ticketID,
Name: name,
Url: url,
Created: created,
Updated: updated,
})
if err != nil {
return nil, fmt.Errorf("failed to create link for ticket %s: %w", ticketID, err)
}
return &link, nil
}
func generateDemoReactions(ctx context.Context, queries *sqlc.Queries, ticketCount int) error {
created, updated := dates(ticketCount)
_, err := queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-schedule",
Name: "Create New Ticket",
Trigger: "schedule",
Triggerdata: marshal(map[string]any{"expression": "12 * * * *"}),
Action: "python",
Actiondata: marshal(map[string]any{
"requirements": "requests",
"script": createTicketPy,
}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create reaction for schedule trigger: %w", err)
}
created, updated = dates(ticketCount)
_, err = queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-webhook",
Name: "Alert Ingest Webhook",
Trigger: "webhook",
Triggerdata: marshal(map[string]any{"token": "1234567890", "path": "webhook"}),
Action: "python",
Actiondata: marshal(map[string]any{
"requirements": "requests",
"script": alertIngestPy,
}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create reaction for webhook trigger: %w", err)
}
created, updated = dates(ticketCount)
_, err = queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-hook",
Name: "Assign new Tickets",
Trigger: "hook",
Triggerdata: marshal(map[string]any{"collections": []any{"tickets"}, "events": []any{"create"}}),
Action: "python",
Actiondata: marshal(map[string]any{
"requirements": "requests",
"script": assignTicketsPy,
}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create reaction for hook trigger: %w", err)
}
return nil
}
func generateDemoGroups(ctx context.Context, queries *sqlc.Queries, users []sqlc.User, ticketCount int) error { //nolint:cyclop
created, updated := dates(ticketCount)
_, err := queries.InsertGroup(ctx, sqlc.InsertGroupParams{
ID: "team-ir",
Name: "IR Team",
Permissions: auth.ToJSONArray(ctx, []string{}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create IR team group: %w", err)
}
created, updated = dates(ticketCount)
_, err = queries.InsertGroup(ctx, sqlc.InsertGroupParams{
ID: "team-seceng",
Name: "Security Engineering Team",
Permissions: auth.ToJSONArray(ctx, []string{}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create IR team group: %w", err)
}
created, updated = dates(ticketCount)
_, err = queries.InsertGroup(ctx, sqlc.InsertGroupParams{
ID: "team-security",
Name: "Security Team",
Permissions: auth.ToJSONArray(ctx, []string{}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create security team group: %w", err)
}
created, updated = dates(ticketCount)
_, err = queries.InsertGroup(ctx, sqlc.InsertGroupParams{
ID: "g-engineer",
Name: "Engineer",
Permissions: auth.ToJSONArray(ctx, []string{"reaction:read", "reaction:write"}),
Created: created,
Updated: updated,
})
if err != nil {
return fmt.Errorf("failed to create analyst group: %w", err)
}
for _, user := range users {
group := gofakeit.RandomString([]string{"team-seceng", "team-ir"})
if user.ID == "u_test" {
group = "admin"
}
if err := queries.AssignGroupToUser(ctx, sqlc.AssignGroupToUserParams{
UserID: user.ID,
GroupID: group,
}); err != nil {
return fmt.Errorf("failed to assign group %s to user %s: %w", group, user.ID, err)
}
}
err = queries.AssignParentGroup(ctx, sqlc.AssignParentGroupParams{
ParentGroupID: "team-ir",
ChildGroupID: "analyst",
})
if err != nil {
return fmt.Errorf("failed to assign parent group: %w", err)
}
err = queries.AssignParentGroup(ctx, sqlc.AssignParentGroupParams{
ParentGroupID: "team-seceng",
ChildGroupID: "g-engineer",
})
if err != nil {
return fmt.Errorf("failed to assign parent group: %w", err)
}
err = queries.AssignParentGroup(ctx, sqlc.AssignParentGroupParams{
ParentGroupID: "team-ir",
ChildGroupID: "team-security",
})
if err != nil {
return fmt.Errorf("failed to assign parent group: %w", err)
}
err = queries.AssignParentGroup(ctx, sqlc.AssignParentGroupParams{
ParentGroupID: "team-seceng",
ChildGroupID: "team-security",
})
if err != nil {
return fmt.Errorf("failed to assign parent group: %w", err)
}
return nil
}
func weeksAgo(c int) time.Time {
return time.Now().UTC().AddDate(0, 0, -7*c)
}
func dates(ticketCount int) (time.Time, time.Time) {
const ticketsPerWeek = 10
weeks := ticketCount / ticketsPerWeek
created := gofakeit.DateRange(weeksAgo(1), weeksAgo(weeks+1)).UTC()
updated := gofakeit.DateRange(created, time.Now()).UTC()
return created, updated
}
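
For orientation, the following is a minimal sketch of how GenerateDemoData could be invoked from a small seeding command; it is illustrative only and not part of this change set. The data directory path and the counts are assumptions. It reuses database.DB, upload.New, and migration.Apply as they appear elsewhere in this diff, since the demo generator expects the default types created by the migrations to exist.

package main

import (
	"context"
	"log"

	"github.com/SecurityBrewery/catalyst/app/data"
	"github.com/SecurityBrewery/catalyst/app/database"
	"github.com/SecurityBrewery/catalyst/app/migration"
	"github.com/SecurityBrewery/catalyst/app/upload"
)

func main() {
	ctx := context.Background()
	dir := "./catalyst_data" // assumed data directory

	// Open (or create) the SQLite database, as database.DB does above.
	queries, cleanup, err := database.DB(ctx, dir)
	if err != nil {
		log.Fatalf("open database: %v", err)
	}
	defer cleanup()

	// Apply migrations first so the default types exist; GenerateDemoData
	// returns an error when no types are found.
	uploader, err := upload.New(dir)
	if err != nil {
		log.Fatalf("create uploader: %v", err)
	}
	if err := migration.Apply(ctx, queries, dir, uploader); err != nil {
		log.Fatalf("apply migrations: %v", err)
	}

	// Seed 10 users and 100 tickets; counts below the minimums are clamped
	// to minimumUserCount / minimumTicketCount.
	if err := data.GenerateDemoData(ctx, queries, 10, 100); err != nil {
		log.Fatalf("generate demo data: %v", err)
	}
}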

26
app/data/demo_test.go Normal file
View File

@@ -0,0 +1,26 @@
package data_test
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/data"
catalystTesting "github.com/SecurityBrewery/catalyst/testing"
)
func TestGenerate(t *testing.T) {
t.Parallel()
app, cleanup, _ := catalystTesting.App(t)
t.Cleanup(cleanup)
_ = app.Queries.DeleteUser(t.Context(), "u_admin")
_ = app.Queries.DeleteUser(t.Context(), "u_bob_analyst")
_ = app.Queries.DeleteGroup(t.Context(), "g_admin")
_ = app.Queries.DeleteGroup(t.Context(), "g_analyst")
err := data.GenerateDemoData(t.Context(), app.Queries, 4, 4)
require.NoError(t, err, "failed to generate fake data")
}

View File

@@ -0,0 +1,20 @@
import sys
import json
import random
import os
import requests
# Parse the event from the webhook payload
event = json.loads(sys.argv[1])
body = json.loads(event["body"])
url = os.environ["CATALYST_APP_URL"]
header = {"Authorization": "Bearer " + os.environ["CATALYST_TOKEN"]}
# Create a new ticket
requests.post(url + "/api/tickets", headers=header, json={
"name": body["name"],
"type": "alert",
"open": True,
})

View File

@@ -0,0 +1,21 @@
import sys
import json
import random
import os
import requests
# Parse the ticket from the input
ticket = json.loads(sys.argv[1])
url = os.environ["CATALYST_APP_URL"]
header = {"Authorization": "Bearer " + os.environ["CATALYST_TOKEN"]}
# Get a random user
users = requests.get(url + "/api/users", headers=header).json()
random_user = random.choice(users)
# Assign the ticket to the random user
requests.patch(url + "/api/tickets/" + ticket["record"]["id"], headers=header, json={
"owner": random_user["id"]
})

View File

@@ -0,0 +1,20 @@
import sys
import json
import random
import os
import requests
url = os.environ["CATALYST_APP_URL"]
header = {"Authorization": "Bearer " + os.environ["CATALYST_TOKEN"]}
newtickets = requests.get(url + "/api/tickets?limit=3", headers=header).json()
for ticket in newtickets:
requests.delete(url + "/api/tickets/" + ticket["id"], headers=header)
# Create a new ticket
requests.post(url + "/api/tickets", headers=header, json={
"name": "New Ticket",
"type": "alert",
"open": True,
})

View File

@@ -0,0 +1,21 @@
import sys
import json
import random
import os
from pocketbase import PocketBase
# Connect to the PocketBase server
client = PocketBase(os.environ["CATALYST_APP_URL"])
client.auth_store.save(token=os.environ["CATALYST_TOKEN"])
newtickets = client.collection("tickets").get_list(1, 200, {"filter": 'name = "New Ticket"'})
for ticket in newtickets.items:
client.collection("tickets").delete(ticket.id)
# Create a new ticket
client.collection("tickets").create({
"name": "New Ticket",
"type": "alert",
"open": True,
})

227
app/data/testdata.go Normal file
View File

@@ -0,0 +1,227 @@
package data
import (
"context"
"encoding/json"
"log/slog"
"os"
"path"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
const (
AdminEmail = "admin@catalyst-soar.com"
AnalystEmail = "analyst@catalyst-soar.com"
)
func DefaultTestData(t *testing.T, dir string, queries *sqlc.Queries) {
t.Helper()
parseTime := func(s string) time.Time {
t, _ := time.Parse(time.RFC3339Nano, s)
return t
}
ctx := t.Context()
// Insert users
_, err := queries.InsertUser(ctx, sqlc.InsertUserParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
Email: pointer.Pointer("analyst@catalyst-soar.com"),
Username: "u_bob_analyst",
Name: pointer.Pointer("Bob Analyst"),
PasswordHash: "$2a$10$ZEHNh9ZKJ81N717wovDnMuLwZOLa6.g22IRzRr4goG6zGN.57UzJG",
TokenKey: "z3Jj8bbzcq_cSZs07XKoGlB0UtvmQiphHgwNkE4akoY=",
Active: true,
ID: "u_bob_analyst",
})
require.NoError(t, err, "failed to insert analyst user")
_, err = queries.InsertUser(ctx, sqlc.InsertUserParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
Email: pointer.Pointer("admin@catalyst-soar.com"),
Username: "u_admin",
Name: pointer.Pointer("Admin User"),
PasswordHash: "$2a$10$Z3/0HHWau6oi1t1aRPiI0uiVOWI.IosTAYEL0DJ2XJaalP9kesgBa",
TokenKey: "5BWDKLIAn3SQkpQlBUGrS_XEbFf91DsDpuh_Xmt4Nwg=",
Active: true,
ID: "u_admin",
})
require.NoError(t, err, "failed to insert admin user")
// Insert webhooks
_, err = queries.InsertWebhook(ctx, sqlc.InsertWebhookParams{
ID: "w_test_webhook",
Name: "Test Webhook",
Collection: "tickets",
Destination: "https://example.com",
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert webhook")
// Insert types
_, err = queries.InsertType(ctx, sqlc.InsertTypeParams{
ID: "test-type",
Singular: "Test",
Plural: "Tests",
Schema: []byte(`{}`),
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert type")
// Insert tickets
_, err = queries.InsertTicket(ctx, sqlc.InsertTicketParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
Description: "This is a test ticket.",
ID: "test-ticket",
Name: "Test Ticket",
Open: true,
Owner: pointer.Pointer("u_bob_analyst"),
Schema: json.RawMessage(`{"type":"object","properties":{"tlp":{"title":"TLP","type":"string"}}}`),
State: json.RawMessage(`{"tlp":"AMBER"}`),
Type: "incident",
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert ticket")
// Insert tasks
_, err = queries.InsertTask(ctx, sqlc.InsertTaskParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
ID: "k_test_task",
Name: "Test Task",
Open: true,
Owner: pointer.Pointer("u_bob_analyst"),
Ticket: "test-ticket",
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert task")
// Insert comments
_, err = queries.InsertComment(ctx, sqlc.InsertCommentParams{
Author: "u_bob_analyst",
Created: parseTime("2025-06-21T22:21:26.271Z"),
ID: "c_test_comment",
Message: "Initial comment on the test ticket.",
Ticket: "test-ticket",
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert comment")
// Insert timeline
_, err = queries.InsertTimeline(ctx, sqlc.InsertTimelineParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
ID: "h_test_timeline",
Message: "Initial timeline entry.",
Ticket: "test-ticket",
Time: parseTime("2023-01-01T00:00:00Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert timeline entry")
// Insert links
_, err = queries.InsertLink(ctx, sqlc.InsertLinkParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
ID: "l_test_link",
Name: "Catalyst",
Ticket: "test-ticket",
Updated: parseTime("2025-06-21T22:21:26.271Z"),
Url: "https://example.com",
})
require.NoError(t, err, "failed to insert link")
// Insert files
_, err = queries.InsertFile(ctx, sqlc.InsertFileParams{
Created: parseTime("2025-06-21T22:21:26.271Z"),
ID: "b_test_file",
Name: "hello.txt",
Size: 5,
Ticket: "test-ticket",
Updated: parseTime("2025-06-21T22:21:26.271Z"),
Blob: "hello_a20DUE9c77rj.txt",
})
require.NoError(t, err, "failed to insert file")
// Insert features
_, err = queries.CreateFeature(ctx, "dev")
require.NoError(t, err, "failed to insert feature 'dev'")
// Insert reactions
_, err = queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-test-webhook",
Name: "Reaction",
Action: "python",
Actiondata: []byte(`{"requirements":"requests","script":"print('Hello, World!')"}`),
Trigger: "webhook",
Triggerdata: []byte(`{"token":"1234567890","path":"test"}`),
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert reaction")
_, err = queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-test-proxy",
Action: "webhook",
Name: "Reaction",
Actiondata: []byte(`{"headers":{"Content-Type":"application/json"},"url":"http://127.0.0.1:12345/webhook"}`),
Trigger: "webhook",
Triggerdata: []byte(`{"path":"test2"}`),
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert reaction")
_, err = queries.InsertReaction(ctx, sqlc.InsertReactionParams{
ID: "r-test-hook",
Name: "Hook",
Action: "python",
Actiondata: []byte(`{"requirements":"requests","script":"import requests\nrequests.post('http://127.0.0.1:12346/test', json={'test':True})"}`),
Trigger: "hook",
Triggerdata: json.RawMessage(`{"collections":["tickets"],"events":["create"]}`),
Created: parseTime("2025-06-21T22:21:26.271Z"),
Updated: parseTime("2025-06-21T22:21:26.271Z"),
})
require.NoError(t, err, "failed to insert reaction")
// Insert user_groups
err = queries.AssignGroupToUser(ctx, sqlc.AssignGroupToUserParams{
UserID: "u_bob_analyst",
GroupID: "analyst",
})
require.NoError(t, err, "failed to assign analyst group to user")
err = queries.AssignGroupToUser(ctx, sqlc.AssignGroupToUserParams{
UserID: "u_admin",
GroupID: "admin",
})
require.NoError(t, err, "failed to assign admin group to user")
files, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListFilesRow, error) {
return queries.ListFiles(ctx, sqlc.ListFilesParams{Limit: limit, Offset: offset})
})
require.NoError(t, err, "failed to list files")
for _, file := range files {
_ = os.MkdirAll(path.Join(dir, "uploads", file.ID), 0o755)
infoFilePath := path.Join(dir, "uploads", file.ID+".info")
slog.InfoContext(t.Context(), "Creating file info", "path", infoFilePath)
err = os.WriteFile(infoFilePath, []byte(`{"MetaData":{"filetype":"text/plain"}}`), 0o600)
require.NoError(t, err, "failed to write file info")
err = os.WriteFile(path.Join(dir, "uploads", file.ID, file.Blob), []byte("hello"), 0o600)
require.NoError(t, err, "failed to write file blob")
}
}

16
app/data/testdata.sql Normal file
View File

@@ -0,0 +1,16 @@
INSERT INTO users VALUES('2025-06-21 22:21:26.271Z','2025-06-21 22:21:26.271Z','analyst@catalyst-soar.com','u_bob_analyst','','','Bob Analyst','$2a$10$ZEHNh9ZKJ81N717wovDnMuLwZOLa6.g22IRzRr4goG6zGN.57UzJG','z3Jj8bbzcq_cSZs07XKoGlB0UtvmQiphHgwNkE4akoY=','2025-06-21 22:21:26.271Z','u_bob_analyst',1);
INSERT INTO users VALUES('2025-06-21 22:21:26.271Z','2025-06-21 22:21:26.271Z','admin@catalyst-soar.com','u_admin','','','Admin User','$2a$10$Z3/0HHWau6oi1t1aRPiI0uiVOWI.IosTAYEL0DJ2XJaalP9kesgBa','5BWDKLIAn3SQkpQlBUGrS_XEbFf91DsDpuh_Xmt4Nwg=','2025-06-21 22:21:26.271Z','u_admin',1);
INSERT INTO webhooks VALUES('tickets','2025-06-21 22:21:26.271Z','https://example.com','w_test_webhook','Test Webhook','2025-06-21 22:21:26.271Z');
INSERT INTO types VALUES('2025-06-21 22:21:26.271Z','Bug','test-type','Tests','{}','Test','2025-06-21 22:21:26.271Z');
INSERT INTO tickets VALUES('2025-06-21 22:21:26.271Z','This is a test ticket.','test-ticket','Test Ticket',1,'u_bob_analyst','','{"type":"object","properties":{"tlp":{"title":"TLP","type":"string"}}}','{"tlp":"AMBER"}','incident','2025-06-21 22:21:26.271Z');
INSERT INTO tasks VALUES('2025-06-21 22:21:26.271Z','k_test_task','Test Task',1,'u_bob_analyst','test-ticket','2025-06-21 22:21:26.271Z');
INSERT INTO comments VALUES('u_bob_analyst','2025-06-21 22:21:26.271Z','c_test_comment','Initial comment on the test ticket.','test-ticket','2025-06-21 22:21:26.271Z');
INSERT INTO timeline VALUES('2025-06-21 22:21:26.271Z','h_test_timeline','Initial timeline entry.','test-ticket','2023-01-01T00:00:00Z','2025-06-21 22:21:26.271Z');
INSERT INTO links VALUES('2025-06-21 22:21:26.271Z','l_test_link','Catalyst','test-ticket','2025-06-21 22:21:26.271Z','https://example.com');
INSERT INTO files VALUES('hello_a20DUE9c77rj.txt','2025-06-21 22:21:26.271Z','b_test_file','hello.txt',5,'test-ticket','2025-06-21 22:21:26.271Z');
INSERT INTO features VALUES('2025-06-21 22:21:26.271Z','rce91818107f46a','dev','2025-06-21 22:21:26.271Z');
INSERT INTO reactions VALUES('python','{"requirements":"requests","script":"print(''Hello, World!'')"}','','r-test-webhook','Reaction','webhook','{"token":"1234567890","path":"test"}','2025-06-21 22:21:26.271Z');
INSERT INTO reactions VALUES('webhook','{"headers":{"Content-Type":"application/json"},"url":"http://127.0.0.1:12345/webhook"}','','r-test-proxy','Reaction','webhook','{"path":"test2"}','2025-06-21 22:21:26.271Z');
INSERT INTO reactions VALUES('python','{"requirements":"requests","script":"import requests\nrequests.post(''http://127.0.0.1:12346/test'', json={''test'':True})"}','','r-test-hook','Hook','hook','{"collections":["tickets"],"events":["create"]}','2025-06-21 22:21:26.271Z');
INSERT INTO user_groups VALUES('u_bob_analyst','analyst');
INSERT INTO user_groups VALUES('u_admin','admin');

84
app/data/testdata_test.go Normal file
View File

@@ -0,0 +1,84 @@
package data
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
func TestDBInitialization(t *testing.T) {
t.Parallel()
queries := NewTestDB(t, t.TempDir())
user, err := queries.SystemUser(t.Context())
require.NoError(t, err)
assert.Equal(t, "system", user.ID)
types, err := queries.ListTypes(t.Context(), sqlc.ListTypesParams{Offset: 0, Limit: 10})
require.NoError(t, err)
assert.GreaterOrEqual(t, len(types), 1)
}
func TestNewTestDBDefaultData(t *testing.T) {
t.Parallel()
queries := NewTestDB(t, t.TempDir())
user, err := queries.UserByEmail(t.Context(), pointer.Pointer(AdminEmail))
require.NoError(t, err)
assert.Equal(t, AdminEmail, *user.Email)
ticket, err := queries.Ticket(t.Context(), "test-ticket")
require.NoError(t, err)
assert.Equal(t, "test-ticket", ticket.ID)
comment, err := queries.GetComment(t.Context(), "c_test_comment")
require.NoError(t, err)
assert.Equal(t, "c_test_comment", comment.ID)
timeline, err := queries.GetTimeline(t.Context(), "h_test_timeline")
require.NoError(t, err)
assert.Equal(t, "h_test_timeline", timeline.ID)
}
func TestReadWrite(t *testing.T) {
t.Parallel()
queries := NewTestDB(t, t.TempDir())
for range 3 {
y, err := queries.CreateType(t.Context(), sqlc.CreateTypeParams{
Singular: "Foo",
Plural: "Foos",
Icon: pointer.Pointer("Bug"),
Schema: json.RawMessage("{}"),
})
require.NoError(t, err)
_, err = queries.GetType(t.Context(), y.ID)
require.NoError(t, err)
err = queries.DeleteType(t.Context(), y.ID)
require.NoError(t, err)
}
}
func TestRead(t *testing.T) {
t.Parallel()
queries := NewTestDB(t, t.TempDir())
// read from a table
_, err := queries.GetUser(t.Context(), "u_bob_analyst")
require.NoError(t, err)
// read from a view
_, err = queries.GetSidebar(t.Context())
require.NoError(t, err)
}

27
app/data/testing.go Normal file
View File

@@ -0,0 +1,27 @@
package data
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/migration"
"github.com/SecurityBrewery/catalyst/app/upload"
)
func NewTestDB(t *testing.T, dir string) *sqlc.Queries {
t.Helper()
queries := database.TestDB(t, dir)
uploader, err := upload.New(dir)
require.NoError(t, err)
err = migration.Apply(t.Context(), queries, dir, uploader)
require.NoError(t, err)
DefaultTestData(t, dir, queries)
return queries
}

40
app/data/testing_test.go Normal file
View File

@@ -0,0 +1,40 @@
package data
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
func TestNewTestDB(t *testing.T) {
t.Parallel()
dir := t.TempDir()
queries := NewTestDB(t, dir)
user, err := queries.GetUser(t.Context(), "u_bob_analyst")
require.NoError(t, err)
assert.Equal(t, "u_bob_analyst", user.ID)
assert.Equal(t, "Bob Analyst", *user.Name)
assert.Equal(t, time.Date(2025, time.June, 21, 22, 21, 26, 271000000, time.UTC), user.Created)
alice, err := queries.InsertUser(t.Context(), sqlc.InsertUserParams{
ID: "u_alice_admin",
Name: pointer.Pointer("Alice Admin"),
Username: "alice_admin",
PasswordHash: "",
TokenKey: "",
Created: time.Date(2025, time.June, 21, 22, 21, 26, 0, time.UTC),
Updated: time.Date(2025, time.June, 21, 22, 21, 26, 0, time.UTC),
})
require.NoError(t, err)
assert.Equal(t, time.Date(2025, time.June, 21, 22, 21, 26, 0, time.UTC), alice.Created)
}

View File

@@ -1,7 +1,28 @@
package fakedata
package data
import "github.com/brianvoe/gofakeit/v7"
func fakeTicketTitle() string {
return random([]string{
"Unauthorized Access Attempt",
"Multiple Failed Login Attempts",
"Suspicious File Download",
"User Account Locked",
"Unusual Network Activity",
"Phishing Email Reported",
"Sensitive Data Transfer Detected",
"Malware Infection Found",
"Unauthorized Device Connected",
"Brute-Force Attack Attempt",
"Security Patch Required",
"External IP Address Probing Network",
"Suspicious Behavior Detected",
"Unauthorized Software Installation",
"Access Control System Malfunction",
"DDoS Attack Detected",
})
}
func fakeTicketDescription() string {
return random([]string{
"Unauthorized access attempt detected in the main server room.",

View File

@@ -1,4 +1,4 @@
package fakedata
package data
import (
"testing"

216
app/data/upgradetest.go Normal file
View File

@@ -0,0 +1,216 @@
package data
import (
"context"
_ "embed"
"encoding/json"
"fmt"
"time"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
//go:embed scripts/upgradetest.py
var Script string
func GenerateUpgradeTestData(ctx context.Context, queries *sqlc.Queries) error { //nolint:cyclop
if _, err := createTestUser(ctx, queries); err != nil {
return err
}
for _, ticket := range CreateUpgradeTestDataTickets() {
_, err := queries.InsertTicket(ctx, sqlc.InsertTicketParams{
ID: ticket.ID,
Name: ticket.Name,
Type: ticket.Type,
Description: ticket.Description,
Open: ticket.Open,
Schema: ticket.Schema,
State: ticket.State,
Owner: ticket.Owner,
Resolution: ticket.Resolution,
Created: ticket.Created,
Updated: ticket.Updated,
})
if err != nil {
return fmt.Errorf("failed to create ticket: %w", err)
}
}
for _, comment := range CreateUpgradeTestDataComments() {
_, err := queries.InsertComment(ctx, sqlc.InsertCommentParams{
ID: comment.ID,
Ticket: comment.Ticket,
Author: comment.Author,
Message: comment.Message,
Created: comment.Created,
Updated: comment.Updated,
})
if err != nil {
return fmt.Errorf("failed to create comment: %w", err)
}
}
for _, timeline := range CreateUpgradeTestDataTimeline() {
_, err := queries.InsertTimeline(ctx, sqlc.InsertTimelineParams{
ID: timeline.ID,
Ticket: timeline.Ticket,
Time: timeline.Time,
Message: timeline.Message,
Created: timeline.Created,
Updated: timeline.Updated,
})
if err != nil {
return fmt.Errorf("failed to create timeline: %w", err)
}
}
for _, task := range CreateUpgradeTestDataTasks() {
_, err := queries.InsertTask(ctx, sqlc.InsertTaskParams{
ID: task.ID,
Ticket: task.Ticket,
Name: task.Name,
Open: task.Open,
Owner: task.Owner,
Created: task.Created,
Updated: task.Updated,
})
if err != nil {
return fmt.Errorf("failed to create task: %w", err)
}
}
for _, link := range CreateUpgradeTestDataLinks() {
_, err := queries.InsertLink(ctx, sqlc.InsertLinkParams{
ID: link.ID,
Ticket: link.Ticket,
Url: link.Url,
Name: link.Name,
Created: link.Created,
Updated: link.Updated,
})
if err != nil {
return fmt.Errorf("failed to create link: %w", err)
}
}
for _, reaction := range CreateUpgradeTestDataReaction() {
_, err := queries.InsertReaction(ctx, sqlc.InsertReactionParams{ //nolint: staticcheck
ID: reaction.ID,
Name: reaction.Name,
Trigger: reaction.Trigger,
Triggerdata: reaction.Triggerdata,
Action: reaction.Action,
Actiondata: reaction.Actiondata,
Created: reaction.Created,
Updated: reaction.Updated,
})
if err != nil {
return fmt.Errorf("failed to create reaction: %w", err)
}
}
return nil
}
func CreateUpgradeTestDataTickets() map[string]sqlc.Ticket {
return map[string]sqlc.Ticket{
"t_0": {
ID: "t_0",
Created: ticketCreated,
Updated: ticketCreated.Add(time.Minute * 5),
Name: "phishing-123",
Type: "alert",
Description: "Phishing email reported by several employees.",
Open: true,
Schema: json.RawMessage(`{"type":"object","properties":{"tlp":{"title":"TLP","type":"string"}}}`),
State: json.RawMessage(`{"severity":"Medium"}`),
Owner: pointer.Pointer("u_test"),
},
}
}
func CreateUpgradeTestDataComments() map[string]sqlc.Comment {
return map[string]sqlc.Comment{
"c_0": {
ID: "c_0",
Created: ticketCreated.Add(time.Minute * 10),
Updated: ticketCreated.Add(time.Minute * 15),
Ticket: "t_0",
Author: "u_test",
Message: "This is a test comment.",
},
}
}
func CreateUpgradeTestDataTimeline() map[string]sqlc.Timeline {
return map[string]sqlc.Timeline{
"tl_0": {
ID: "tl_0",
Created: ticketCreated.Add(time.Minute * 15),
Updated: ticketCreated.Add(time.Minute * 20),
Ticket: "t_0",
Time: ticketCreated.Add(time.Minute * 15),
Message: "This is a test timeline message.",
},
}
}
func CreateUpgradeTestDataTasks() map[string]sqlc.Task {
return map[string]sqlc.Task{
"ts_0": {
ID: "ts_0",
Created: ticketCreated.Add(time.Minute * 20),
Updated: ticketCreated.Add(time.Minute * 25),
Ticket: "t_0",
Name: "This is a test task.",
Open: true,
Owner: pointer.Pointer("u_test"),
},
}
}
func CreateUpgradeTestDataLinks() map[string]sqlc.Link {
return map[string]sqlc.Link{
"l_0": {
ID: "l_0",
Created: ticketCreated.Add(time.Minute * 25),
Updated: ticketCreated.Add(time.Minute * 30),
Ticket: "t_0",
Url: "https://www.example.com",
Name: "This is a test link.",
},
}
}
func CreateUpgradeTestDataReaction() map[string]sqlc.Reaction {
var (
reactionCreated = time.Date(2025, 2, 1, 11, 30, 0, 0, time.UTC)
reactionUpdated = reactionCreated.Add(time.Minute * 5)
)
createTicketActionData := marshal(map[string]any{
"requirements": "pocketbase",
"script": Script,
})
return map[string]sqlc.Reaction{
"w_0": {
ID: "w_0",
Created: reactionCreated,
Updated: reactionUpdated,
Name: "Create New Ticket",
Trigger: "schedule",
Triggerdata: json.RawMessage(`{"expression":"12 * * * *"}`),
Action: "python",
Actiondata: createTicketActionData,
},
}
}
func marshal(m map[string]any) json.RawMessage {
b, _ := json.Marshal(m) //nolint:errchkjson
return b
}

30
app/data/user.go Normal file
View File

@@ -0,0 +1,30 @@
package data
import (
"context"
"fmt"
"time"
"github.com/SecurityBrewery/catalyst/app/auth/password"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/pointer"
)
func createTestUser(ctx context.Context, queries *sqlc.Queries) (sqlc.User, error) {
passwordHash, tokenKey, err := password.Hash("1234567890")
if err != nil {
return sqlc.User{}, fmt.Errorf("failed to hash password: %w", err)
}
return queries.InsertUser(ctx, sqlc.InsertUserParams{
ID: "u_test",
Username: "u_test",
Name: pointer.Pointer("Test User"),
Email: pointer.Pointer("user@catalyst-soar.com"),
Active: true,
PasswordHash: passwordHash,
TokenKey: tokenKey,
Created: time.Now(),
Updated: time.Now(),
})
}

112
app/database/db.go Normal file
View File

@@ -0,0 +1,112 @@
package database
import (
"context"
"crypto/rand"
"database/sql"
"fmt"
"log/slog"
"os"
"path/filepath"
"strings"
"testing"
"time"
_ "github.com/mattn/go-sqlite3" // import sqlite driver
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
const sqliteDriver = "sqlite3"
func DB(ctx context.Context, dir string) (*sqlc.Queries, func(), error) {
filename := filepath.Join(dir, "data.db")
slog.InfoContext(ctx, "Connecting to database", "path", filename)
// see https://briandouglas.ie/sqlite-defaults/ for more details
pragmas := []string{
// Enable WAL mode for better concurrency
"journal_mode=WAL",
// Use NORMAL synchronous mode for a balance of durability and performance
"synchronous=NORMAL",
// Set busy timeout to 5 seconds
"busy_timeout=5000",
// Set cache size to 20MB
"cache_size=-20000",
// Enable foreign key checks
"foreign_keys=ON",
// Enable incremental vacuuming
"auto_vacuum=INCREMENTAL",
// Set temp store to memory
"temp_store=MEMORY",
// Set mmap size to 2GB
"mmap_size=2147483648",
// Set page size to 8192
"page_size=8192",
}
_ = os.MkdirAll(filepath.Dir(filename), 0o755)
write, err := sql.Open(sqliteDriver, fmt.Sprintf("file:%s", filename))
if err != nil {
return nil, nil, fmt.Errorf("failed to open database: %w", err)
}
write.SetMaxOpenConns(1)
write.SetConnMaxIdleTime(time.Minute)
for _, pragma := range pragmas {
if _, err := write.ExecContext(ctx, fmt.Sprintf("PRAGMA %s", pragma)); err != nil {
return nil, nil, fmt.Errorf("failed to set pragma %s: %w", pragma, err)
}
}
read, err := sql.Open(sqliteDriver, fmt.Sprintf("file:%s?mode=ro", filename))
if err != nil {
return nil, nil, fmt.Errorf("failed to open database: %w", err)
}
read.SetMaxOpenConns(100)
read.SetConnMaxIdleTime(time.Minute)
queries := sqlc.New(read, write)
return queries, func() {
if err := read.Close(); err != nil {
slog.Error("failed to close read connection", "error", err)
}
if err := write.Close(); err != nil {
slog.Error("failed to close write connection", "error", err)
}
}, nil
}
func TestDB(t *testing.T, dir string) *sqlc.Queries {
queries, cleanup, err := DB(t.Context(), filepath.Join(dir, "data.db"))
require.NoError(t, err)
t.Cleanup(cleanup)
return queries
}
func GenerateID(prefix string) string {
return strings.ToLower(prefix) + randomstring(12)
}
const base32alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
func randomstring(l int) string {
src := make([]byte, l)
_, _ = rand.Read(src)
for i := range src {
src[i] = base32alphabet[int(src[i])%len(base32alphabet)]
}
return string(src)
}

21
app/database/db_test.go Normal file
View File

@@ -0,0 +1,21 @@
package database_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
func TestDBForeignKeyConstraints(t *testing.T) {
t.Parallel()
queries := database.TestDB(t, t.TempDir())
assert.Error(t, queries.AssignGroupToUser(t.Context(), sqlc.AssignGroupToUserParams{
UserID: "does_not_exist",
GroupID: "also_missing",
}))
}

View File

@@ -0,0 +1,236 @@
CREATE TABLE IF NOT EXISTS _migrations
(
file VARCHAR(255) PRIMARY KEY NOT NULL,
applied INTEGER NOT NULL
);
CREATE TABLE IF NOT EXISTS _admins
(
id TEXT PRIMARY KEY NOT NULL,
avatar INTEGER DEFAULT 0 NOT NULL,
email TEXT UNIQUE NOT NULL,
tokenKey TEXT UNIQUE NOT NULL,
passwordHash TEXT NOT NULL,
lastResetSentAt TEXT DEFAULT "" NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS _collections
(
id TEXT PRIMARY KEY NOT NULL,
system BOOLEAN DEFAULT FALSE NOT NULL,
type TEXT DEFAULT "base" NOT NULL,
name TEXT UNIQUE NOT NULL,
schema JSON DEFAULT "[]" NOT NULL,
indexes JSON DEFAULT "[]" NOT NULL,
listRule TEXT DEFAULT NULL,
viewRule TEXT DEFAULT NULL,
createRule TEXT DEFAULT NULL,
updateRule TEXT DEFAULT NULL,
deleteRule TEXT DEFAULT NULL,
options JSON DEFAULT "{}" NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS _params
(
id TEXT PRIMARY KEY NOT NULL,
key TEXT UNIQUE NOT NULL,
value JSON DEFAULT NULL,
created TEXT DEFAULT "" NOT NULL,
updated TEXT DEFAULT "" NOT NULL
);
CREATE TABLE IF NOT EXISTS _externalAuths
(
id TEXT PRIMARY KEY NOT NULL,
collectionId TEXT NOT NULL,
recordId TEXT NOT NULL,
provider TEXT NOT NULL,
providerId TEXT NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
---
FOREIGN KEY (collectionId) REFERENCES _collections (id) ON UPDATE CASCADE ON DELETE CASCADE
);
CREATE UNIQUE INDEX IF NOT EXISTS _externalAuths_record_provider_idx on _externalAuths (collectionId, recordId, provider);
CREATE UNIQUE INDEX IF NOT EXISTS _externalAuths_collection_provider_idx on _externalAuths (collectionId, provider, providerId);
CREATE TABLE IF NOT EXISTS users
(
avatar TEXT DEFAULT '' NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
email TEXT DEFAULT '' NOT NULL,
emailVisibility BOOLEAN DEFAULT FALSE NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
lastLoginAlertSentAt TEXT DEFAULT '' NOT NULL,
lastResetSentAt TEXT DEFAULT '' NOT NULL,
lastVerificationSentAt TEXT DEFAULT '' NOT NULL,
name TEXT DEFAULT '' NOT NULL,
passwordHash TEXT NOT NULL,
tokenKey TEXT NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
username TEXT NOT NULL,
verified BOOLEAN DEFAULT FALSE NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS __pb_users_auth__username_idx ON users (username);
CREATE UNIQUE INDEX IF NOT EXISTS __pb_users_auth__email_idx ON users (email) WHERE email != '';
CREATE UNIQUE INDEX IF NOT EXISTS __pb_users_auth__tokenKey_idx ON users (tokenKey);
CREATE TABLE IF NOT EXISTS webhooks
(
collection TEXT DEFAULT '' NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
destination TEXT DEFAULT '' NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS types
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
icon TEXT DEFAULT '' NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
plural TEXT DEFAULT '' NOT NULL,
schema JSON DEFAULT NULL,
singular TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS tickets
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
description TEXT DEFAULT '' NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
open BOOLEAN DEFAULT FALSE NOT NULL,
owner TEXT DEFAULT '' NOT NULL,
resolution TEXT DEFAULT '' NOT NULL,
schema JSON DEFAULT NULL,
state JSON DEFAULT NULL,
type TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS tasks
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
open BOOLEAN DEFAULT FALSE NOT NULL,
owner TEXT DEFAULT '' NOT NULL,
ticket TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS comments
(
author TEXT DEFAULT '' NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
message TEXT DEFAULT '' NOT NULL,
ticket TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS timeline
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
message TEXT DEFAULT '' NOT NULL,
ticket TEXT DEFAULT '' NOT NULL,
time TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS links
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
ticket TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
url TEXT DEFAULT '' NOT NULL
);
CREATE TABLE IF NOT EXISTS files
(
blob TEXT DEFAULT '' NOT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
size NUMERIC DEFAULT 0 NOT NULL,
ticket TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE TABLE IF NOT EXISTS features
(
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS unique_name ON features (name);
CREATE VIEW IF NOT EXISTS sidebar AS
SELECT types.id as id,
types.singular as singular,
types.plural as plural,
types.icon as icon,
(SELECT COUNT(tickets.id) FROM tickets WHERE tickets.type = types.id AND tickets.open = true) as count
FROM types
ORDER BY types.plural;
CREATE TABLE IF NOT EXISTS reactions
(
action TEXT DEFAULT '' NOT NULL,
actiondata JSON DEFAULT NULL,
created TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL,
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT DEFAULT '' NOT NULL,
trigger TEXT DEFAULT '' NOT NULL,
triggerdata JSON DEFAULT NULL,
updated TEXT DEFAULT (strftime('%Y-%m-%d %H:%M:%fZ')) NOT NULL
);
CREATE VIEW IF NOT EXISTS ticket_search AS
SELECT tickets.id,
tickets.name,
tickets.created,
tickets.description,
tickets.open,
tickets.type,
tickets.state,
users.name as owner_name,
group_concat(comments.message) as comment_messages,
group_concat(files.name) as file_names,
group_concat(links.name) as link_names,
group_concat(links.url) as link_urls,
group_concat(tasks.name) as task_names,
group_concat(timeline.message) as timeline_messages
FROM tickets
LEFT JOIN comments ON comments.ticket = tickets.id
LEFT JOIN files ON files.ticket = tickets.id
LEFT JOIN links ON links.ticket = tickets.id
LEFT JOIN tasks ON tasks.ticket = tickets.id
LEFT JOIN timeline ON timeline.ticket = tickets.id
LEFT JOIN users ON users.id = tickets.owner
GROUP BY tickets.id;
CREATE VIEW IF NOT EXISTS dashboard_counts AS
SELECT id, count
FROM (SELECT 'users' as id, COUNT(users.id) as count
      FROM users
      UNION
      SELECT 'tickets' as id, COUNT(tickets.id) as count
      FROM tickets
      UNION
      SELECT 'tasks' as id, COUNT(tasks.id) as count
      FROM tasks
      UNION
      SELECT 'reactions' as id, COUNT(reactions.id) as count
      FROM reactions) as counts;

View File

@@ -0,0 +1,390 @@
DROP TABLE _migrations;
DROP TABLE _collections;
DROP TABLE _externalauths;
DROP VIEW sidebar;
DROP VIEW ticket_search;
DROP VIEW dashboard_counts;
--- _params
CREATE TABLE new_params
(
key TEXT PRIMARY KEY NOT NULL,
value JSON
);
INSERT INTO new_params
(key, value)
SELECT key, value
FROM _params;
DROP TABLE _params;
ALTER TABLE new_params
RENAME TO _params;
--- users
CREATE TABLE new_users
(
id TEXT PRIMARY KEY DEFAULT ('u' || lower(hex(randomblob(7)))) NOT NULL,
username TEXT NOT NULL,
passwordHash TEXT NOT NULL,
tokenKey TEXT NOT NULL,
active BOOLEAN NOT NULL,
name TEXT,
email TEXT,
avatar TEXT,
lastresetsentat DATETIME,
lastverificationsentat DATETIME,
admin BOOLEAN NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
);
INSERT INTO new_users
(avatar, email, id, lastresetsentat, lastverificationsentat, name, passwordHash, tokenKey, username, active, admin,
created,
updated)
SELECT avatar,
email,
id,
lastResetSentAt,
lastVerificationSentAt,
name,
passwordHash,
tokenKey,
username,
verified,
false,
created,
updated
FROM users;
INSERT INTO new_users
(avatar, email, id, lastresetsentat, lastverificationsentat, name, passwordHash, tokenKey, username, active, admin,
created,
updated)
SELECT avatar,
email,
id,
lastResetSentAt,
'',
email,
passwordHash,
tokenKey,
id,
true,
true,
created,
updated
FROM _admins;
DROP TABLE users;
DROP TABLE _admins;
ALTER TABLE new_users
RENAME TO users;
--- webhooks
CREATE TABLE new_webhooks
(
id TEXT PRIMARY KEY DEFAULT ('w' || lower(hex(randomblob(7)))) NOT NULL,
collection TEXT NOT NULL,
destination TEXT NOT NULL,
name TEXT NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
);
INSERT INTO new_webhooks
(collection, destination, id, name, created, updated)
SELECT collection, destination, id, name, datetime(created), datetime(updated)
FROM webhooks;
DROP TABLE webhooks;
ALTER TABLE new_webhooks
RENAME TO webhooks;
--- types
CREATE TABLE new_types
(
id TEXT PRIMARY KEY DEFAULT ('y' || lower(hex(randomblob(7)))) NOT NULL,
icon TEXT,
singular TEXT NOT NULL,
plural TEXT NOT NULL,
schema JSON,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
);
INSERT INTO new_types
(id, icon, singular, plural, schema, created, updated)
SELECT id, icon, singular, plural, schema, created, updated
FROM types;
DROP TABLE types;
ALTER TABLE new_types
RENAME TO types;
--- ticket
CREATE TABLE new_tickets
(
id TEXT PRIMARY KEY DEFAULT ('t' || lower(hex(randomblob(7)))) NOT NULL,
type TEXT NOT NULL,
owner TEXT,
name TEXT NOT NULL,
description TEXT NOT NULL,
open BOOLEAN NOT NULL,
resolution TEXT,
schema JSON,
state JSON,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (type) REFERENCES types (id) ON DELETE SET NULL,
FOREIGN KEY (owner) REFERENCES users (id) ON DELETE SET NULL
);
INSERT INTO new_tickets
(id, name, description, open, owner, resolution, schema, state, type, created, updated)
SELECT id,
name,
description,
open,
owner,
resolution,
schema,
state,
type,
created,
updated
FROM tickets;
DROP TABLE tickets;
ALTER TABLE new_tickets
RENAME TO tickets;
--- tasks
CREATE TABLE new_tasks
(
id TEXT PRIMARY KEY DEFAULT ('t' || lower(hex(randomblob(7)))) NOT NULL,
ticket TEXT NOT NULL,
owner TEXT,
name TEXT NOT NULL,
open BOOLEAN NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (ticket) REFERENCES tickets (id) ON DELETE CASCADE,
FOREIGN KEY (owner) REFERENCES users (id) ON DELETE SET NULL
);
INSERT INTO new_tasks
(id, ticket, owner, name, open, created, updated)
SELECT id, ticket, owner, name, open, created, updated
FROM tasks;
DROP TABLE tasks;
ALTER TABLE new_tasks
RENAME TO tasks;
--- comments
CREATE TABLE new_comments
(
id TEXT PRIMARY KEY DEFAULT ('c' || lower(hex(randomblob(7)))) NOT NULL,
ticket TEXT NOT NULL,
author TEXT NOT NULL,
message TEXT NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (ticket) REFERENCES tickets (id) ON DELETE CASCADE,
FOREIGN KEY (author) REFERENCES users (id) ON DELETE SET NULL
);
INSERT INTO new_comments
(id, ticket, author, message, created, updated)
SELECT id, ticket, author, message, created, updated
FROM comments;
DROP TABLE comments;
ALTER TABLE new_comments
RENAME TO comments;
--- timeline
CREATE TABLE new_timeline
(
id TEXT PRIMARY KEY DEFAULT ('h' || lower(hex(randomblob(7)))) NOT NULL,
ticket TEXT NOT NULL,
message TEXT NOT NULL,
time DATETIME NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (ticket) REFERENCES tickets (id) ON DELETE CASCADE
);
INSERT INTO new_timeline
(id, ticket, message, time, created, updated)
SELECT id, ticket, message, time, created, updated
FROM timeline;
DROP TABLE timeline;
ALTER TABLE new_timeline
RENAME TO timeline;
--- links
CREATE TABLE new_links
(
id TEXT PRIMARY KEY DEFAULT ('l' || lower(hex(randomblob(7)))) NOT NULL,
ticket TEXT NOT NULL,
name TEXT NOT NULL,
url TEXT NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (ticket) REFERENCES tickets (id) ON DELETE CASCADE
);
INSERT INTO new_links
(id, ticket, name, url, created, updated)
SELECT id, ticket, name, url, datetime(created), datetime(updated)
FROM links;
DROP TABLE links;
ALTER TABLE new_links
RENAME TO links;
--- files
CREATE TABLE new_files
(
id TEXT PRIMARY KEY DEFAULT ('b' || lower(hex(randomblob(7)))) NOT NULL,
ticket TEXT NOT NULL,
name TEXT NOT NULL,
blob TEXT NOT NULL,
size NUMERIC NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
FOREIGN KEY (ticket) REFERENCES tickets (id) ON DELETE CASCADE
);
INSERT INTO new_files
(id, name, blob, size, ticket, created, updated)
SELECT id, name, blob, size, ticket, created, updated
FROM files;
DROP TABLE files;
ALTER TABLE new_files
RENAME TO files;
--- features
CREATE TABLE new_features
(
key TEXT PRIMARY KEY NOT NULL
);
INSERT INTO new_features
(key)
SELECT name
FROM features;
DROP TABLE features;
ALTER TABLE new_features
RENAME TO features;
--- reactions
CREATE TABLE new_reactions
(
id TEXT PRIMARY KEY DEFAULT ('r' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT NOT NULL,
action TEXT NOT NULL,
actiondata JSON NOT NULL,
trigger TEXT NOT NULL,
triggerdata JSON NOT NULL,
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
);
INSERT INTO new_reactions
(id, name, action, actiondata, trigger, triggerdata, created, updated)
SELECT id,
name,
action,
actionData,
trigger,
triggerData,
created,
updated
FROM reactions;
DROP TABLE reactions;
ALTER TABLE new_reactions
RENAME TO reactions;
--- views
CREATE VIEW sidebar AS
SELECT types.id as id,
types.singular as singular,
types.plural as plural,
types.icon as icon,
(SELECT COUNT(tickets.id) FROM tickets WHERE tickets.type = types.id AND tickets.open = true) as count
FROM types
ORDER BY types.plural;
CREATE VIEW ticket_search AS
SELECT tickets.id,
tickets.name,
tickets.created,
tickets.description,
tickets.open,
tickets.type,
tickets.state,
users.name as owner_name,
group_concat(comments.message) as comment_messages,
group_concat(files.name) as file_names,
group_concat(links.name) as link_names,
group_concat(links.url) as link_urls,
group_concat(tasks.name) as task_names,
group_concat(timeline.message) as timeline_messages
FROM tickets
LEFT JOIN comments ON comments.ticket = tickets.id
LEFT JOIN files ON files.ticket = tickets.id
LEFT JOIN links ON links.ticket = tickets.id
LEFT JOIN tasks ON tasks.ticket = tickets.id
LEFT JOIN timeline ON timeline.ticket = tickets.id
LEFT JOIN users ON users.id = tickets.owner
GROUP BY tickets.id;
CREATE VIEW dashboard_counts AS
SELECT id, count
FROM (SELECT 'users' as id, COUNT(users.id) as count
      FROM users
      UNION
      SELECT 'tickets' as id, COUNT(tickets.id) as count
      FROM tickets
      UNION
      SELECT 'tasks' as id, COUNT(tasks.id) as count
      FROM tasks
      UNION
      SELECT 'reactions' as id, COUNT(reactions.id) as count
      FROM reactions) as counts;

View File

@@ -0,0 +1,5 @@
INSERT OR IGNORE INTO types (id, singular, plural, icon, schema) VALUES ('alert', 'Alert', 'Alerts', 'AlertTriangle', '{"type": "object", "properties": { "severity": { "title": "Severity", "enum": ["Low", "Medium", "High"]}}, "required": ["severity"]}');
INSERT OR IGNORE INTO types (id, singular, plural, icon, schema) VALUES ('incident', 'Incident', 'Incidents', 'Flame', '{"type": "object", "properties": { "severity": { "title": "Severity", "enum": ["Low", "Medium", "High"]}}, "required": ["severity"]}');
INSERT OR IGNORE INTO types (id, singular, plural, icon, schema) VALUES ('vulnerability', 'Vulnerability', 'Vulnerabilities', 'Bug', '{"type": "object", "properties": { "severity": { "title": "Severity", "enum": ["Low", "Medium", "High"]}}, "required": ["severity"]}');
INSERT OR IGNORE INTO users (id, name, username, passwordHash, tokenKey, active, admin) VALUES ('system', 'System', 'system', '', lower(hex(randomblob(26))), true, true);

View File

@@ -0,0 +1,82 @@
CREATE TABLE groups
(
id TEXT PRIMARY KEY DEFAULT ('g' || lower(hex(randomblob(7)))) NOT NULL,
name TEXT UNIQUE NOT NULL,
permissions TEXT NOT NULL, -- JSON array string like '["read:article","write:article"]'
created DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL,
updated DATETIME DEFAULT CURRENT_TIMESTAMP NOT NULL
);
CREATE TABLE user_groups
(
user_id TEXT NOT NULL,
group_id TEXT NOT NULL,
PRIMARY KEY (user_id, group_id),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE,
FOREIGN KEY (group_id) REFERENCES groups (id) ON DELETE CASCADE
);
CREATE TABLE group_inheritance
(
parent_group_id TEXT NOT NULL,
child_group_id TEXT NOT NULL,
PRIMARY KEY (parent_group_id, child_group_id),
FOREIGN KEY (parent_group_id) REFERENCES groups (id) ON DELETE CASCADE,
FOREIGN KEY (child_group_id) REFERENCES groups (id) ON DELETE CASCADE
);
CREATE VIEW group_effective_groups AS
WITH RECURSIVE all_groups(child_group_id, parent_group_id, group_type)
AS (SELECT rr.child_group_id, rr.parent_group_id, 'direct' AS group_type
FROM group_inheritance rr
UNION
SELECT ar.child_group_id, ri.parent_group_id, 'indirect' AS group_type
FROM all_groups ar
JOIN group_inheritance ri ON ri.child_group_id = ar.parent_group_id)
SELECT child_group_id, parent_group_id, group_type
FROM all_groups;
CREATE VIEW group_effective_permissions AS
SELECT re.parent_group_id, CAST(json_each.value AS TEXT) AS permission
FROM group_effective_groups re
JOIN groups r ON r.id = re.child_group_id, json_each(r.permissions);
CREATE VIEW user_effective_groups AS
WITH RECURSIVE all_groups(user_id, group_id, group_type) AS (
-- Direct groups
SELECT ur.user_id, ur.group_id, 'direct' AS group_type
FROM user_groups ur
UNION
-- Inherited groups
SELECT ar.user_id, ri.child_group_id, 'indirect' AS group_type
FROM all_groups ar
JOIN group_inheritance ri ON ri.parent_group_id = ar.group_id)
SELECT user_id,
group_id,
group_type
FROM all_groups;
CREATE VIEW user_effective_permissions AS
SELECT DISTINCT uer.user_id,
CAST(json_each.value AS TEXT) AS permission
FROM user_effective_groups uer
JOIN groups r ON r.id = uer.group_id, json_each(r.permissions);
INSERT INTO groups (id, name, permissions)
VALUES ('analyst', 'Analyst', '["type:read", "file:read", "ticket:read", "ticket:write", "user:read", "group:read"]'),
('admin', 'Admin', '["admin"]');
INSERT INTO user_groups (user_id, group_id)
SELECT id, 'analyst'
FROM users
WHERE NOT admin;
INSERT INTO user_groups (user_id, group_id)
SELECT id, 'admin'
FROM users
WHERE admin;
ALTER TABLE users
DROP COLUMN admin;
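
As a sketch only (not part of the migration above), the recursive views can be queried directly to answer "does this user hold this permission?". The package name, helper name, and hand-written SQL are assumptions for illustration; the application itself goes through sqlc-generated queries.

package permcheck

import (
	"context"
	"database/sql"
	"fmt"
)

// userHasPermission checks the user_effective_permissions view defined in the
// migration above. Illustrative only; not part of this change set.
func userHasPermission(ctx context.Context, db *sql.DB, userID, permission string) (bool, error) {
	const query = `
		SELECT EXISTS (
			SELECT 1
			FROM user_effective_permissions
			WHERE user_id = ? AND permission = ?
		)`

	var ok bool
	if err := db.QueryRowContext(ctx, query, userID, permission).Scan(&ok); err != nil {
		return false, fmt.Errorf("check permission %q for user %q: %w", permission, userID, err)
	}

	return ok, nil
}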

View File

@@ -0,0 +1,6 @@
package migrations
import "embed"
//go:embed *.sql
var Migrations embed.FS

52
app/database/paginate.go Normal file
View File

@@ -0,0 +1,52 @@
package database
import (
"context"
"database/sql"
"errors"
)
func Paginate(ctx context.Context, f func(ctx context.Context, offset, limit int64) (nextPage bool, err error)) error {
const pageSize int64 = 100
for i := range int64(1000) {
nextPage, err := f(ctx, i*pageSize, pageSize)
if err != nil {
if errors.Is(err, sql.ErrNoRows) {
// No more rows to process, exit the loop
return nil
}
return err
}
if !nextPage {
return nil
}
}
return errors.New("pagination limit reached, too many pages")
}
func PaginateItems[T any](ctx context.Context, f func(ctx context.Context, offset, limit int64) (items []T, err error)) ([]T, error) {
var allItems []T
if err := Paginate(ctx, func(ctx context.Context, offset, limit int64) (nextPage bool, err error) {
items, err := f(ctx, offset, limit)
if err != nil {
return false, err
}
if len(items) == 0 {
return false, nil
}
allItems = append(allItems, items...)
return true, nil
}); err != nil {
return nil, err
}
return allItems, nil
}
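
Paginate walks a paged query in fixed pages of 100 rows, treats sql.ErrNoRows as a normal end of data, and returns an error after 1,000 pages; PaginateItems layers result collection on top and also stops at the first empty page. A usage sketch with a hypothetical per-page fetch function:

package example

import (
	"context"

	"github.com/SecurityBrewery/catalyst/app/database"
)

// AllNames collects every page produced by fetchPage into one slice.
// fetchPage is a stand-in for any sqlc list query that takes offset and limit.
func AllNames(ctx context.Context, fetchPage func(ctx context.Context, offset, limit int64) ([]string, error)) ([]string, error) {
	return database.PaginateItems(ctx, fetchPage)
}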

View File

@@ -0,0 +1,97 @@
package database
import (
"context"
"database/sql"
"errors"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestPaginate_AllPages(t *testing.T) {
t.Parallel()
calls := 0
err := Paginate(t.Context(), func(_ context.Context, _, _ int64) (bool, error) {
calls++
if calls < 3 {
return true, nil
}
return false, nil
})
require.NoError(t, err, "expected no error")
assert.Equal(t, 3, calls, "expected 3 calls")
}
func TestPaginate_EarlyStop(t *testing.T) {
t.Parallel()
calls := 0
err := Paginate(t.Context(), func(_ context.Context, _, _ int64) (bool, error) {
calls++
return false, nil
})
require.NoError(t, err, "expected no error")
assert.Equal(t, 1, calls, "expected 1 call")
}
func TestPaginate_Error(t *testing.T) {
t.Parallel()
errTest := errors.New("fail")
err := Paginate(t.Context(), func(_ context.Context, _, _ int64) (bool, error) {
return false, errTest
})
assert.ErrorIs(t, err, errTest, "expected error")
}
func TestPaginate_NoRows(t *testing.T) {
t.Parallel()
err := Paginate(t.Context(), func(_ context.Context, _, _ int64) (bool, error) {
return false, sql.ErrNoRows
})
require.NoError(t, err, "expected no error")
}
func TestPaginateItems(t *testing.T) {
t.Parallel()
calls := 0
f := func(_ context.Context, offset, _ int64) ([]int, error) {
calls++
if offset >= 100 {
return nil, sql.ErrNoRows
}
return []int{1}, nil
}
items, err := PaginateItems(t.Context(), f)
require.NoError(t, err, "expected no error")
assert.Equal(t, []int{1}, items, "expected items to match")
assert.Equal(t, 2, calls, "expected 2 calls")
}
func TestPaginateItemsLarge(t *testing.T) {
t.Parallel()
calls := 0
f := func(_ context.Context, offset, _ int64) ([]int, error) {
calls++
if offset >= 200 {
return nil, sql.ErrNoRows
}
return []int{1}, nil
}
items, err := PaginateItems(t.Context(), f)
require.NoError(t, err, "expected no error")
assert.Equal(t, []int{1, 1}, items, "expected items to match")
assert.Equal(t, 3, calls, "expected 3 calls")
}

285
app/database/read.sql Normal file
View File

@@ -0,0 +1,285 @@
-- name: Param :one
SELECT *
FROM _params
WHERE _params.key = @key;
-------------------------------------------------------------------
-- name: Ticket :one
SELECT tickets.*, users.name as owner_name, types.singular as type_singular, types.plural as type_plural
FROM tickets
LEFT JOIN users ON users.id = tickets.owner
LEFT JOIN types ON types.id = tickets.type
WHERE tickets.id = @id;
-- name: ListTickets :many
SELECT tickets.*,
users.name as owner_name,
types.singular as type_singular,
types.plural as type_plural,
COUNT(*) OVER () as total_count
FROM tickets
LEFT JOIN users ON users.id = tickets.owner
LEFT JOIN types ON types.id = tickets.type
ORDER BY tickets.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetComment :one
SELECT comments.*, users.name as author_name
FROM comments
LEFT JOIN users ON users.id = comments.author
WHERE comments.id = @id;
-- name: ListComments :many
SELECT comments.*, users.name as author_name, COUNT(*) OVER () as total_count
FROM comments
LEFT JOIN users ON users.id = comments.author
WHERE ticket = @ticket
OR @ticket = ''
ORDER BY comments.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetFeature :one
SELECT *
FROM features
WHERE key = @key;
-- name: ListFeatures :many
SELECT features.*, COUNT(*) OVER () as total_count
FROM features
ORDER BY features.key DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetFile :one
SELECT *
FROM files
WHERE id = @id;
-- name: ListFiles :many
SELECT files.*, COUNT(*) OVER () as total_count
FROM files
WHERE ticket = @ticket
OR @ticket = ''
ORDER BY files.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetLink :one
SELECT *
FROM links
WHERE id = @id;
-- name: ListLinks :many
SELECT links.*, COUNT(*) OVER () as total_count
FROM links
WHERE ticket = @ticket
OR @ticket = ''
ORDER BY links.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetReaction :one
SELECT *
FROM reactions
WHERE id = @id;
-- name: ListReactions :many
SELECT reactions.*, COUNT(*) OVER () as total_count
FROM reactions
ORDER BY reactions.created DESC
LIMIT @limit OFFSET @offset;
-- name: ListReactionsByTrigger :many
SELECT reactions.*, COUNT(*) OVER () as total_count
FROM reactions
WHERE trigger = @trigger
ORDER BY reactions.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetTask :one
SELECT tasks.*, users.name as owner_name, tickets.name as ticket_name, tickets.type as ticket_type
FROM tasks
LEFT JOIN users ON users.id = tasks.owner
LEFT JOIN tickets ON tickets.id = tasks.ticket
WHERE tasks.id = @id;
-- name: ListTasks :many
SELECT tasks.*,
users.name as owner_name,
tickets.name as ticket_name,
tickets.type as ticket_type,
COUNT(*) OVER () as total_count
FROM tasks
LEFT JOIN users ON users.id = tasks.owner
LEFT JOIN tickets ON tickets.id = tasks.ticket
WHERE ticket = @ticket
OR @ticket = ''
ORDER BY tasks.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetTimeline :one
SELECT *
FROM timeline
WHERE id = @id;
-- name: ListTimeline :many
SELECT timeline.*, COUNT(*) OVER () as total_count
FROM timeline
WHERE ticket = @ticket
OR @ticket = ''
ORDER BY timeline.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetType :one
SELECT *
FROM types
WHERE id = @id;
-- name: ListTypes :many
SELECT types.*, COUNT(*) OVER () as total_count
FROM types
ORDER BY created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetUser :one
SELECT *
FROM users
WHERE id = @id;
-- name: UserByUserName :one
SELECT *
FROM users
WHERE username = @username;
-- name: UserByEmail :one
SELECT *
FROM users
WHERE email = @email;
-- name: SystemUser :one
SELECT *
FROM users
WHERE id = 'system';
-- name: ListUsers :many
SELECT users.*, COUNT(*) OVER () as total_count
FROM users
WHERE id != 'system'
ORDER BY users.created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetWebhook :one
SELECT *
FROM webhooks
WHERE id = @id;
-- name: ListWebhooks :many
SELECT webhooks.*, COUNT(*) OVER () as total_count
FROM webhooks
ORDER BY created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetDashboardCounts :many
SELECT *
FROM dashboard_counts;
-- name: GetSidebar :many
SELECT *
FROM sidebar;
-- name: SearchTickets :many
SELECT id,
name,
created,
description,
open,
type,
state,
owner_name,
COUNT(*) OVER () as total_count
FROM ticket_search
WHERE (@query = '' OR (name LIKE '%' || @query || '%'
OR description LIKE '%' || @query || '%'
OR comment_messages LIKE '%' || @query || '%'
OR file_names LIKE '%' || @query || '%'
OR link_names LIKE '%' || @query || '%'
OR link_urls LIKE '%' || @query || '%'
OR task_names LIKE '%' || @query || '%'
OR timeline_messages LIKE '%' || @query || '%'))
AND (sqlc.narg('type') IS NULL OR type = sqlc.narg('type'))
AND (sqlc.narg('open') IS NULL OR open = sqlc.narg('open'))
ORDER BY created DESC
LIMIT @limit OFFSET @offset;
------------------------------------------------------------------
-- name: GetGroup :one
SELECT *
FROM groups
WHERE id = @id;
-- name: ListGroups :many
SELECT g.*, COUNT(*) OVER () as total_count
FROM groups AS g
ORDER BY g.created DESC
LIMIT @limit OFFSET @offset;
-- name: ListUserGroups :many
SELECT g.*, uer.group_type, COUNT(*) OVER () as total_count
FROM user_effective_groups uer
JOIN groups AS g ON g.id = uer.group_id
WHERE uer.user_id = @user_id
ORDER BY g.name DESC;
-- name: ListGroupUsers :many
SELECT users.*, uer.group_type
FROM user_effective_groups uer
JOIN users ON users.id = uer.user_id
WHERE uer.group_id = @group_id
ORDER BY users.name DESC;
-- name: ListUserPermissions :many
SELECT user_effective_permissions.permission
FROM user_effective_permissions
WHERE user_id = @user_id
ORDER BY permission;
-- name: ListParentGroups :many
SELECT g.*, group_effective_groups.group_type
FROM group_effective_groups
JOIN groups AS g ON g.id = group_effective_groups.child_group_id
WHERE parent_group_id = @group_id
ORDER BY group_effective_groups.group_type;
-- name: ListChildGroups :many
SELECT g.*, group_effective_groups.group_type
FROM group_effective_groups
JOIN groups AS g ON g.id = group_effective_groups.parent_group_id
WHERE child_group_id = @group_id
ORDER BY group_effective_groups.group_type;
-- name: ListParentPermissions :many
SELECT group_effective_permissions.permission
FROM group_effective_permissions
WHERE parent_group_id = @group_id
ORDER BY permission;
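
Each "-- name: X :one" or ":many" block above is compiled by sqlc into a typed Go method, each @param becomes a field on the generated params struct, and sqlc.narg marks an optional filter that can be left NULL. A sketch of calling one of the generated readers, mirroring how ListFiles is used elsewhere in this changeset:

package example

import (
	"context"

	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)

// firstFilesPage fetches the first 100 files; leaving Ticket empty matches the
// "@ticket = ''" branch of the ListFiles query and returns files for all tickets.
func firstFilesPage(ctx context.Context, queries *sqlc.Queries) ([]sqlc.ListFilesRow, error) {
	return queries.ListFiles(ctx, sqlc.ListFilesParams{Limit: 100, Offset: 0})
}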


32
app/database/sqlc.yaml Normal file
View File

@@ -0,0 +1,32 @@
version: "2"
sql:
- engine: "sqlite"
queries: "read.sql"
schema: "migrations"
gen:
go:
package: "sqlc"
out: "sqlc"
emit_json_tags: true
emit_pointers_for_null_types: true
overrides:
- { "column": "*.schema", "go_type": { "type": "[]byte" } }
- { "column": "*.state", "go_type": { "type": "[]byte" } }
- { "column": "reactions.actiondata", "go_type": { "type": "[]byte" } }
- { "column": "reactions.triggerdata", "go_type": { "type": "[]byte" } }
- { "column": "_params.value", "go_type": { "type": "[]byte" } }
- engine: "sqlite"
queries: "write.sql"
schema: "migrations"
gen:
go:
package: "sqlc"
out: "sqlc"
emit_json_tags: true
emit_pointers_for_null_types: true
overrides:
- { "column": "*.schema", "go_type": { "type": "[]byte" } }
- { "column": "*.state", "go_type": { "type": "[]byte" } }
- { "column": "reactions.actiondata", "go_type": { "type": "[]byte" } }
- { "column": "reactions.triggerdata", "go_type": { "type": "[]byte" } }
- { "column": "_params.value", "go_type": { "type": "[]byte" } }

38
app/database/sqlc/db.go Normal file
View File

@@ -0,0 +1,38 @@
package sqlc
import (
"context"
"database/sql"
)
type DBTX interface {
ExecContext(ctx context.Context, query string, args ...interface{}) (sql.Result, error)
PrepareContext(ctx context.Context, query string) (*sql.Stmt, error)
QueryContext(ctx context.Context, query string, args ...interface{}) (*sql.Rows, error)
QueryRowContext(ctx context.Context, query string, args ...interface{}) *sql.Row
}
type Queries struct {
*ReadQueries
*WriteQueries
ReadDB *sql.DB
WriteDB *sql.DB
}
type ReadQueries struct {
db DBTX
}
type WriteQueries struct {
db DBTX
}
func New(readDB, writeDB *sql.DB) *Queries {
return &Queries{
ReadQueries: &ReadQueries{db: readDB},
WriteQueries: &WriteQueries{db: writeDB},
ReadDB: readDB,
WriteDB: writeDB,
}
}
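
Queries keeps one handle for reads and one for writes, a common arrangement for SQLite where many readers can run concurrently but writes are serialized. A sketch of wiring it up with the mattn/go-sqlite3 driver already used in the tests; the DSN pragma and the single-writer connection limit are illustrative choices, not taken from this repository:

package example

import (
	"database/sql"

	_ "github.com/mattn/go-sqlite3"

	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)

// openQueries opens two handles to the same database file and hands them to the
// generated Queries type: readDB for the ReadQueries methods, writeDB for writes.
func openQueries(path string) (*sqlc.Queries, error) {
	readDB, err := sql.Open("sqlite3", path+"?_journal_mode=WAL")
	if err != nil {
		return nil, err
	}

	writeDB, err := sql.Open("sqlite3", path+"?_journal_mode=WAL")
	if err != nil {
		return nil, err
	}
	writeDB.SetMaxOpenConns(1) // serialize writes on a single connection

	return sqlc.New(readDB, writeDB), nil
}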

194
app/database/sqlc/models.go Normal file
View File

@@ -0,0 +1,194 @@
// Code generated by sqlc. DO NOT EDIT.
// versions:
// sqlc v1.29.0
package sqlc
import (
"time"
)
type Comment struct {
ID string `json:"id"`
Ticket string `json:"ticket"`
Author string `json:"author"`
Message string `json:"message"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type DashboardCount struct {
ID string `json:"id"`
Count int64 `json:"count"`
}
type Feature struct {
Key string `json:"key"`
}
type File struct {
ID string `json:"id"`
Ticket string `json:"ticket"`
Name string `json:"name"`
Blob string `json:"blob"`
Size float64 `json:"size"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type Group struct {
ID string `json:"id"`
Name string `json:"name"`
Permissions string `json:"permissions"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type GroupEffectiveGroup struct {
ChildGroupID string `json:"child_group_id"`
ParentGroupID string `json:"parent_group_id"`
GroupType string `json:"group_type"`
}
type GroupEffectivePermission struct {
ParentGroupID string `json:"parent_group_id"`
Permission string `json:"permission"`
}
type GroupInheritance struct {
ParentGroupID string `json:"parent_group_id"`
ChildGroupID string `json:"child_group_id"`
}
type Link struct {
ID string `json:"id"`
Ticket string `json:"ticket"`
Name string `json:"name"`
Url string `json:"url"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type Param struct {
Key string `json:"key"`
Value []byte `json:"value"`
}
type Reaction struct {
ID string `json:"id"`
Name string `json:"name"`
Action string `json:"action"`
Actiondata []byte `json:"actiondata"`
Trigger string `json:"trigger"`
Triggerdata []byte `json:"triggerdata"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type Sidebar struct {
ID string `json:"id"`
Singular string `json:"singular"`
Plural string `json:"plural"`
Icon *string `json:"icon"`
Count int64 `json:"count"`
}
type Task struct {
ID string `json:"id"`
Ticket string `json:"ticket"`
Owner *string `json:"owner"`
Name string `json:"name"`
Open bool `json:"open"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type Ticket struct {
ID string `json:"id"`
Type string `json:"type"`
Owner *string `json:"owner"`
Name string `json:"name"`
Description string `json:"description"`
Open bool `json:"open"`
Resolution *string `json:"resolution"`
Schema []byte `json:"schema"`
State []byte `json:"state"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type TicketSearch struct {
ID string `json:"id"`
Name string `json:"name"`
Created time.Time `json:"created"`
Description string `json:"description"`
Open bool `json:"open"`
Type string `json:"type"`
State []byte `json:"state"`
OwnerName *string `json:"owner_name"`
CommentMessages string `json:"comment_messages"`
FileNames string `json:"file_names"`
LinkNames string `json:"link_names"`
LinkUrls string `json:"link_urls"`
TaskNames string `json:"task_names"`
TimelineMessages string `json:"timeline_messages"`
}
type Timeline struct {
ID string `json:"id"`
Ticket string `json:"ticket"`
Message string `json:"message"`
Time time.Time `json:"time"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type Type struct {
ID string `json:"id"`
Icon *string `json:"icon"`
Singular string `json:"singular"`
Plural string `json:"plural"`
Schema []byte `json:"schema"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type User struct {
ID string `json:"id"`
Username string `json:"username"`
Passwordhash string `json:"passwordhash"`
Tokenkey string `json:"tokenkey"`
Active bool `json:"active"`
Name *string `json:"name"`
Email *string `json:"email"`
Avatar *string `json:"avatar"`
Lastresetsentat *time.Time `json:"lastresetsentat"`
Lastverificationsentat *time.Time `json:"lastverificationsentat"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}
type UserEffectiveGroup struct {
UserID string `json:"user_id"`
GroupID string `json:"group_id"`
GroupType string `json:"group_type"`
}
type UserEffectivePermission struct {
UserID string `json:"user_id"`
Permission string `json:"permission"`
}
type UserGroup struct {
UserID string `json:"user_id"`
GroupID string `json:"group_id"`
}
type Webhook struct {
ID string `json:"id"`
Collection string `json:"collection"`
Destination string `json:"destination"`
Name string `json:"name"`
Created time.Time `json:"created"`
Updated time.Time `json:"updated"`
}

File diff suppressed because it is too large

File diff suppressed because it is too large

45
app/database/tables.go Normal file
View File

@@ -0,0 +1,45 @@
package database
type Table struct {
ID string `json:"id"`
Name string `json:"name"`
}
var (
TicketsTable = Table{ID: "tickets", Name: "Tickets"}
CommentsTable = Table{ID: "comments", Name: "Comments"}
LinksTable = Table{ID: "links", Name: "Links"}
TasksTable = Table{ID: "tasks", Name: "Tasks"}
TimelinesTable = Table{ID: "timeline", Name: "Timeline"}
FilesTable = Table{ID: "files", Name: "Files"}
TypesTable = Table{ID: "types", Name: "Types"}
UsersTable = Table{ID: "users", Name: "Users"}
GroupsTable = Table{ID: "groups", Name: "Groups"}
ReactionsTable = Table{ID: "reactions", Name: "Reactions"}
WebhooksTable = Table{ID: "webhooks", Name: "Webhooks"}
DashboardCountsTable = Table{ID: "dashboard_counts", Name: "Dashboard Counts"}
SidebarTable = Table{ID: "sidebar", Name: "Sidebar"}
UserPermissionTable = Table{ID: "user_permissions", Name: "User Permissions"}
UserGroupTable = Table{ID: "user_groups", Name: "User Groups"}
GroupUserTable = Table{ID: "group_users", Name: "Group Users"}
GroupPermissionTable = Table{ID: "group_permissions", Name: "Group Permissions"}
GroupParentTable = Table{ID: "group_parents", Name: "Group Parents"}
GroupChildTable = Table{ID: "group_children", Name: "Group Children"}
CreateAction = "create"
UpdateAction = "update"
DeleteAction = "delete"
)
func Tables() []Table {
return []Table{
TicketsTable,
FilesTable,
TypesTable,
UsersTable,
GroupsTable,
ReactionsTable,
WebhooksTable,
}
}

328
app/database/write.sql Normal file
View File

@@ -0,0 +1,328 @@
-- name: CreateParam :exec
INSERT INTO _params (key, value)
VALUES (@key, @value)
RETURNING *;
-- name: UpdateParam :exec
UPDATE _params
SET value = @value
WHERE key = @key
RETURNING *;
------------------------------------------------------------------
-- name: InsertTicket :one
INSERT INTO tickets (id, name, description, open, owner, resolution, schema, state, type, created, updated)
VALUES (@id, @name, @description, @open, @owner, @resolution, @schema, @state, @type, @created, @updated)
RETURNING *;
-- name: CreateTicket :one
INSERT INTO tickets (name, description, open, owner, resolution, schema, state, type)
VALUES (@name, @description, @open, @owner, @resolution, @schema, @state, @type)
RETURNING *;
-- name: UpdateTicket :one
UPDATE tickets
SET name = coalesce(sqlc.narg('name'), name),
description = coalesce(sqlc.narg('description'), description),
open = coalesce(sqlc.narg('open'), open),
owner = coalesce(sqlc.narg('owner'), owner),
resolution = coalesce(sqlc.narg('resolution'), resolution),
schema = coalesce(sqlc.narg('schema'), schema),
state = coalesce(sqlc.narg('state'), state),
type = coalesce(sqlc.narg('type'), type)
WHERE id = @id
RETURNING *;
-- name: DeleteTicket :exec
DELETE
FROM tickets
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertComment :one
INSERT INTO comments (id, author, message, ticket, created, updated)
VALUES (@id, @author, @message, @ticket, @created, @updated)
RETURNING *;
-- name: CreateComment :one
INSERT INTO comments (author, message, ticket)
VALUES (@author, @message, @ticket)
RETURNING *;
-- name: UpdateComment :one
UPDATE comments
SET message = coalesce(sqlc.narg('message'), message)
WHERE id = @id
RETURNING *;
-- name: DeleteComment :exec
DELETE
FROM comments
WHERE id = @id;
------------------------------------------------------------------
-- name: CreateFeature :one
INSERT INTO features (key)
VALUES (@key)
RETURNING *;
-- name: DeleteFeature :exec
DELETE
FROM features
WHERE key = @key;
------------------------------------------------------------------
-- name: InsertFile :one
INSERT INTO files (id, name, blob, size, ticket, created, updated)
VALUES (@id, @name, @blob, @size, @ticket, @created, @updated)
RETURNING *;
-- name: CreateFile :one
INSERT INTO files (name, blob, size, ticket)
VALUES (@name, @blob, @size, @ticket)
RETURNING *;
-- name: UpdateFile :one
UPDATE files
SET name = coalesce(sqlc.narg('name'), name),
blob = coalesce(sqlc.narg('blob'), blob),
size = coalesce(sqlc.narg('size'), size)
WHERE id = @id
RETURNING *;
-- name: DeleteFile :exec
DELETE
FROM files
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertLink :one
INSERT INTO links (id, name, url, ticket, created, updated)
VALUES (@id, @name, @url, @ticket, @created, @updated)
RETURNING *;
-- name: CreateLink :one
INSERT INTO links (name, url, ticket)
VALUES (@name, @url, @ticket)
RETURNING *;
-- name: UpdateLink :one
UPDATE links
SET name = coalesce(sqlc.narg('name'), name),
url = coalesce(sqlc.narg('url'), url)
WHERE id = @id
RETURNING *;
-- name: DeleteLink :exec
DELETE
FROM links
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertReaction :one
INSERT INTO reactions (id, name, action, actiondata, trigger, triggerdata, created, updated)
VALUES (@id, @name, @action, @actiondata, @trigger, @triggerdata, @created, @updated)
RETURNING *;
-- name: CreateReaction :one
INSERT INTO reactions (name, action, actiondata, trigger, triggerdata)
VALUES (@name, @action, @actiondata, @trigger, @triggerdata)
RETURNING *;
-- name: UpdateReaction :one
UPDATE reactions
SET name = coalesce(sqlc.narg('name'), name),
action = coalesce(sqlc.narg('action'), action),
actiondata = coalesce(sqlc.narg('actiondata'), actiondata),
trigger = coalesce(sqlc.narg('trigger'), trigger),
triggerdata = coalesce(sqlc.narg('triggerdata'), triggerdata)
WHERE id = @id
RETURNING *;
-- name: DeleteReaction :exec
DELETE
FROM reactions
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertTask :one
INSERT INTO tasks (id, name, open, owner, ticket, created, updated)
VALUES (@id, @name, @open, @owner, @ticket, @created, @updated)
RETURNING *;
-- name: CreateTask :one
INSERT INTO tasks (name, open, owner, ticket)
VALUES (@name, @open, @owner, @ticket)
RETURNING *;
-- name: UpdateTask :one
UPDATE tasks
SET name = coalesce(sqlc.narg('name'), name),
open = coalesce(sqlc.narg('open'), open),
owner = coalesce(sqlc.narg('owner'), owner)
WHERE id = @id
RETURNING *;
-- name: DeleteTask :exec
DELETE
FROM tasks
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertTimeline :one
INSERT INTO timeline (id, message, ticket, time, created, updated)
VALUES (@id, @message, @ticket, @time, @created, @updated)
RETURNING *;
-- name: CreateTimeline :one
INSERT INTO timeline (message, ticket, time)
VALUES (@message, @ticket, @time)
RETURNING *;
-- name: UpdateTimeline :one
UPDATE timeline
SET message = coalesce(sqlc.narg('message'), message),
time = coalesce(sqlc.narg('time'), time)
WHERE id = @id
RETURNING *;
-- name: DeleteTimeline :exec
DELETE
FROM timeline
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertType :one
INSERT INTO types (id, singular, plural, icon, schema, created, updated)
VALUES (@id, @singular, @plural, @icon, @schema, @created, @updated)
RETURNING *;
-- name: CreateType :one
INSERT INTO types (singular, plural, icon, schema)
VALUES (@singular, @plural, @icon, @schema)
RETURNING *;
-- name: UpdateType :one
UPDATE types
SET singular = coalesce(sqlc.narg('singular'), singular),
plural = coalesce(sqlc.narg('plural'), plural),
icon = coalesce(sqlc.narg('icon'), icon),
schema = coalesce(sqlc.narg('schema'), schema)
WHERE id = @id
RETURNING *;
-- name: DeleteType :exec
DELETE
FROM types
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertUser :one
INSERT INTO users (id, name, email, username, passwordHash, tokenKey, avatar, active, created, updated)
VALUES (@id, @name, @email, @username, @passwordHash, @tokenKey, @avatar, @active, @created, @updated)
RETURNING *;
-- name: CreateUser :one
INSERT INTO users (name, email, username, passwordHash, tokenKey, avatar, active)
VALUES (@name, @email, @username, @passwordHash, @tokenKey, @avatar, @active)
RETURNING *;
-- name: UpdateUser :one
UPDATE users
SET name = coalesce(sqlc.narg('name'), name),
email = coalesce(sqlc.narg('email'), email),
username = coalesce(sqlc.narg('username'), username),
passwordHash = coalesce(sqlc.narg('passwordHash'), passwordHash),
tokenKey = coalesce(sqlc.narg('tokenKey'), tokenKey),
avatar = coalesce(sqlc.narg('avatar'), avatar),
active = coalesce(sqlc.narg('active'), active),
lastResetSentAt = coalesce(sqlc.narg('lastResetSentAt'), lastResetSentAt),
lastVerificationSentAt = coalesce(sqlc.narg('lastVerificationSentAt'), lastVerificationSentAt)
WHERE id = @id
AND id != 'system'
RETURNING *;
-- name: DeleteUser :exec
DELETE
FROM users
WHERE id = @id
AND id != 'system';
------------------------------------------------------------------
-- name: InsertWebhook :one
INSERT INTO webhooks (id, name, collection, destination, created, updated)
VALUES (@id, @name, @collection, @destination, @created, @updated)
RETURNING *;
-- name: CreateWebhook :one
INSERT INTO webhooks (name, collection, destination)
VALUES (@name, @collection, @destination)
RETURNING *;
-- name: UpdateWebhook :one
UPDATE webhooks
SET name = coalesce(sqlc.narg('name'), name),
collection = coalesce(sqlc.narg('collection'), collection),
destination = coalesce(sqlc.narg('destination'), destination)
WHERE id = @id
RETURNING *;
-- name: DeleteWebhook :exec
DELETE
FROM webhooks
WHERE id = @id;
------------------------------------------------------------------
-- name: InsertGroup :one
INSERT INTO groups (id, name, permissions, created, updated)
VALUES (@id, @name, @permissions, @created, @updated)
RETURNING *;
-- name: CreateGroup :one
INSERT INTO groups (name, permissions)
VALUES (@name, @permissions)
RETURNING *;
-- name: UpdateGroup :one
UPDATE groups
SET name = coalesce(sqlc.narg('name'), name),
permissions = coalesce(sqlc.narg('permissions'), permissions)
WHERE id = @id
RETURNING *;
-- name: DeleteGroup :exec
DELETE
FROM groups
WHERE id = @id;
-- name: AssignGroupToUser :exec
INSERT INTO user_groups (user_id, group_id)
VALUES (@user_id, @group_id);
-- name: RemoveGroupFromUser :exec
DELETE
FROM user_groups
WHERE user_id = @user_id
AND group_id = @group_id;
-- name: AssignParentGroup :exec
INSERT INTO group_inheritance (parent_group_id, child_group_id)
VALUES (@parent_group_id, @child_group_id);
-- name: RemoveParentGroup :exec
DELETE
FROM group_inheritance
WHERE parent_group_id = @parent_group_id
AND child_group_id = @child_group_id;
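
The Update statements above all follow the same partial-update pattern: coalesce(sqlc.narg('x'), x) keeps the stored value whenever the caller passes NULL, so only the fields that are explicitly set change. A hedged sketch of closing a ticket this way; the UpdateTicketParams name, its field names, and the return type assume sqlc's usual naming and should be checked against the generated code:

package example

import (
	"context"

	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
	"github.com/SecurityBrewery/catalyst/app/pointer"
)

// closeTicket sets only the open flag; every nil field falls back to the
// current column value through the coalesce(sqlc.narg(...), column) expressions.
func closeTicket(ctx context.Context, queries *sqlc.Queries, id string) (sqlc.Ticket, error) {
	return queries.UpdateTicket(ctx, sqlc.UpdateTicketParams{
		ID:   id,
		Open: pointer.Pointer(false), // assumed to be *bool in the generated params
	})
}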

View File

@@ -1,27 +0,0 @@
package app
import (
"github.com/pocketbase/pocketbase/core"
"github.com/spf13/cobra"
"github.com/SecurityBrewery/catalyst/fakedata"
)
func fakeDataCmd(app core.App) *cobra.Command {
var userCount, ticketCount int
cmd := &cobra.Command{
Use: "fake-data",
Run: func(_ *cobra.Command, _ []string) {
if err := fakedata.Generate(app, userCount, ticketCount); err != nil {
app.Logger().Error(err.Error())
}
},
}
cmd.PersistentFlags().IntVar(&userCount, "users", 10, "Number of users to generate")
cmd.PersistentFlags().IntVar(&ticketCount, "tickets", 100, "Number of tickets to generate")
return cmd
}

View File

@@ -1,124 +0,0 @@
package app
import (
"slices"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/models"
"github.com/spf13/cobra"
"github.com/SecurityBrewery/catalyst/migrations"
)
func HasFlag(app core.App, flag string) bool {
records, err := app.Dao().FindRecordsByExpr(migrations.FeatureCollectionName, dbx.HashExp{"name": flag})
if err != nil {
app.Logger().Error(err.Error())
return false
}
for _, r := range records {
if r.GetString("name") == flag {
return true
}
}
return false
}
func Flags(app core.App) ([]string, error) {
records, err := app.Dao().FindRecordsByExpr(migrations.FeatureCollectionName)
if err != nil {
return nil, err
}
flags := make([]string, 0, len(records))
for _, r := range records {
flags = append(flags, r.GetString("name"))
}
return flags, nil
}
func SetFlags(app core.App, args []string) error {
featureCollection, err := app.Dao().FindCollectionByNameOrId(migrations.FeatureCollectionName)
if err != nil {
return err
}
featureRecords, err := app.Dao().FindRecordsByExpr(migrations.FeatureCollectionName)
if err != nil {
return err
}
var existingFlags []string //nolint:prealloc
for _, featureRecord := range featureRecords {
// remove feature flags that are not in the args
if !slices.Contains(args, featureRecord.GetString("name")) {
if err := app.Dao().DeleteRecord(featureRecord); err != nil {
return err
}
continue
}
existingFlags = append(existingFlags, featureRecord.GetString("name"))
}
for _, arg := range args {
if slices.Contains(existingFlags, arg) {
continue
}
// add feature flags that are not in the args
record := models.NewRecord(featureCollection)
record.Set("name", arg)
if err := app.Dao().SaveRecord(record); err != nil {
return err
}
}
return nil
}
func setFeatureFlagsCmd(app core.App) *cobra.Command {
return &cobra.Command{
Use: "set-feature-flags",
Run: func(_ *cobra.Command, args []string) {
if err := SetFlags(app, args); err != nil {
app.Logger().Error(err.Error())
}
},
}
}
func setAppURL(app core.App) *cobra.Command {
return &cobra.Command{
Use: "set-app-url",
Run: func(_ *cobra.Command, args []string) {
if len(args) != 1 {
app.Logger().Error("missing app url")
return
}
settings, err := app.Settings().Clone()
if err != nil {
app.Logger().Error(err.Error())
return
}
settings.Meta.AppUrl = args[0]
if err := app.Dao().SaveSettings(settings); err != nil {
app.Logger().Error(err.Error())
}
},
}
}

View File

@@ -1,69 +0,0 @@
package app_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app"
catalystTesting "github.com/SecurityBrewery/catalyst/testing"
)
func TestHasFlag(t *testing.T) {
t.Parallel()
catalystApp, _, cleanup := catalystTesting.App(t)
defer cleanup()
// stage 1
assert.False(t, app.HasFlag(catalystApp, "test"))
// stage 2
require.NoError(t, app.SetFlags(catalystApp, []string{"test"}))
assert.True(t, app.HasFlag(catalystApp, "test"))
}
func Test_flags(t *testing.T) {
t.Parallel()
catalystApp, _, cleanup := catalystTesting.App(t)
defer cleanup()
got, err := app.Flags(catalystApp)
require.NoError(t, err)
want := []string{}
assert.ElementsMatch(t, want, got)
}
func Test_setFlags(t *testing.T) {
t.Parallel()
catalystApp, _, cleanup := catalystTesting.App(t)
defer cleanup()
// stage 1
require.NoError(t, app.SetFlags(catalystApp, []string{"test"}))
got, err := app.Flags(catalystApp)
require.NoError(t, err)
assert.ElementsMatch(t, []string{"test"}, got)
// stage 2
require.NoError(t, app.SetFlags(catalystApp, []string{"test2"}))
got, err = app.Flags(catalystApp)
require.NoError(t, err)
assert.ElementsMatch(t, []string{"test2"}, got)
// stage 3
require.NoError(t, app.SetFlags(catalystApp, []string{"test", "test2"}))
got, err = app.Flags(catalystApp)
require.NoError(t, err)
assert.ElementsMatch(t, []string{"test", "test2"}, got)
}

17
app/hook/hook.go Normal file
View File

@@ -0,0 +1,17 @@
package hook
import "context"
type Hook struct {
subscribers []func(ctx context.Context, table string, record any)
}
func (h *Hook) Publish(ctx context.Context, table string, record any) {
for _, subscriber := range h.subscribers {
subscriber(ctx, table, record)
}
}
func (h *Hook) Subscribe(fn func(ctx context.Context, table string, record any)) {
h.subscribers = append(h.subscribers, fn)
}
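
Hook is a minimal in-process pub/sub: Publish calls every subscriber synchronously in registration order, and there is no locking, so subscriptions are expected to be set up before concurrent publishing starts. A small usage sketch:

package example

import (
	"context"
	"log/slog"

	"github.com/SecurityBrewery/catalyst/app/hook"
)

// logRecords attaches a subscriber that logs every published record.
func logRecords(h *hook.Hook) {
	h.Subscribe(func(ctx context.Context, table string, record any) {
		slog.InfoContext(ctx, "record event", "table", table, "record", record)
	})
}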

126
app/hook/hook_test.go Normal file
View File

@@ -0,0 +1,126 @@
package hook
import (
"context"
"testing"
)
func TestHook_Publish(t *testing.T) {
t.Parallel()
type fields struct {
subscribers []func(ctx context.Context, table string, record any)
}
type args struct {
table string
record any
}
var called bool
subscriber := func(_ context.Context, _ string, _ any) {
called = true
}
tests := []struct {
name string
fields fields
args args
want bool
}{
{
name: "publish with no subscribers",
fields: fields{
subscribers: nil,
},
args: args{
table: "test_table",
record: "test_record",
},
want: false,
},
{
name: "publish with one subscriber",
fields: fields{
subscribers: []func(ctx context.Context, table string, record any){subscriber},
},
args: args{
table: "test_table",
record: "test_record",
},
want: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
called = false
h := &Hook{
subscribers: tt.fields.subscribers,
}
h.Publish(t.Context(), tt.args.table, tt.args.record)
if called != tt.want {
t.Errorf("Hook.Publish() called = %v, want %v", called, tt.want)
}
})
}
}
func TestHook_Subscribe(t *testing.T) {
t.Parallel()
type fields struct {
subscribers []func(ctx context.Context, table string, record any)
}
type args struct {
fn func(ctx context.Context, table string, record any)
}
subscriber := func(_ context.Context, _ string, _ any) {}
tests := []struct {
name string
fields fields
args args
want int
}{
{
name: "subscribe to empty hook",
fields: fields{
subscribers: nil,
},
args: args{
fn: subscriber,
},
want: 1,
},
{
name: "subscribe to hook with existing subscriber",
fields: fields{
subscribers: []func(ctx context.Context, table string, record any){subscriber},
},
args: args{
fn: subscriber,
},
want: 2,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
h := &Hook{
subscribers: tt.fields.subscribers,
}
h.Subscribe(tt.args.fn)
if got := len(h.subscribers); got != tt.want {
t.Errorf("Hook.Subscribe() subscriber count = %v, want %v", got, tt.want)
}
})
}
}

25
app/hook/hooks.go Normal file
View File

@@ -0,0 +1,25 @@
package hook
type Hooks struct {
OnRecordsListRequest *Hook
OnRecordViewRequest *Hook
OnRecordBeforeCreateRequest *Hook
OnRecordAfterCreateRequest *Hook
OnRecordBeforeUpdateRequest *Hook
OnRecordAfterUpdateRequest *Hook
OnRecordBeforeDeleteRequest *Hook
OnRecordAfterDeleteRequest *Hook
}
func NewHooks() *Hooks {
return &Hooks{
OnRecordsListRequest: &Hook{},
OnRecordViewRequest: &Hook{},
OnRecordBeforeCreateRequest: &Hook{},
OnRecordAfterCreateRequest: &Hook{},
OnRecordBeforeUpdateRequest: &Hook{},
OnRecordAfterUpdateRequest: &Hook{},
OnRecordBeforeDeleteRequest: &Hook{},
OnRecordAfterDeleteRequest: &Hook{},
}
}

109
app/mail/mail.go Normal file
View File

@@ -0,0 +1,109 @@
package mail
import (
"cmp"
"context"
"fmt"
"log/slog"
"github.com/wneessen/go-mail"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/settings"
)
type Mailer struct {
queries *sqlc.Queries
}
func New(queries *sqlc.Queries) *Mailer {
return &Mailer{
queries: queries,
}
}
func (m *Mailer) Send(ctx context.Context, to, subject, plainTextBody, htmlBody string) error {
settings, err := settings.Load(ctx, m.queries)
if err != nil {
return fmt.Errorf("failed to load settings: %w", err)
}
if !settings.SMTP.Enabled {
return fmt.Errorf("SMTP is not enabled in settings")
}
if settings.SMTP.Host == "" || settings.SMTP.Username == "" || settings.SMTP.Password == "" {
return fmt.Errorf("SMTP settings are not configured properly: host, username, and password must be set")
}
client, err := mailClient(settings)
if err != nil {
return fmt.Errorf("failed to create mail client: %w", err)
}
message, err := createMessage(settings, to, subject, plainTextBody, htmlBody)
if err != nil {
return fmt.Errorf("failed to create mail message: %w", err)
}
if err := client.DialAndSend(message); err != nil {
return fmt.Errorf("failed to deliver mail: %w", err)
}
slog.InfoContext(ctx, "mail sent successfully", "to", to, "subject", subject)
return nil
}
func createMessage(settings *settings.Settings, to string, subject string, plainTextBody, htmlBody string) (*mail.Msg, error) {
message := mail.NewMsg()
if err := message.FromFormat(settings.Meta.SenderName, settings.Meta.SenderAddress); err != nil {
return nil, fmt.Errorf("failed to set FROM address: %w", err)
}
if err := message.To(to); err != nil {
return nil, fmt.Errorf("failed to set TO address: %w", err)
}
message.Subject(subject)
message.SetBodyString(mail.TypeTextPlain, plainTextBody)
if htmlBody != "" {
message.SetBodyString(mail.TypeTextHTML, htmlBody)
}
return message, nil
}
func mailClient(settings *settings.Settings) (*mail.Client, error) {
var authType mail.SMTPAuthType
if err := authType.UnmarshalString(cmp.Or(settings.SMTP.AuthMethod, "plain")); err != nil {
return nil, fmt.Errorf("failed to parse SMTP auth method: %w", err)
}
opts := []mail.Option{
mail.WithSMTPAuth(authType),
mail.WithUsername(settings.SMTP.Username),
mail.WithPassword(settings.SMTP.Password),
}
if settings.SMTP.Port != 0 {
opts = append(opts, mail.WithPort(settings.SMTP.Port))
}
if settings.SMTP.TLS {
opts = append(opts, mail.WithSSL())
}
if settings.SMTP.LocalName != "" {
opts = append(opts, mail.WithHELO(settings.SMTP.LocalName))
}
client, err := mail.NewClient(settings.SMTP.Host, opts...)
if err != nil {
return nil, fmt.Errorf("failed to create new mail delivery client: %w", err)
}
return client, nil
}
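
Mailer loads the SMTP settings on every Send, so delivery only works once SMTP is enabled and host, username, and password are configured. A usage sketch; the recipient, subject, and body are illustrative:

package example

import (
	"context"

	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
	"github.com/SecurityBrewery/catalyst/app/mail"
)

// notify sends a plain-text message; pass a non-empty htmlBody to add an HTML part.
func notify(ctx context.Context, queries *sqlc.Queries) error {
	return mail.New(queries).Send(ctx, "analyst@example.com", "Ticket assigned", "A ticket was assigned to you.", "")
}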

View File

@@ -1,72 +0,0 @@
package app
import (
"strings"
"github.com/pocketbase/dbx"
"github.com/pocketbase/pocketbase/core"
"github.com/pocketbase/pocketbase/migrations"
"github.com/pocketbase/pocketbase/migrations/logs"
"github.com/pocketbase/pocketbase/tools/migrate"
)
type migration struct {
db *dbx.DB
migrations migrate.MigrationsList
}
func MigrateDBs(app core.App) error {
for _, m := range []migration{
{db: app.DB(), migrations: migrations.AppMigrations},
{db: app.LogsDB(), migrations: logs.LogsMigrations},
} {
runner, err := migrate.NewRunner(m.db, m.migrations)
if err != nil {
return err
}
if _, err := runner.Up(); err != nil {
return err
}
}
return nil
}
func isIgnored(err error) bool {
// this fix ignores some errors that come from upstream migrations.
ignoreErrors := []string{
"1673167670_multi_match_migrate",
"1660821103_add_user_ip_column",
}
for _, ignore := range ignoreErrors {
if strings.Contains(err.Error(), ignore) {
return true
}
}
return false
}
func MigrateDBsDown(app core.App) error {
for _, m := range []migration{
{db: app.DB(), migrations: migrations.AppMigrations},
{db: app.LogsDB(), migrations: logs.LogsMigrations},
} {
runner, err := migrate.NewRunner(m.db, m.migrations)
if err != nil {
return err
}
if _, err := runner.Down(len(m.migrations.Items())); err != nil {
if isIgnored(err) {
continue
}
return err
}
}
return nil
}

View File

@@ -1,39 +0,0 @@
package app
import (
"errors"
"testing"
"github.com/stretchr/testify/assert"
)
func Test_isIgnored(t *testing.T) {
t.Parallel()
type args struct {
err error
}
tests := []struct {
name string
args args
want bool
}{
{
name: "error is ignored",
args: args{err: errors.New("1673167670_multi_match_migrate")},
want: true,
},
{
name: "error is not ignored",
args: args{err: errors.New("1673167670_multi_match")},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
assert.Equalf(t, tt.want, isIgnored(tt.args.err), "isIgnored(%v)", tt.args.err)
})
}
}

View File

@@ -1,23 +0,0 @@
package app_test
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app"
"github.com/SecurityBrewery/catalyst/migrations"
catalystTesting "github.com/SecurityBrewery/catalyst/testing"
)
func Test_MigrateDBsDown(t *testing.T) {
t.Parallel()
catalystApp, _, cleanup := catalystTesting.App(t)
defer cleanup()
_, err := catalystApp.Dao().FindCollectionByNameOrId(migrations.ReactionCollectionName)
require.NoError(t, err)
require.NoError(t, app.MigrateDBsDown(catalystApp))
}

View File

@@ -0,0 +1,56 @@
package migration
import (
"context"
"fmt"
"io/fs"
"os"
"path/filepath"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/upload"
)
type filesMigration struct{}
func newFilesMigration() func() (migration, error) {
return func() (migration, error) {
return filesMigration{}, nil
}
}
func (filesMigration) name() string { return "005_pocketbase_files_to_tusd" }
func (filesMigration) up(ctx context.Context, queries *sqlc.Queries, dir string, uploader *upload.Uploader) error {
oldUploadDir := filepath.Join(dir, "storage")
if _, err := os.Stat(oldUploadDir); os.IsNotExist(err) {
// If the old upload directory does not exist, we assume no migration is needed.
return nil
}
oldUploadRoot, err := os.OpenRoot(oldUploadDir)
if err != nil {
return fmt.Errorf("open old uploads root: %w", err)
}
files, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListFilesRow, error) {
return queries.ListFiles(ctx, sqlc.ListFilesParams{Limit: limit, Offset: offset})
})
if err != nil {
return fmt.Errorf("list files: %w", err)
}
for _, file := range files {
data, err := fs.ReadFile(oldUploadRoot.FS(), filepath.Join(file.ID, file.Blob))
if err != nil {
return fmt.Errorf("read file %s: %w", file.Blob, err)
}
if _, err := uploader.CreateFile(file.ID, file.Name, data); err != nil {
return err
}
}
return nil
}

View File

@@ -0,0 +1,49 @@
package migration
import (
"context"
"fmt"
"log/slog"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/upload"
)
type migration interface {
name() string
up(ctx context.Context, queries *sqlc.Queries, dir string, uploader *upload.Uploader) error
}
func Apply(ctx context.Context, queries *sqlc.Queries, dir string, uploader *upload.Uploader) error {
currentVersion, err := version(ctx, queries.WriteDB)
if err != nil {
return err
}
slog.InfoContext(ctx, "Current database version", "version", currentVersion)
migrations, err := migrations(currentVersion)
if err != nil {
return fmt.Errorf("failed to get migrations: %w", err)
}
if len(migrations) == 0 {
slog.InfoContext(ctx, "No migrations to apply")
return nil
}
for _, m := range migrations {
slog.InfoContext(ctx, "Applying migration", "name", m.name())
if err := m.up(ctx, queries, dir, uploader); err != nil {
return fmt.Errorf("migration %s failed: %w", m.name(), err)
}
}
if err := setVersion(ctx, queries.WriteDB, currentVersion+len(migrations)); err != nil {
return err
}
return nil
}

View File

@@ -0,0 +1,22 @@
package migration
import (
"testing"
_ "github.com/mattn/go-sqlite3"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/upload"
)
func TestApply(t *testing.T) {
t.Parallel()
dir := t.TempDir()
queries := database.TestDB(t, dir)
uploader, err := upload.New(dir)
require.NoError(t, err)
require.NoError(t, Apply(t.Context(), queries, dir, uploader))
}

View File

@@ -0,0 +1,34 @@
package migration
import "fmt"
var migrationGenerators = []func() (migration, error){
newSQLMigration("000_create_pocketbase_tables"),
newSQLMigration("001_create_tables"),
newFilesMigration(),
newSQLMigration("002_create_defaultdata"),
newSQLMigration("003_create_groups"),
}
func migrations(version int) ([]migration, error) {
var migrations []migration
if version < 0 || version > len(migrationGenerators) {
return nil, fmt.Errorf("invalid migration version: %d", version)
}
if version == len(migrationGenerators) {
return migrations, nil // No migrations to apply
}
for _, migrationFunc := range migrationGenerators[version:] {
migration, err := migrationFunc()
if err != nil {
return nil, fmt.Errorf("failed to create migration: %w", err)
}
migrations = append(migrations, migration)
}
return migrations, nil
}
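
Together with Apply, this list gives a simple positional scheme: PRAGMA user_version records how many generators have already run, migrations(version) returns the slice from that index onward, and the version is then advanced by the number applied. For example, a database at user_version 3 skips the first three entries, applies 002_create_defaultdata and 003_create_groups, and ends at user_version 5; a database already at 5 applies nothing.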

View File

@@ -0,0 +1,32 @@
package migration
import (
"testing"
"github.com/stretchr/testify/require"
)
func TestMigrations_Success(t *testing.T) {
t.Parallel()
migs, err := migrations(0)
require.NoError(t, err)
require.Len(t, migs, len(migrationGenerators))
}
func TestMigrations_VersionOffset(t *testing.T) {
t.Parallel()
migs, err := migrations(1)
require.NoError(t, err)
require.Len(t, migs, len(migrationGenerators)-1)
}
func TestMigrations_Error(t *testing.T) {
t.Parallel()
migs, err := migrations(999) // Invalid version
require.Error(t, err)
require.Nil(t, migs)
require.Contains(t, err.Error(), "invalid migration version: 999")
}

42
app/migration/sql.go Normal file
View File

@@ -0,0 +1,42 @@
package migration
import (
"context"
"fmt"
sqlmigrations "github.com/SecurityBrewery/catalyst/app/database/migrations"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/upload"
)
type sqlMigration struct {
sqlName string
upSQL string
}
func newSQLMigration(name string) func() (migration, error) {
return func() (migration, error) {
up, err := sqlmigrations.Migrations.ReadFile(name + ".up.sql")
if err != nil {
return nil, fmt.Errorf("failed to read up migration file for %s: %w", name, err)
}
return &sqlMigration{
sqlName: name,
upSQL: string(up),
}, nil
}
}
func (m sqlMigration) name() string {
return m.sqlName
}
func (m sqlMigration) up(ctx context.Context, queries *sqlc.Queries, _ string, _ *upload.Uploader) error {
_, err := queries.WriteDB.ExecContext(ctx, m.upSQL)
if err != nil {
return fmt.Errorf("migration %s up failed: %w", m.sqlName, err)
}
return nil
}

41
app/migration/sql_test.go Normal file
View File

@@ -0,0 +1,41 @@
package migration
import (
"testing"
_ "github.com/mattn/go-sqlite3"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/upload"
)
func TestSQLMigration_UpAndDown(t *testing.T) {
t.Parallel()
m := sqlMigration{
sqlName: "test_migration",
upSQL: "CREATE TABLE test_table (id INTEGER PRIMARY KEY, name TEXT);",
}
dir := t.TempDir()
queries := database.TestDB(t, dir)
uploader, err := upload.New(dir)
require.NoError(t, err)
// Test up
require.NoError(t, m.up(t.Context(), queries, dir, uploader))
// Table should exist
_, err = queries.WriteDB.ExecContext(t.Context(), "INSERT INTO test_table (name) VALUES ('foo')")
require.NoError(t, err)
}
func TestNewSQLMigration_FileNotFound(t *testing.T) {
t.Parallel()
f := newSQLMigration("does_not_exist")
_, err := f()
require.Error(t, err)
require.Contains(t, err.Error(), "failed to read up migration file")
}

27
app/migration/version.go Normal file
View File

@@ -0,0 +1,27 @@
package migration
import (
"context"
"database/sql"
"fmt"
)
func version(ctx context.Context, db *sql.DB) (int, error) {
// get the current version of the database
var currentVersion int
if err := db.QueryRowContext(ctx, "PRAGMA user_version").Scan(&currentVersion); err != nil {
return 0, fmt.Errorf("failed to get current database version: %w", err)
}
return currentVersion, nil
}
func setVersion(ctx context.Context, db *sql.DB, version int) error {
// Update the database version after successful migration
_, err := db.ExecContext(ctx, fmt.Sprintf("PRAGMA user_version = %d", version))
if err != nil {
return fmt.Errorf("failed to update database version: %w", err)
}
return nil
}

View File

@@ -0,0 +1,29 @@
package migration
import (
"database/sql"
"testing"
_ "github.com/mattn/go-sqlite3"
"github.com/stretchr/testify/require"
)
func TestVersionAndSetVersion(t *testing.T) {
t.Parallel()
db, err := sql.Open("sqlite3", ":memory:")
require.NoError(t, err, "failed to open in-memory db")
defer db.Close()
ver, err := version(t.Context(), db)
require.NoError(t, err, "failed to get version")
require.Equal(t, 0, ver, "expected version 0")
err = setVersion(t.Context(), db, 2)
require.NoError(t, err, "failed to set version")
ver, err = version(t.Context(), db)
require.NoError(t, err, "failed to get version after set")
require.Equal(t, 2, ver, "expected version 2")
}

8
app/openapi/config.yml Normal file
View File

@@ -0,0 +1,8 @@
package: openapi
generate:
chi-server: true
models: true
strict-server: true
output: app/openapi/gen.go
output-options:
skip-prune: true

7384
app/openapi/gen.go Normal file

File diff suppressed because it is too large

15
app/pointer/pointer.go Normal file
View File

@@ -0,0 +1,15 @@
package pointer
func Pointer[T any](v T) *T {
return &v
}
func Dereference[T any](v *T) T {
if v == nil {
var zero T
return zero
}
return *v
}
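
These generic helpers pair with the pointer fields sqlc emits for nullable columns: Pointer builds an optional value for update params, and Dereference reads one without a nil check. A tiny sketch against the generated Ticket model; the owner value is illustrative:

package example

import (
	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
	"github.com/SecurityBrewery/catalyst/app/pointer"
)

// ownerOrEmpty returns the ticket owner or "" when the nullable column is NULL.
func ownerOrEmpty(t sqlc.Ticket) string {
	return pointer.Dereference(t.Owner)
}

// newOwner is a *string suitable for an optional update field.
var newOwner = pointer.Pointer("u_example")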

View File

@@ -0,0 +1,34 @@
package pointer
import "testing"
func TestPointer(t *testing.T) {
t.Parallel()
v := 42
ptr := Pointer(v)
if ptr == nil {
t.Fatal("Pointer returned nil")
}
if *ptr != v {
t.Errorf("Pointer value = %v, want %v", *ptr, v)
}
}
func TestDereference(t *testing.T) {
t.Parallel()
v := 42
ptr := &v
if Dereference(ptr) != v {
t.Errorf("Dereference(ptr) = %v, want %v", Dereference(ptr), v)
}
var nilPtr *int
if Dereference(nilPtr) != 0 {
t.Errorf("Dereference(nil) = %v, want 0", Dereference(nilPtr))
}
}

View File

@@ -0,0 +1,72 @@
package action
import (
"context"
"encoding/json"
"fmt"
"time"
"github.com/SecurityBrewery/catalyst/app/auth"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/reaction/action/python"
"github.com/SecurityBrewery/catalyst/app/reaction/action/webhook"
)
func Run(ctx context.Context, url string, queries *sqlc.Queries, actionName string, actionData, payload json.RawMessage) ([]byte, error) {
action, err := decode(actionName, actionData)
if err != nil {
return nil, err
}
if a, ok := action.(authenticatedAction); ok {
token, err := systemToken(ctx, queries)
if err != nil {
return nil, fmt.Errorf("failed to get system token: %w", err)
}
a.SetEnv([]string{
"CATALYST_APP_URL=" + url,
"CATALYST_TOKEN=" + token,
})
}
return action.Run(ctx, payload)
}
type action interface {
Run(ctx context.Context, payload json.RawMessage) ([]byte, error)
}
type authenticatedAction interface {
SetEnv(env []string)
}
func decode(actionName string, actionData json.RawMessage) (action, error) {
switch actionName {
case "python":
var reaction python.Python
if err := json.Unmarshal(actionData, &reaction); err != nil {
return nil, err
}
return &reaction, nil
case "webhook":
var reaction webhook.Webhook
if err := json.Unmarshal(actionData, &reaction); err != nil {
return nil, err
}
return &reaction, nil
default:
return nil, fmt.Errorf("action %q not found", actionName)
}
}
func systemToken(ctx context.Context, queries *sqlc.Queries) (string, error) {
user, err := queries.SystemUser(ctx)
if err != nil {
return "", fmt.Errorf("failed to find system auth record: %w", err)
}
return auth.CreateAccessToken(ctx, &user, auth.All(), time.Hour, queries)
}
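
Run looks up the action implementation by name, decodes its configuration from actionData, and, for actions that implement SetEnv, injects CATALYST_APP_URL and a one-hour system token before executing. A sketch of invoking the webhook action directly; the endpoint URL is illustrative, while the "url" key matches the Webhook struct shown further down:

package example

import (
	"context"
	"encoding/json"

	"github.com/SecurityBrewery/catalyst/app/database/sqlc"
	"github.com/SecurityBrewery/catalyst/app/reaction/action"
)

// runWebhook posts an empty JSON payload through the webhook action.
func runWebhook(ctx context.Context, appURL string, queries *sqlc.Queries) ([]byte, error) {
	actionData := json.RawMessage(`{"url": "https://example.com/hook"}`)

	return action.Run(ctx, appURL, queries, "webhook", actionData, json.RawMessage(`{}`))
}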

View File

@@ -2,6 +2,7 @@ package python
import (
"context"
"encoding/json"
"errors"
"fmt"
"os"
@@ -20,7 +21,7 @@ func (a *Python) SetEnv(env []string) {
a.env = env
}
func (a *Python) Run(ctx context.Context, payload string) ([]byte, error) {
func (a *Python) Run(ctx context.Context, payload json.RawMessage) ([]byte, error) {
tempDir, err := os.MkdirTemp("", "catalyst_action")
if err != nil {
return nil, err
@@ -48,7 +49,7 @@ func (a *Python) Run(ctx context.Context, payload string) ([]byte, error) {
return nil, fmt.Errorf("failed to run install requirements, %w: %s", err, string(b))
}
b, err = a.pythonRunScript(ctx, tempDir, payload)
b, err = a.pythonRunScript(ctx, tempDir, string(payload))
if err != nil {
var ee *exec.ExitError
if errors.As(err, &ee) {

View File

@@ -1,12 +1,12 @@
package python_test
import (
"context"
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/SecurityBrewery/catalyst/reaction/action/python"
"github.com/SecurityBrewery/catalyst/app/reaction/action/python"
)
func TestPython_Run(t *testing.T) {
@@ -89,13 +89,13 @@ func TestPython_Run(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
ctx := context.Background()
ctx := t.Context()
a := &python.Python{
Requirements: tt.fields.Requirements,
Script: tt.fields.Script,
}
got, err := a.Run(ctx, tt.args.payload)
got, err := a.Run(ctx, json.RawMessage(tt.args.payload))
tt.wantErr(t, err)
assert.Equal(t, tt.want, got)

View File

@@ -5,7 +5,7 @@ import (
"io"
"testing"
"github.com/SecurityBrewery/catalyst/reaction/action/webhook"
"github.com/SecurityBrewery/catalyst/app/reaction/action/webhook"
)
func TestEncodeBody(t *testing.T) {

View File

@@ -1,10 +1,10 @@
package webhook
import (
"bytes"
"context"
"encoding/json"
"net/http"
"strings"
)
type Webhook struct {
@@ -12,8 +12,8 @@ type Webhook struct {
URL string `json:"url"`
}
func (a *Webhook) Run(ctx context.Context, payload string) ([]byte, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodPost, a.URL, strings.NewReader(payload))
func (a *Webhook) Run(ctx context.Context, payload json.RawMessage) ([]byte, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodPost, a.URL, bytes.NewReader(payload))
if err != nil {
return nil, err
}

View File

@@ -1,7 +1,6 @@
package webhook_test
import (
"context"
"encoding/json"
"net/http"
"testing"
@@ -11,18 +10,18 @@ import (
"github.com/stretchr/testify/require"
"github.com/tidwall/sjson"
"github.com/SecurityBrewery/catalyst/reaction/action/webhook"
catalystTesting "github.com/SecurityBrewery/catalyst/testing"
"github.com/SecurityBrewery/catalyst/app/reaction/action/webhook"
testing2 "github.com/SecurityBrewery/catalyst/testing"
)
func TestWebhook_Run(t *testing.T) {
t.Parallel()
server := catalystTesting.NewRecordingServer()
server := testing2.NewRecordingServer()
go http.ListenAndServe("127.0.0.1:12347", server) //nolint:gosec,errcheck
if err := catalystTesting.WaitForStatus("http://127.0.0.1:12347/health", http.StatusOK, 5*time.Second); err != nil {
if err := testing2.WaitForStatus("http://127.0.0.1:12347/health", http.StatusOK, 5*time.Second); err != nil {
t.Fatal(err)
}
@@ -54,10 +53,10 @@ func TestWebhook_Run(t *testing.T) {
want: map[string]any{
"statusCode": 200,
"headers": map[string]any{
"Content-Length": []any{"14"},
"Content-Length": []any{"13"},
"Content-Type": []any{"application/json; charset=UTF-8"},
},
"body": "{\"test\":true}\n",
"body": "{\"test\":true}",
"isBase64Encoded": false,
},
wantErr: assert.NoError,
@@ -67,13 +66,11 @@ func TestWebhook_Run(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
ctx := context.Background()
a := &webhook.Webhook{
Headers: tt.fields.Headers,
URL: tt.fields.URL,
}
got, err := a.Run(ctx, tt.args.payload)
got, err := a.Run(t.Context(), json.RawMessage(tt.args.payload))
tt.wantErr(t, err)
want, err := json.Marshal(tt.want)

View File

@@ -0,0 +1,113 @@
package schedule
import (
"context"
"encoding/json"
"errors"
"fmt"
"log/slog"
"github.com/go-co-op/gocron/v2"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/reaction/action"
"github.com/SecurityBrewery/catalyst/app/settings"
)
type Scheduler struct {
scheduler gocron.Scheduler
queries *sqlc.Queries
}
type Schedule struct {
Expression string `json:"expression"`
}
func New(ctx context.Context, queries *sqlc.Queries) (*Scheduler, error) {
innerScheduler, err := gocron.NewScheduler()
if err != nil {
return nil, fmt.Errorf("failed to create scheduler: %w", err)
}
scheduler := &Scheduler{
scheduler: innerScheduler,
queries: queries,
}
if err := scheduler.loadJobs(ctx); err != nil {
return nil, fmt.Errorf("failed to load jobs: %w", err)
}
innerScheduler.Start()
return scheduler, nil
}
func (s *Scheduler) AddReaction(reaction *sqlc.Reaction) error {
var schedule Schedule
if err := json.Unmarshal(reaction.Triggerdata, &schedule); err != nil {
return fmt.Errorf("failed to unmarshal schedule data: %w", err)
}
_, err := s.scheduler.NewJob(
gocron.CronJob(schedule.Expression, false),
gocron.NewTask(
func(ctx context.Context) {
settings, err := settings.Load(ctx, s.queries)
if err != nil {
slog.ErrorContext(ctx, "Failed to load settings", "error", err)
return
}
_, err = action.Run(ctx, settings.Meta.AppURL, s.queries, reaction.Action, reaction.Actiondata, json.RawMessage("{}"))
if err != nil {
slog.ErrorContext(ctx, "Failed to run schedule reaction", "error", err, "reaction_id", reaction.ID)
}
},
),
gocron.WithTags(reaction.ID),
)
if err != nil {
return fmt.Errorf("failed to create new job for reaction %s: %w", reaction.ID, err)
}
return nil
}
func (s *Scheduler) RemoveReaction(id string) {
s.scheduler.RemoveByTags(id)
}
func (s *Scheduler) loadJobs(ctx context.Context) error {
reactions, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListReactionsByTriggerRow, error) {
return s.queries.ListReactionsByTrigger(ctx, sqlc.ListReactionsByTriggerParams{Trigger: "schedule", Limit: limit, Offset: offset})
})
if err != nil {
return fmt.Errorf("failed to find schedule reaction: %w", err)
}
if len(reactions) == 0 {
return nil
}
var errs []error
for _, reaction := range reactions {
if err := s.AddReaction(&sqlc.Reaction{
Action: reaction.Action,
Actiondata: reaction.Actiondata,
Created: reaction.Created,
ID: reaction.ID,
Name: reaction.Name,
Trigger: reaction.Trigger,
Triggerdata: reaction.Triggerdata,
Updated: reaction.Updated,
}); err != nil {
errs = append(errs, fmt.Errorf("failed to add reaction %s: %w", reaction.ID, err))
}
}
return errors.Join(errs...)
}
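For reference, a schedule reaction keeps its cron expression in Triggerdata as JSON ({"expression": "..."}). Below is a minimal standalone sketch of the same gocron pattern that AddReaction uses; the trigger JSON and the tag are illustrative values, not taken from this change:

package main

import (
	"encoding/json"
	"fmt"
	"time"

	"github.com/go-co-op/gocron/v2"
)

// Schedule mirrors the trigger data shape used by the scheduler above.
type Schedule struct {
	Expression string `json:"expression"`
}

func main() {
	// Hypothetical Triggerdata for a schedule reaction.
	raw := []byte(`{"expression": "*/1 * * * *"}`)

	var schedule Schedule
	if err := json.Unmarshal(raw, &schedule); err != nil {
		panic(err)
	}

	s, err := gocron.NewScheduler()
	if err != nil {
		panic(err)
	}

	// Same pattern as Scheduler.AddReaction: a cron job tagged with the reaction ID.
	if _, err := s.NewJob(
		gocron.CronJob(schedule.Expression, false),
		gocron.NewTask(func() { fmt.Println("reaction would run here") }),
		gocron.WithTags("hypothetical-reaction-id"),
	); err != nil {
		panic(err)
	}

	s.Start()
	time.Sleep(2 * time.Minute) // let the job fire at least once
	_ = s.Shutdown()
}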

app/reaction/trigger.go

@@ -0,0 +1,17 @@
package reaction
import (
"github.com/go-chi/chi/v5"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/hook"
reactionHook "github.com/SecurityBrewery/catalyst/app/reaction/trigger/hook"
"github.com/SecurityBrewery/catalyst/app/reaction/trigger/webhook"
)
func BindHooks(hooks *hook.Hooks, router chi.Router, queries *sqlc.Queries, test bool) error {
reactionHook.BindHooks(hooks, queries, test)
webhook.BindHooks(router, queries)
return nil
}


@@ -0,0 +1,122 @@
package hook
import (
"context"
"encoding/json"
"errors"
"fmt"
"log/slog"
"slices"
"github.com/SecurityBrewery/catalyst/app/auth/usercontext"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/hook"
"github.com/SecurityBrewery/catalyst/app/reaction/action"
"github.com/SecurityBrewery/catalyst/app/settings"
"github.com/SecurityBrewery/catalyst/app/webhook"
)
type Hook struct {
Collections []string `json:"collections"`
Events []string `json:"events"`
}
func BindHooks(hooks *hook.Hooks, queries *sqlc.Queries, test bool) {
hooks.OnRecordAfterCreateRequest.Subscribe(func(ctx context.Context, table string, record any) {
bindHook(ctx, queries, database.CreateAction, table, record, test)
})
hooks.OnRecordAfterUpdateRequest.Subscribe(func(ctx context.Context, table string, record any) {
bindHook(ctx, queries, database.UpdateAction, table, record, test)
})
hooks.OnRecordAfterDeleteRequest.Subscribe(func(ctx context.Context, table string, record any) {
bindHook(ctx, queries, database.DeleteAction, table, record, test)
})
}
func bindHook(ctx context.Context, queries *sqlc.Queries, event, collection string, record any, test bool) {
user, ok := usercontext.UserFromContext(ctx)
if !ok {
slog.ErrorContext(ctx, "failed to get user from session")
return
}
if !test {
go mustRunHook(context.Background(), queries, collection, event, record, user) //nolint:contextcheck
} else {
mustRunHook(ctx, queries, collection, event, record, user)
}
}
func mustRunHook(ctx context.Context, queries *sqlc.Queries, collection, event string, record any, auth *sqlc.User) {
if err := runHook(ctx, queries, collection, event, record, auth); err != nil {
slog.ErrorContext(ctx, fmt.Sprintf("failed to run hook reaction: %v", err))
}
}
func runHook(ctx context.Context, queries *sqlc.Queries, collection, event string, record any, auth *sqlc.User) error {
payload, err := json.Marshal(&webhook.Payload{
Action: event,
Collection: collection,
Record: record,
Auth: auth,
Admin: nil,
})
if err != nil {
return fmt.Errorf("failed to marshal webhook payload: %w", err)
}
hooks, err := findByHookTrigger(ctx, queries, collection, event)
if err != nil {
return fmt.Errorf("failed to find hook by trigger: %w", err)
}
if len(hooks) == 0 {
return nil
}
settings, err := settings.Load(ctx, queries)
if err != nil {
return fmt.Errorf("failed to load settings: %w", err)
}
var errs []error
for _, hook := range hooks {
_, err = action.Run(ctx, settings.Meta.AppURL, queries, hook.Action, hook.Actiondata, payload)
if err != nil {
errs = append(errs, fmt.Errorf("failed to run hook reaction: %w", err))
}
}
return errors.Join(errs...)
}
func findByHookTrigger(ctx context.Context, queries *sqlc.Queries, collection, event string) ([]*sqlc.ListReactionsByTriggerRow, error) {
reactions, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListReactionsByTriggerRow, error) {
return queries.ListReactionsByTrigger(ctx, sqlc.ListReactionsByTriggerParams{Trigger: "hook", Limit: limit, Offset: offset})
})
if err != nil {
return nil, fmt.Errorf("failed to find hook reaction: %w", err)
}
if len(reactions) == 0 {
return nil, nil
}
var matchedRecords []*sqlc.ListReactionsByTriggerRow
for _, reaction := range reactions {
var hook Hook
if err := json.Unmarshal(reaction.Triggerdata, &hook); err != nil {
return nil, err
}
if slices.Contains(hook.Collections, collection) && slices.Contains(hook.Events, event) {
matchedRecords = append(matchedRecords, &reaction)
}
}
return matchedRecords, nil
}
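For reference, a hook reaction's Triggerdata lists the collections and events it listens to, and findByHookTrigger fires the reaction only when both match. A minimal standalone sketch of that matching logic follows; the collection and event names are illustrative:

package main

import (
	"encoding/json"
	"fmt"
	"slices"
)

// Hook mirrors the trigger data shape used by findByHookTrigger above.
type Hook struct {
	Collections []string `json:"collections"`
	Events      []string `json:"events"`
}

func main() {
	// Hypothetical Triggerdata for a hook reaction.
	raw := []byte(`{"collections": ["tickets"], "events": ["create", "update"]}`)

	var hook Hook
	if err := json.Unmarshal(raw, &hook); err != nil {
		panic(err)
	}

	// The reaction runs only if both the collection and the event match.
	collection, event := "tickets", "create"
	matched := slices.Contains(hook.Collections, collection) && slices.Contains(hook.Events, event)

	fmt.Println("reaction would run:", matched) // true
}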


@@ -1,7 +1,6 @@
package webhook
import (
"encoding/json"
"net/http"
"net/url"
)
@@ -14,10 +13,3 @@ type Request struct {
Body string `json:"body"`
IsBase64Encoded bool `json:"isBase64Encoded"`
}
// IsJSON checks if the data is JSON.
func IsJSON(data []byte) bool {
var msg json.RawMessage
return json.Unmarshal(data, &msg) == nil
}


@@ -0,0 +1,162 @@
package webhook
import (
"context"
"encoding/base64"
"encoding/json"
"fmt"
"log/slog"
"net/http"
"strings"
"github.com/go-chi/chi/v5"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/reaction/action"
"github.com/SecurityBrewery/catalyst/app/reaction/action/webhook"
"github.com/SecurityBrewery/catalyst/app/settings"
)
type Webhook struct {
Token string `json:"token"`
Path string `json:"path"`
}
const prefix = "/reaction/"
func BindHooks(router chi.Router, queries *sqlc.Queries) {
router.HandleFunc(prefix+"*", handle(queries))
}
func handle(queries *sqlc.Queries) http.HandlerFunc {
return func(w http.ResponseWriter, r *http.Request) {
reaction, payload, status, err := parseRequest(queries, r)
if err != nil {
http.Error(w, err.Error(), status)
return
}
settings, err := settings.Load(r.Context(), queries)
if err != nil {
http.Error(w, "failed to load settings: "+err.Error(), http.StatusInternalServerError)
return
}
output, err := action.Run(r.Context(), settings.Meta.AppURL, queries, reaction.Action, reaction.Actiondata, payload)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
if err := writeOutput(w, output); err != nil {
slog.ErrorContext(r.Context(), "failed to write output", "error", err.Error())
}
}
}
func parseRequest(queries *sqlc.Queries, r *http.Request) (*sqlc.ListReactionsByTriggerRow, []byte, int, error) {
if !strings.HasPrefix(r.URL.Path, prefix) {
return nil, nil, http.StatusNotFound, fmt.Errorf("wrong prefix")
}
reactionName := strings.TrimPrefix(r.URL.Path, prefix)
reaction, trigger, found, err := findByWebhookTrigger(r.Context(), queries, reactionName)
if err != nil {
return nil, nil, http.StatusNotFound, err
}
if !found {
return nil, nil, http.StatusNotFound, fmt.Errorf("reaction not found")
}
if trigger.Token != "" {
auth := r.Header.Get("Authorization")
if !strings.HasPrefix(auth, "Bearer ") {
return nil, nil, http.StatusUnauthorized, fmt.Errorf("missing token")
}
if trigger.Token != strings.TrimPrefix(auth, "Bearer ") {
return nil, nil, http.StatusUnauthorized, fmt.Errorf("invalid token")
}
}
body, isBase64Encoded := webhook.EncodeBody(r.Body)
payload, err := json.Marshal(&Request{
Method: r.Method,
Path: r.URL.EscapedPath(),
Headers: r.Header,
Query: r.URL.Query(),
Body: body,
IsBase64Encoded: isBase64Encoded,
})
if err != nil {
return nil, nil, http.StatusInternalServerError, err
}
return reaction, payload, http.StatusOK, nil
}
func findByWebhookTrigger(ctx context.Context, queries *sqlc.Queries, path string) (*sqlc.ListReactionsByTriggerRow, *Webhook, bool, error) {
reactions, err := database.PaginateItems(ctx, func(ctx context.Context, offset, limit int64) ([]sqlc.ListReactionsByTriggerRow, error) {
return queries.ListReactionsByTrigger(ctx, sqlc.ListReactionsByTriggerParams{Trigger: "webhook", Limit: limit, Offset: offset})
})
if err != nil {
return nil, nil, false, err
}
if len(reactions) == 0 {
return nil, nil, false, nil
}
for _, reaction := range reactions {
var webhook Webhook
if err := json.Unmarshal(reaction.Triggerdata, &webhook); err != nil {
return nil, nil, false, err
}
if webhook.Path == path {
return &reaction, &webhook, true, nil
}
}
return nil, nil, false, nil
}
func writeOutput(w http.ResponseWriter, output []byte) error {
var catalystResponse webhook.Response
if err := json.Unmarshal(output, &catalystResponse); err == nil && catalystResponse.StatusCode != 0 {
for key, values := range catalystResponse.Headers {
for _, value := range values {
w.Header().Add(key, value)
}
}
if catalystResponse.IsBase64Encoded {
output, err = base64.StdEncoding.DecodeString(catalystResponse.Body)
if err != nil {
return fmt.Errorf("error decoding base64 body: %w", err)
}
} else {
output = []byte(catalystResponse.Body)
}
}
if json.Valid(output) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
_, _ = w.Write(output)
} else {
w.Header().Set("Content-Type", "text/plain")
w.WriteHeader(http.StatusOK)
_, _ = w.Write(output)
}
return nil
}
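For reference, parseRequest above resolves reactions under the /reaction/ prefix by the Path in their trigger data and, if a Token is configured, expects it as a Bearer token. A minimal client sketch follows; the host, port, path, and token are illustrative, assuming a reaction with trigger data {"path": "my-hook", "token": "secret"}:

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// Hypothetical endpoint of a locally running Catalyst instance.
	req, err := http.NewRequest(http.MethodPost,
		"http://127.0.0.1:8080/reaction/my-hook",
		strings.NewReader(`{"hello":"world"}`))
	if err != nil {
		panic(err)
	}

	// Matches the optional token check in parseRequest.
	req.Header.Set("Authorization", "Bearer secret")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(body))
}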

app/rootstore/rootstore.go

@@ -0,0 +1,290 @@
package rootstore
import (
"context"
"crypto/rand"
"encoding/json"
"errors"
"io"
"io/fs"
"net/http"
"os"
"path/filepath"
"github.com/tus/tusd/v2/pkg/handler"
)
var (
defaultFilePerm = os.FileMode(0o664)
defaultDirectoryPerm = os.FileMode(0o754)
)
const (
// StorageKeyPath is the key of the path of uploaded file in handler.FileInfo.Storage.
StorageKeyPath = "Path"
// StorageKeyInfoPath is the key of the path of .info file in handler.FileInfo.Storage.
StorageKeyInfoPath = "InfoPath"
)
// RootStore is a file system based data store for tusd.
type RootStore struct {
root *os.Root
}
func New(root *os.Root) RootStore {
return RootStore{root: root}
}
// UseIn sets this store as the core data store in the passed composer and adds
// all possible extensions to it.
func (store RootStore) UseIn(composer *handler.StoreComposer) {
composer.UseCore(store)
composer.UseTerminater(store)
composer.UseConcater(store)
composer.UseLengthDeferrer(store)
composer.UseContentServer(store)
}
func (store RootStore) NewUpload(_ context.Context, info handler.FileInfo) (handler.Upload, error) {
if info.ID == "" {
info.ID = rand.Text()
}
// The .info file's location can directly be deduced from the upload ID
infoPath := store.infoPath(info.ID)
// The binary file's location might be modified by the pre-create hook.
var binPath string
if info.Storage != nil && info.Storage[StorageKeyPath] != "" {
binPath = info.Storage[StorageKeyPath]
} else {
binPath = store.defaultBinPath(info.ID)
}
info.Storage = map[string]string{
"Type": "rootstore",
StorageKeyPath: binPath,
StorageKeyInfoPath: infoPath,
}
_ = store.root.MkdirAll(filepath.Dir(binPath), defaultDirectoryPerm)
// Create binary file with no content
if err := store.root.WriteFile(binPath, nil, defaultFilePerm); err != nil {
return nil, err
}
upload := &fileUpload{
root: store.root,
info: info,
infoPath: infoPath,
binPath: binPath,
}
// writeInfo creates the file by itself if necessary
if err := upload.writeInfo(); err != nil {
return nil, err
}
return upload, nil
}
func (store RootStore) GetUpload(_ context.Context, id string) (handler.Upload, error) {
infoPath := store.infoPath(id)
data, err := fs.ReadFile(store.root.FS(), filepath.ToSlash(infoPath))
if err != nil {
if os.IsNotExist(err) {
// Interpret os.ErrNotExist as 404 Not Found
err = handler.ErrNotFound
}
return nil, err
}
var info handler.FileInfo
if err := json.Unmarshal(data, &info); err != nil {
return nil, err
}
// If the info file contains a custom path to the binary file, we use that. If not, we
// fall back to the default value (although the Path property should always be set in recent
// tusd versions).
var binPath string
if info.Storage != nil && info.Storage[StorageKeyPath] != "" {
// No filepath.Join here because the joining already happened in NewUpload. Duplicate joining
// with relative paths leads to incorrect paths.
binPath = info.Storage[StorageKeyPath]
} else {
binPath = store.defaultBinPath(info.ID)
}
stat, err := store.root.Stat(binPath)
if err != nil {
if os.IsNotExist(err) {
// Interpret os.ErrNotExist as 404 Not Found
err = handler.ErrNotFound
}
return nil, err
}
info.Offset = stat.Size()
return &fileUpload{
root: store.root,
info: info,
binPath: binPath,
infoPath: infoPath,
}, nil
}
func (store RootStore) AsTerminatableUpload(upload handler.Upload) handler.TerminatableUpload {
return upload.(*fileUpload) //nolint:forcetypeassert
}
func (store RootStore) AsLengthDeclarableUpload(upload handler.Upload) handler.LengthDeclarableUpload {
return upload.(*fileUpload) //nolint:forcetypeassert
}
func (store RootStore) AsConcatableUpload(upload handler.Upload) handler.ConcatableUpload {
return upload.(*fileUpload) //nolint:forcetypeassert
}
func (store RootStore) AsServableUpload(upload handler.Upload) handler.ServableUpload {
return upload.(*fileUpload) //nolint:forcetypeassert
}
// defaultBinPath returns the path to the file storing the binary data, if it is
// not customized using the pre-create hook.
func (store RootStore) defaultBinPath(id string) string {
return id
}
// infoPath returns the path to the .info file storing the file's info.
func (store RootStore) infoPath(id string) string {
return id + ".info"
}
type fileUpload struct {
root *os.Root
// info stores the current information about the upload
info handler.FileInfo
// infoPath is the path to the .info file
infoPath string
// binPath is the path to the binary file (which has no extension)
binPath string
}
func (upload *fileUpload) GetInfo(_ context.Context) (handler.FileInfo, error) {
return upload.info, nil
}
func (upload *fileUpload) WriteChunk(_ context.Context, _ int64, src io.Reader) (int64, error) {
file, err := upload.root.OpenFile(upload.binPath, os.O_WRONLY|os.O_APPEND, defaultFilePerm)
if err != nil {
return 0, err
}
// Avoid the use of defer file.Close() here to ensure no errors are lost
// See https://github.com/tus/tusd/issues/698.
n, err := io.Copy(file, src)
upload.info.Offset += n
if err != nil {
file.Close()
return n, err
}
return n, file.Close()
}
func (upload *fileUpload) GetReader(_ context.Context) (io.ReadCloser, error) {
return upload.root.Open(upload.binPath)
}
func (upload *fileUpload) Terminate(_ context.Context) error {
// We ignore errors indicating that the files cannot be found because we want
// to delete them anyway. The files might be removed by a cron job for cleaning up
// or some file might have been removed when tusd crashed during the termination.
err := upload.root.Remove(upload.binPath)
if err != nil && !errors.Is(err, os.ErrNotExist) {
return err
}
err = upload.root.Remove(upload.infoPath)
if err != nil && !errors.Is(err, os.ErrNotExist) {
return err
}
return nil
}
func (upload *fileUpload) ConcatUploads(_ context.Context, uploads []handler.Upload) (err error) {
file, err := upload.root.OpenFile(upload.binPath, os.O_WRONLY|os.O_APPEND, defaultFilePerm)
if err != nil {
return err
}
defer func() {
// Ensure that close error is propagated, if it occurs.
// See https://github.com/tus/tusd/issues/698.
cerr := file.Close()
if err == nil {
err = cerr
}
}()
for _, partialUpload := range uploads {
if err := partialUpload.(*fileUpload).appendTo(file); err != nil { //nolint:forcetypeassert
return err
}
}
return
}
func (upload *fileUpload) appendTo(file *os.File) error {
src, err := upload.root.Open(upload.binPath)
if err != nil {
return err
}
if _, err := io.Copy(file, src); err != nil {
src.Close()
return err
}
return src.Close()
}
func (upload *fileUpload) DeclareLength(_ context.Context, length int64) error {
upload.info.Size = length
upload.info.SizeIsDeferred = false
return upload.writeInfo()
}
// writeInfo updates the entire information. Everything will be overwritten.
func (upload *fileUpload) writeInfo() error {
data, err := json.Marshal(upload.info)
if err != nil {
return err
}
_ = upload.root.MkdirAll(filepath.Dir(upload.infoPath), defaultDirectoryPerm)
return upload.root.WriteFile(upload.infoPath, data, defaultFilePerm)
}
func (upload *fileUpload) FinishUpload(_ context.Context) error {
return nil
}
func (upload *fileUpload) ServeContent(_ context.Context, w http.ResponseWriter, r *http.Request) error {
http.ServeFileFS(w, r, upload.root.FS(), filepath.ToSlash(upload.binPath))
return nil
}
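For reference, RootStore is an os.Root-confined data store for tusd. A minimal sketch of plugging it into a tusd handler follows; the upload directory, listen address, and import path (inferred from the file location app/rootstore) are assumptions, and the actual wiring in the app's upload package is not shown in this diff:

package main

import (
	"log"
	"net/http"
	"os"

	"github.com/tus/tusd/v2/pkg/handler"

	"github.com/SecurityBrewery/catalyst/app/rootstore"
)

func main() {
	// Hypothetical upload directory; os.Root confines all store paths to it.
	if err := os.MkdirAll("./uploads", 0o750); err != nil {
		log.Fatal(err)
	}

	root, err := os.OpenRoot("./uploads")
	if err != nil {
		log.Fatal(err)
	}
	defer root.Close()

	composer := handler.NewStoreComposer()
	rootstore.New(root).UseIn(composer)

	h, err := handler.NewHandler(handler.Config{
		BasePath:      "/files/",
		StoreComposer: composer,
	})
	if err != nil {
		log.Fatal(err)
	}

	http.Handle("/files/", http.StripPrefix("/files/", h))
	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil)) //nolint:gosec
}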


@@ -0,0 +1,391 @@
package rootstore
import (
"io"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/tus/tusd/v2/pkg/handler"
)
// Test interface implementation of RootStore.
var (
_ handler.DataStore = RootStore{}
_ handler.TerminaterDataStore = RootStore{}
_ handler.ConcaterDataStore = RootStore{}
_ handler.LengthDeferrerDataStore = RootStore{}
)
func TestFSStore(t *testing.T) {
t.Parallel()
root, err := os.OpenRoot(t.TempDir())
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
// Create new upload
upload, err := store.NewUpload(ctx, handler.FileInfo{
Size: 42,
MetaData: map[string]string{
"hello": "world",
},
})
require.NoError(t, err)
assert.NotNil(t, upload)
// Check info without writing
info, err := upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 0, info.Offset)
assert.Equal(t, handler.MetaData{"hello": "world"}, info.MetaData)
assert.Len(t, info.Storage, 3)
assert.Equal(t, "rootstore", info.Storage["Type"])
assert.Equal(t, info.ID, info.Storage["Path"])
assert.Equal(t, info.ID+".info", info.Storage["InfoPath"])
// Write data to upload
bytesWritten, err := upload.WriteChunk(ctx, 0, strings.NewReader("hello world"))
require.NoError(t, err)
assert.EqualValues(t, len("hello world"), bytesWritten)
// Check new offset
info, err = upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 11, info.Offset)
// Read content
reader, err := upload.GetReader(ctx)
require.NoError(t, err)
content, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, "hello world", string(content))
reader.Close()
// Serve content
w := httptest.NewRecorder()
r := httptest.NewRequest(http.MethodGet, "/", nil)
r.Header.Set("Range", "bytes=0-4")
err = store.AsServableUpload(upload).ServeContent(t.Context(), w, r)
require.NoError(t, err)
assert.Equal(t, http.StatusPartialContent, w.Code)
assert.Equal(t, "5", w.Header().Get("Content-Length"))
assert.Equal(t, "text/plain; charset=utf-8", w.Header().Get("Content-Type"))
assert.Equal(t, "bytes 0-4/11", w.Header().Get("Content-Range"))
assert.NotEmpty(t, w.Header().Get("Last-Modified"))
assert.Equal(t, "hello", w.Body.String())
// Terminate upload
require.NoError(t, store.AsTerminatableUpload(upload).Terminate(ctx))
// Test if upload is deleted
upload, err = store.GetUpload(ctx, info.ID)
assert.Nil(t, upload)
assert.Equal(t, handler.ErrNotFound, err)
}
// TestFSStoreCreateDirectories tests whether an upload with a slash in its ID causes
// the correct directories to be created.
func TestFSStoreCreateDirectories(t *testing.T) {
t.Parallel()
tmp := t.TempDir()
root, err := os.OpenRoot(tmp)
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
// Create new upload
upload, err := store.NewUpload(ctx, handler.FileInfo{
ID: "hello/world/123",
Size: 42,
MetaData: map[string]string{
"hello": "world",
},
})
require.NoError(t, err)
assert.NotNil(t, upload)
// Check info without writing
info, err := upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 0, info.Offset)
assert.Equal(t, handler.MetaData{"hello": "world"}, info.MetaData)
assert.Len(t, info.Storage, 3)
assert.Equal(t, "rootstore", info.Storage["Type"])
assert.Equal(t, filepath.FromSlash(info.ID), info.Storage["Path"])
assert.Equal(t, filepath.FromSlash(info.ID+".info"), info.Storage["InfoPath"])
// Write data to upload
bytesWritten, err := upload.WriteChunk(ctx, 0, strings.NewReader("hello world"))
require.NoError(t, err)
assert.EqualValues(t, len("hello world"), bytesWritten)
// Check new offset
info, err = upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 11, info.Offset)
// Read content
reader, err := upload.GetReader(ctx)
require.NoError(t, err)
content, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, "hello world", string(content))
reader.Close()
// Check that the file and directory exists on disk
statInfo, err := os.Stat(filepath.Join(tmp, "hello/world/123"))
require.NoError(t, err)
assert.True(t, statInfo.Mode().IsRegular())
assert.EqualValues(t, 11, statInfo.Size())
statInfo, err = os.Stat(filepath.Join(tmp, "hello/world/"))
require.NoError(t, err)
assert.True(t, statInfo.Mode().IsDir())
// Terminate upload
require.NoError(t, store.AsTerminatableUpload(upload).Terminate(ctx))
// Test if upload is deleted
upload, err = store.GetUpload(ctx, info.ID)
assert.Nil(t, upload)
assert.Equal(t, handler.ErrNotFound, err)
}
func TestFSStoreNotFound(t *testing.T) {
t.Parallel()
root, err := os.OpenRoot(t.TempDir())
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
upload, err := store.GetUpload(ctx, "upload-that-does-not-exist")
require.Error(t, err)
assert.Equal(t, handler.ErrNotFound, err)
assert.Nil(t, upload)
}
func TestFSStoreConcatUploads(t *testing.T) {
t.Parallel()
tmp := t.TempDir()
root, err := os.OpenRoot(tmp)
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
// Create new upload to hold concatenated upload
finUpload, err := store.NewUpload(ctx, handler.FileInfo{Size: 9})
require.NoError(t, err)
assert.NotNil(t, finUpload)
finInfo, err := finUpload.GetInfo(ctx)
require.NoError(t, err)
finID := finInfo.ID
// Create three uploads for concatenating
partialUploads := make([]handler.Upload, 3)
contents := []string{
"abc",
"def",
"ghi",
}
for i := range 3 {
upload, err := store.NewUpload(ctx, handler.FileInfo{Size: 3})
require.NoError(t, err)
n, err := upload.WriteChunk(ctx, 0, strings.NewReader(contents[i]))
require.NoError(t, err)
assert.EqualValues(t, 3, n)
partialUploads[i] = upload
}
err = store.AsConcatableUpload(finUpload).ConcatUploads(ctx, partialUploads)
require.NoError(t, err)
// Check offset
finUpload, err = store.GetUpload(ctx, finID)
require.NoError(t, err)
info, err := finUpload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 9, info.Size)
assert.EqualValues(t, 9, info.Offset)
// Read content
reader, err := finUpload.GetReader(ctx)
require.NoError(t, err)
content, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, "abcdefghi", string(content))
reader.Close()
}
func TestFSStoreDeclareLength(t *testing.T) {
t.Parallel()
tmp := t.TempDir()
root, err := os.OpenRoot(tmp)
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
upload, err := store.NewUpload(ctx, handler.FileInfo{
Size: 0,
SizeIsDeferred: true,
})
require.NoError(t, err)
assert.NotNil(t, upload)
info, err := upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 0, info.Size)
assert.True(t, info.SizeIsDeferred)
err = store.AsLengthDeclarableUpload(upload).DeclareLength(ctx, 100)
require.NoError(t, err)
updatedInfo, err := upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 100, updatedInfo.Size)
assert.False(t, updatedInfo.SizeIsDeferred)
}
// TestFSStoreCustomRelativePath tests whether the upload's destination can be customized
// relative to the storage directory.
func TestFSStoreCustomRelativePath(t *testing.T) {
t.Parallel()
tmp := t.TempDir()
root, err := os.OpenRoot(tmp)
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
ctx := t.Context()
// Create new upload
upload, err := store.NewUpload(ctx, handler.FileInfo{
ID: "folder1/info",
Size: 42,
Storage: map[string]string{
"Path": "./folder2/bin",
},
})
require.NoError(t, err)
assert.NotNil(t, upload)
// Check info without writing
info, err := upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 0, info.Offset)
assert.Len(t, info.Storage, 3)
assert.Equal(t, "rootstore", info.Storage["Type"])
assert.Equal(t, filepath.FromSlash("./folder2/bin"), info.Storage["Path"])
assert.Equal(t, filepath.FromSlash("folder1/info.info"), info.Storage["InfoPath"])
// Write data to upload
bytesWritten, err := upload.WriteChunk(ctx, 0, strings.NewReader("hello world"))
require.NoError(t, err)
assert.EqualValues(t, len("hello world"), bytesWritten)
// Check new offset
info, err = upload.GetInfo(ctx)
require.NoError(t, err)
assert.EqualValues(t, 42, info.Size)
assert.EqualValues(t, 11, info.Offset)
// Read content
reader, err := upload.GetReader(ctx)
require.NoError(t, err)
content, err := io.ReadAll(reader)
require.NoError(t, err)
assert.Equal(t, "hello world", string(content))
reader.Close()
// Check that the output file and info file exist on disk
statInfo, err := os.Stat(filepath.Join(tmp, "folder2/bin"))
require.NoError(t, err)
assert.True(t, statInfo.Mode().IsRegular())
assert.EqualValues(t, 11, statInfo.Size())
statInfo, err = os.Stat(filepath.Join(tmp, "folder1/info.info"))
require.NoError(t, err)
assert.True(t, statInfo.Mode().IsRegular())
// Terminate upload
require.NoError(t, store.AsTerminatableUpload(upload).Terminate(ctx))
// Test if upload is deleted
upload, err = store.GetUpload(ctx, info.ID)
assert.Nil(t, upload)
assert.Equal(t, handler.ErrNotFound, err)
}
// TestFSStoreCustomAbsolutePath tests whether the upload's destination can be customized
// using an absolute path to the storage directory.
func TestFSStoreCustomAbsolutePath(t *testing.T) {
t.Parallel()
root, err := os.OpenRoot(t.TempDir())
require.NoError(t, err)
t.Cleanup(func() { root.Close() })
store := New(root)
// Create new upload, but the Path property points to a directory
// outside of the directory given to RootStore
binPath := filepath.Join(t.TempDir(), "dir/my-upload.bin")
_, err = store.NewUpload(t.Context(), handler.FileInfo{
ID: "my-upload",
Size: 42,
Storage: map[string]string{
"Path": binPath,
},
})
require.Error(t, err)
_, err = os.Stat(binPath)
require.Error(t, err)
}

app/router/demomode.go

@@ -0,0 +1,79 @@
package router
import (
"context"
"log/slog"
"net/http"
"slices"
"strings"
"github.com/SecurityBrewery/catalyst/app/database"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
)
func demoMode(queries *sqlc.Queries) func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if isCriticalPath(r) && isCriticalMethod(r) && isDemoMode(r.Context(), queries) {
http.Error(w, "Cannot modify reactions or files in demo mode", http.StatusForbidden)
return
}
next.ServeHTTP(w, r)
})
}
}
func isCriticalPath(r *http.Request) bool {
// Critical paths that must not be modified in demo mode
criticalPaths := []string{
"/api/files",
"/api/groups",
"/api/reactions",
"/api/settings",
"/api/users",
"/api/webhooks",
}
for _, path := range criticalPaths {
if strings.Contains(r.URL.Path, path) {
return true
}
}
return false
}
func isCriticalMethod(r *http.Request) bool {
return !slices.Contains([]string{http.MethodHead, http.MethodGet}, r.Method)
}
func isDemoMode(ctx context.Context, queries *sqlc.Queries) bool {
var demoMode bool
if err := database.Paginate(ctx, func(ctx context.Context, offset, limit int64) (nextPage bool, err error) {
slog.InfoContext(ctx, "Checking for demo mode", "offset", offset, "limit", limit)
features, err := queries.ListFeatures(ctx, sqlc.ListFeaturesParams{Offset: offset, Limit: limit})
if err != nil {
return false, err
}
for _, feature := range features {
if feature.Key == "demo" {
demoMode = true
return false, nil // Stop pagination if demo mode is found
}
}
return len(features) > 0, nil
}); err != nil {
slog.ErrorContext(ctx, "Failed to check demo mode", "error", err)
return false
}
return demoMode
}

app/router/demomode_test.go

@@ -0,0 +1,115 @@
package router
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/SecurityBrewery/catalyst/app/data"
)
func Test_isCriticalPath(t *testing.T) {
t.Parallel()
tests := []struct {
path string
want bool
}{
{"/api/reactions/1", true},
{"/api/files/1", true},
{"/api/other", false},
}
for _, tt := range tests {
req := httptest.NewRequest(http.MethodGet, tt.path, nil)
assert.Equal(t, tt.want, isCriticalPath(req))
}
}
func Test_isCriticalMethod(t *testing.T) {
t.Parallel()
tests := []struct {
method string
want bool
}{
{http.MethodPost, true},
{http.MethodPut, true},
{http.MethodGet, false},
{http.MethodHead, false},
}
for _, tt := range tests {
req := httptest.NewRequest(tt.method, "/", nil)
assert.Equal(t, tt.want, isCriticalMethod(req))
}
}
func Test_isDemoMode(t *testing.T) {
t.Parallel()
queries := data.NewTestDB(t, t.TempDir())
assert.False(t, isDemoMode(t.Context(), queries))
_, err := queries.CreateFeature(t.Context(), "demo")
require.NoError(t, err)
assert.True(t, isDemoMode(t.Context(), queries))
}
func Test_demoModeMiddleware(t *testing.T) {
t.Parallel()
queries := data.NewTestDB(t, t.TempDir())
mw := demoMode(queries)
nextCalled := false
next := http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
nextCalled = true
w.WriteHeader(http.StatusTeapot)
})
// not demo mode
rr := httptest.NewRecorder()
req := httptest.NewRequest(http.MethodPost, "/api/reactions", nil).WithContext(t.Context())
mw(next).ServeHTTP(rr, req)
assert.True(t, nextCalled)
assert.Equal(t, http.StatusTeapot, rr.Code)
// enable demo mode
_, err := queries.CreateFeature(t.Context(), "demo")
require.NoError(t, err)
nextCalled = false
rr = httptest.NewRecorder()
req = httptest.NewRequest(http.MethodPost, "/api/reactions", nil).WithContext(t.Context())
mw(next).ServeHTTP(rr, req)
assert.False(t, nextCalled)
assert.Equal(t, http.StatusForbidden, rr.Code)
// non critical path
nextCalled = false
rr = httptest.NewRecorder()
req = httptest.NewRequest(http.MethodPost, "/api/other", nil).WithContext(t.Context())
mw(next).ServeHTTP(rr, req)
assert.True(t, nextCalled)
assert.Equal(t, http.StatusTeapot, rr.Code)
}
func Test_handlers(t *testing.T) {
t.Parallel()
queries := data.NewTestDB(t, t.TempDir())
// healthHandler
healthRR := httptest.NewRecorder()
healthReq := httptest.NewRequest(http.MethodGet, "/health", nil).WithContext(t.Context())
healthHandler(queries)(healthRR, healthReq)
assert.Equal(t, http.StatusOK, healthRR.Code)
assert.Equal(t, "OK", healthRR.Body.String())
}

app/router/http.go

@@ -0,0 +1,37 @@
package router
import (
"net/http"
"net/http/httputil"
"net/url"
"os"
"strings"
"github.com/SecurityBrewery/catalyst/ui"
)
func staticFiles(w http.ResponseWriter, r *http.Request) {
if devServer := os.Getenv("UI_DEVSERVER"); devServer != "" {
u, _ := url.Parse(devServer)
r.Host = r.URL.Host
httputil.NewSingleHostReverseProxy(u).ServeHTTP(w, r)
return
}
vueStatic(w, r)
}
func vueStatic(w http.ResponseWriter, r *http.Request) {
handler := http.FileServer(http.FS(ui.UI()))
if strings.HasPrefix(r.URL.Path, "/ui/assets/") {
handler = http.StripPrefix("/ui", handler)
} else {
r.URL.Path = "/"
}
handler.ServeHTTP(w, r)
}
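For reference, staticFiles switches between the embedded UI build and a reverse proxy to a local dev server when UI_DEVSERVER is set. A minimal standalone sketch of the same proxy pattern follows; the dev-server address and listen address are illustrative:

package main

import (
	"log"
	"net/http"
	"net/http/httputil"
	"net/url"
	"os"
)

func main() {
	// Hypothetical dev-server address, e.g. a running Vite instance.
	_ = os.Setenv("UI_DEVSERVER", "http://localhost:5173")

	http.HandleFunc("/ui/", func(w http.ResponseWriter, r *http.Request) {
		devServer := os.Getenv("UI_DEVSERVER")
		if devServer == "" {
			http.NotFound(w, r) // the real handler would serve the embedded build here
			return
		}

		u, err := url.Parse(devServer)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		httputil.NewSingleHostReverseProxy(u).ServeHTTP(w, r)
	})

	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil)) //nolint:gosec
}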

app/router/http_test.go

@@ -0,0 +1,30 @@
package router
import (
"net/http"
"net/http/httptest"
"testing"
)
func TestStaticFiles_DevServer(t *testing.T) {
t.Setenv("UI_DEVSERVER", "http://localhost:1234")
rec := httptest.NewRecorder()
r := httptest.NewRequest(http.MethodGet, "/ui/assets/test.js", nil)
// This will try to proxy, but since the dev server isn't running, it should not panic
// We just want to make sure it doesn't crash
staticFiles(rec, r)
}
func TestStaticFiles_VueStatic(t *testing.T) {
t.Parallel()
rec := httptest.NewRecorder()
r := httptest.NewRequest(http.MethodGet, "/ui/assets/test.js", nil)
staticFiles(rec, r)
// Should not panic, and should serve something (even if it's a 404)
if rec.Result().StatusCode == 0 {
t.Error("expected a status code from vueStatic")
}
}

app/router/router.go

@@ -0,0 +1,69 @@
package router
import (
"log/slog"
"net/http"
"time"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/google/martian/v3/cors"
"github.com/SecurityBrewery/catalyst/app/auth"
"github.com/SecurityBrewery/catalyst/app/database/sqlc"
"github.com/SecurityBrewery/catalyst/app/mail"
"github.com/SecurityBrewery/catalyst/app/service"
"github.com/SecurityBrewery/catalyst/app/upload"
)
func New(service *service.Service, queries *sqlc.Queries, uploader *upload.Uploader, mailer *mail.Mailer) (*chi.Mux, error) {
r := chi.NewRouter()
// middleware for the router
r.Use(func(next http.Handler) http.Handler {
return http.Handler(cors.NewHandler(next))
})
r.Use(demoMode(queries))
r.Use(middleware.RequestID)
r.Use(middleware.RealIP)
r.Use(middleware.Logger)
r.Use(middleware.Timeout(time.Second * 60))
r.Use(middleware.Recoverer)
// base routes
r.Get("/", func(w http.ResponseWriter, r *http.Request) {
http.Redirect(w, r, "/ui/", http.StatusFound)
})
r.Get("/ui/*", staticFiles)
r.Get("/health", healthHandler(queries))
// auth routes
r.Mount("/auth", auth.Server(queries, mailer))
// API routes
r.With(auth.Middleware(queries)).Mount("/api", http.StripPrefix("/api", service))
uploadHandler, err := tusRoutes(queries, uploader)
if err != nil {
return nil, err
}
r.Mount("/files", http.StripPrefix("/files", uploadHandler))
return r, nil
}
func healthHandler(queries *sqlc.Queries) func(w http.ResponseWriter, r *http.Request) {
return func(w http.ResponseWriter, r *http.Request) {
if _, err := queries.ListFeatures(r.Context(), sqlc.ListFeaturesParams{Offset: 0, Limit: 100}); err != nil {
slog.ErrorContext(r.Context(), "Failed to get flags", "error", err)
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
return
}
w.WriteHeader(http.StatusOK)
_, _ = w.Write([]byte("OK"))
}
}

Some files were not shown because too many files have changed in this diff.