mirror of
https://github.com/hatchet-dev/hatchet.git
synced 2025-12-16 22:35:11 -06:00
first commit
This commit is contained in:
83
.gitignore
vendored
Normal file
83
.gitignore
vendored
Normal file
@@ -0,0 +1,83 @@
|
||||
.DS_Store
|
||||
.env
|
||||
*.env
|
||||
docker/.env
|
||||
*.pem
|
||||
app
|
||||
!frontend/app
|
||||
*.db
|
||||
test.yaml
|
||||
dist
|
||||
gon.hcl
|
||||
internal/local_templates
|
||||
gon*.hcl
|
||||
*prod.Dockerfile
|
||||
staging.sh
|
||||
*.crt
|
||||
*.key
|
||||
bin
|
||||
openapi.yaml
|
||||
.idea
|
||||
dump.rdb
|
||||
.srl
|
||||
*.srl
|
||||
*.csr
|
||||
*.pfx
|
||||
*.cert
|
||||
|
||||
node_modules
|
||||
|
||||
# Local docs directories
|
||||
/docs/.obsidian
|
||||
|
||||
# Local .terraform directories
|
||||
**/.terraform/*
|
||||
|
||||
.terraform
|
||||
|
||||
.terraform.lock.hcl
|
||||
|
||||
*kubeconfig*
|
||||
!*kubeconfig*.go
|
||||
|
||||
# .tfstate files
|
||||
*.tfstate
|
||||
*.tfstate.*
|
||||
|
||||
# Crash log files
|
||||
crash.log
|
||||
|
||||
# Exclude all .tfvars files, which are likely to contain sentitive data, such as
|
||||
# password, private keys, and other secrets. These should not be part of version
|
||||
# control as they are data points which are potentially sensitive and subject
|
||||
# to change depending on the environment.
|
||||
#
|
||||
*.tfvars
|
||||
*.tfvars.json
|
||||
|
||||
# Ignore override files as they are usually used to override resources locally and so
|
||||
# are not checked in
|
||||
override.tf
|
||||
override.tf.json
|
||||
*_override.tf
|
||||
*_override.tf.json
|
||||
|
||||
# Include override files you do wish to add to version control using negated pattern
|
||||
#
|
||||
# !example_override.tf
|
||||
|
||||
# Include tfplan files to ignore the plan output of command: terraform plan -out=tfplan
|
||||
# example: *tfplan*
|
||||
|
||||
# Ignore CLI configuration files
|
||||
.terraformrc
|
||||
terraform.rc
|
||||
|
||||
|
||||
# Ignore editor files
|
||||
.vscode
|
||||
|
||||
tmp
|
||||
|
||||
postgres-data
|
||||
rabbitmq.conf
|
||||
60
CONTRIBUTING.md
Normal file
60
CONTRIBUTING.md
Normal file
@@ -0,0 +1,60 @@
|
||||
## Development Setup
|
||||
|
||||
> **Note:** this guide assumes you're using MacOS. We simply don't have the bandwidth to test local development on native Windows. Most distros of Linux should work and we would like to support them, so please file an issue if running into an issue with a common distro.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- `go 1.21+`
|
||||
- `docker-compose`
|
||||
- [`Taskfile`](https://taskfile.dev/installation/)
|
||||
- The following additional devtools:
|
||||
- `protoc`: `brew install protobuf`
|
||||
- `sqlc`: `brew install sqlc`
|
||||
- `deepmap/oapi-codegen`: `go install github.com/deepmap/oapi-codegen/v2/cmd/oapi-codegen@latest`
|
||||
- `swagger-cli`: `npm list -g @apidevtools/swagger-cli || npm install -g @apidevtools/swagger-cli`
|
||||
- `nodemon`: `npm list -g nodemon || npm install -g nodemon`
|
||||
- `turbo`: `npm list -g turbo || npm i -g turbo`
|
||||
|
||||
### Setup
|
||||
|
||||
1. Spin up Postgres and RabbitMQ: `docker-compose up -d`
|
||||
|
||||
2. Run `npm install` inside of `./frontend/app`.
|
||||
|
||||
3. Generate certificates needed for communicating between the Hatchet client and engine: `task generate-certs`
|
||||
|
||||
4. Create environment variables:
|
||||
|
||||
```sh
|
||||
alias randstring='f() { openssl rand -base64 69 | tr -d "\n" | tr -d "=+/" | cut -c1-$1 };f'
|
||||
|
||||
cat > .env <<EOF
|
||||
DATABASE_URL='postgresql://hatchet:hatchet@127.0.0.1:5433/hatchet'
|
||||
SERVER_TLS_CERT_FILE=./hack/dev/certs/cluster.pem
|
||||
SERVER_TLS_KEY_FILE=./hack/dev/certs/cluster.key
|
||||
SERVER_TLS_ROOT_CA_FILE=./hack/dev/certs/ca.cert
|
||||
|
||||
SERVER_PORT=8080
|
||||
SERVER_URL=https://app.dev.hatchet-tools.com
|
||||
|
||||
SERVER_AUTH_COOKIE_SECRETS="$(randstring 16) $(randstring 16)"
|
||||
SERVER_AUTH_COOKIE_DOMAIN=app.dev.hatchet-tools.com
|
||||
SERVER_AUTH_COOKIE_INSECURE=false
|
||||
EOF
|
||||
```
|
||||
|
||||
5. Generate a local entry for `app.dev.hatchet-tools.com` in `/etc/hosts`: `task set-etc-hosts` or just append `127.0.0.1 app.dev.hatchet-tools.com` to your `/etc/hosts` file.
|
||||
|
||||
6. Migrate the database: `task prisma-migrate`
|
||||
|
||||
7. Generate all files: `task generate`
|
||||
|
||||
8. Seed the database: `task seed-dev`
|
||||
|
||||
9. Start the Hatchet engine, API server, dashboard, and Prisma studio:
|
||||
|
||||
```sh
|
||||
task start-dev
|
||||
```
|
||||
|
||||
10. To create and test workflows, run the examples in the `./examples` directory. You will need to add the tenant (output from the `task seed-dev` command) to the `.env` file in each example directory.
|
||||
11
Caddyfile
Normal file
11
Caddyfile
Normal file
@@ -0,0 +1,11 @@
|
||||
app.dev.hatchet-tools.com {
|
||||
tls internal
|
||||
|
||||
handle /api/* {
|
||||
reverse_proxy localhost:8080
|
||||
}
|
||||
|
||||
handle /* {
|
||||
reverse_proxy localhost:5173
|
||||
}
|
||||
}
|
||||
21
LICENSE
Normal file
21
LICENSE
Normal file
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2023-present Hatchet Technologies Inc.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
308
README.md
Normal file
308
README.md
Normal file
@@ -0,0 +1,308 @@
|
||||
[](https://join.slack.com/t/hatchet-co/signup) [](https://opensource.org/licenses/MIT) [](https://pkg.go.dev/github.com/hatchet-dev/hatchet)
|
||||
|
||||
## Introduction
|
||||
|
||||
_**Note:** Hatchet is in early development. Changes are not guaranteed to be backwards-compatible. If you'd like to run Hatchet in production, feel free to reach out on Slack for tips._
|
||||
|
||||
Hatchet is an event storage API and workflow engine for distributed applications. Using Hatchet, you can create workers which process a set of background tasks based on different triggers, like events created within your system or a cron schedule.
|
||||
|
||||
As a simple example, let's say you want to perform 3 actions when a user has signed up for your app:
|
||||
|
||||
1. Initialize a set of resources for the user (perhaps a sandbox environment for testing).
|
||||
2. Send the user an automated greeting over email
|
||||
3. Add the user to a newsletter campaign
|
||||
|
||||
With Hatchet, this would look something like the following:
|
||||
|
||||
```yaml
|
||||
name: "post-user-sign-up"
|
||||
version: v0.2.0
|
||||
triggers:
|
||||
events:
|
||||
- user:create
|
||||
jobs:
|
||||
create-resources:
|
||||
steps:
|
||||
- id: createSandbox
|
||||
action: sandbox:create
|
||||
timeout: 60s
|
||||
greet-user:
|
||||
steps:
|
||||
- id: greetUser
|
||||
action: postmark:email-from-template
|
||||
timeout: 15s
|
||||
with:
|
||||
firstName: "{{ .user.firstName }}"
|
||||
email: "{{ .user.email }}"
|
||||
add-to-newsletter:
|
||||
steps:
|
||||
- id: addUserToNewsletter
|
||||
action: newsletter:add-user
|
||||
timeout: 15s
|
||||
with:
|
||||
email: "{{ .user.email }}"
|
||||
```
|
||||
|
||||
In your codebase, you would then create a worker which could perform the following actions:
|
||||
|
||||
the following integrations (see [Writing an integration](#writing-an-integration)):
|
||||
|
||||
- A `sandbox` integration responsible for creating/tearing down a sandbox environment
|
||||
- A `postmark` integration for sending an email from a template
|
||||
- A `newsletter` integration for adding a user to a newsletter campaign
|
||||
|
||||
Ultimately, the goal of Hatchet workflows are that you don't need to write these integrations yourself -- creating a robust set of prebuilt integrations is one of the goals of the project.
|
||||
|
||||
### Why is this useful?
|
||||
|
||||
- No need to build all of your plumbing logic (action 1 -> event 1 -> action 2 -> event 2). Just define your jobs and steps and write your business logic. This is particularly useful the more complex your workflows become.
|
||||
- Using prebuilt integrations with a standard interface makes building auxiliary services like notification systems, billing, backups, and auditing much easier. **Please file an issue if you'd like to see an integration supported.** The following are on the roadmap:
|
||||
- Email providers: Sendgrid, Postmark, AWS SES
|
||||
- Stripe
|
||||
- AWS S3
|
||||
- Additionally, if you're already familiar with/using a workflow engine, making workflows declarative provides several benefits:
|
||||
- Makes spec'ing, debugging and visualizing workflows much simpler
|
||||
- Automatically updates triggers, schedules, and timeouts when they change, rather than doing this through a UI/CLI/SDK
|
||||
- Makes monitoring easier to build by logically separating units of work - jobs will automatically correspond to `BeginSpan`. OpenTelemetry support is on the roadmap.
|
||||
|
||||
## Getting Started
|
||||
|
||||
For a set of end-to-end examples, see the [examples](./examples) directory.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Go 1.21 installed
|
||||
- Taskfile installed (instructions [here](https://taskfile.dev/installation/))
|
||||
|
||||
### Starting Hatchet
|
||||
|
||||
We are working on making it easier to start a Hatchet server. For now, see the [contributing guide](./CONTRIBUTING.md) for getting started.
|
||||
|
||||
### Writing a Workflow
|
||||
|
||||
By default, Hatchet searches for workflows in the `.hatchet` folder relative to the directory you run your application in. However, you can configure this using `worker.WithWorkflowFiles` and the exported `fileutils` package (`fileutils.ReadAllValidFilesInDir`).
|
||||
|
||||
There are two main sections of a workflow file:
|
||||
|
||||
**Triggers (using `on`)**
|
||||
|
||||
This section specifies what triggers a workflow. This can be events or a crontab-like schedule. For example, the following are valid triggers:
|
||||
|
||||
```yaml
|
||||
on:
|
||||
- eventkey1
|
||||
- eventkey2
|
||||
```
|
||||
|
||||
```yaml
|
||||
on:
|
||||
cron:
|
||||
schedule: "*/15 * * * *"
|
||||
```
|
||||
|
||||
There are also a set of keywords `random_15_min`, `random_hourly`, `random_daily` for cron-like schedules. Upon creation of these schedules, a random minute is picked in the given interval - for example, `random_hourly` might result in a schedule `49 * * * *` (the 49th minute of every hour). After creation, these schedules will **not** be updated with a new random schedule.
|
||||
|
||||
```yaml
|
||||
on:
|
||||
cron:
|
||||
schedule: "random_hourly"
|
||||
```
|
||||
|
||||
The point of this is to avoid burstiness if all jobs have the exact same schedule (i.e. runs at the 0th minute of every hour), you may start to run out of memory on your workers.
|
||||
|
||||
**Jobs**
|
||||
|
||||
After defining your triggers, you define a list of jobs to run based on the triggers. **Jobs run in parallel.** Jobs contain the following fields:
|
||||
|
||||
```yaml
|
||||
# ...
|
||||
jobs:
|
||||
my-awesome-job:
|
||||
# (optional) A queue name
|
||||
queue: internal
|
||||
# (optional) A timeout value for the entire job
|
||||
timeout: 60s
|
||||
# (required) A set of steps for the job; see below
|
||||
steps: []
|
||||
```
|
||||
|
||||
Within each job, there are a set of **steps** which run sequentially. A step can contain the following fields:
|
||||
|
||||
```yaml
|
||||
# (required) the name of the step
|
||||
name: Step 1
|
||||
# (required) a unique id for the step (can be referenced by future steps)
|
||||
id: step-1
|
||||
# (required) the action id in the form of "integration_id:action".
|
||||
action: "slack:create-channel"
|
||||
# (required) the timeout of the individual step
|
||||
timeout: 15s
|
||||
# (optional or required, depending on integration) input data to the integration
|
||||
with:
|
||||
key: val
|
||||
```
|
||||
|
||||
### Creating a Worker
|
||||
|
||||
Workers can be created using:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
type userCreateEvent struct {
|
||||
Username string `json:"username"`
|
||||
UserId string `json:"user_id"`
|
||||
Data map[string]string `json:"data"`
|
||||
}
|
||||
|
||||
type actionInput struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
client, err := client.New(
|
||||
client.InitWorkflows(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
worker, err := worker.NewWorker(
|
||||
worker.WithDispatcherClient(
|
||||
client.Dispatcher(),
|
||||
),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
err = worker.RegisterAction("echo:echo", func(ctx context.Context, input *actionInput) (result any, err error) {
|
||||
return map[string]interface{}{
|
||||
"message": input.Message,
|
||||
}, nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
|
||||
defer cancel()
|
||||
|
||||
worker.Start(interruptCtx)
|
||||
}
|
||||
```
|
||||
|
||||
You can configure the worker with your own set of workflow files using the `client.WithWorkflowFiles` option.
|
||||
|
||||
### Triggering Events
|
||||
|
||||
To trigger events from your main application, use the `client.Event().Push` method:
|
||||
|
||||
```go
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
)
|
||||
|
||||
type userCreateEvent struct {
|
||||
Username string `json:"username"`
|
||||
UserId string `json:"user_id"`
|
||||
Data map[string]string `json:"data"`
|
||||
}
|
||||
|
||||
func main() {
|
||||
client, err := client.New()
|
||||
|
||||
testEvent := userCreateEvent{
|
||||
Username: "echo-test",
|
||||
UserId: "1234",
|
||||
Data: map[string]string{
|
||||
"test": "test",
|
||||
},
|
||||
}
|
||||
|
||||
err = client.Event().Push(
|
||||
context.Background(),
|
||||
"user:create",
|
||||
testEvent,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
You can configure the dispatcher with your own set of workflow files using the `dispatcher.WithWorkflowFiles` option.
|
||||
|
||||
## Why should I care?
|
||||
|
||||
**If you're unfamiliar with background task processing**
|
||||
|
||||
Many APIs start out without a task processing/worker service. You might not need it, but at a certain level of complexity, you probably will. There are a few use-cases where workers start to make sense:
|
||||
|
||||
1. You need to run scheduled tasks which that aren't triggered from your core API. For example, this may be a daily cleanup task, like traversing soft-deleted database entries or backing up data to S3.
|
||||
2. You need to run tasks which are triggered by API events, but aren't required for the core business logic of the handler. For example, you want to add a user to your CRM after they sign up.
|
||||
|
||||
For both of these cases, it's typical to re-use a lot of core functionality from your API, so the most natural place to start is by adding some automation within your API itself; for example, after returning `201 Created`, you might send a greeting to the user, initialize a sandbox environment, send an internal notification that a user signed up, etc, all within your API handlers. Let's say you've handled this case as following:
|
||||
|
||||
```go
|
||||
// Hypothetical handler called via a routing package, let's just pretend it returns an error
|
||||
func MyHandler(ctx context.Context, w http.ResponseWriter, r *http.Request) error {
|
||||
// Boilerplate code to parse the request
|
||||
var newUser User
|
||||
err := json.NewDecoder(r.Body).Decode(&newUser)
|
||||
if err != nil {
|
||||
http.Error(w, "Invalid user data", http.StatusBadRequest)
|
||||
return err
|
||||
}
|
||||
|
||||
// Validate email and password fields...
|
||||
// (Add your validation logic here)
|
||||
|
||||
// Create a user in the database
|
||||
user, err := createUser(ctx, newUser.Email, newUser.Password)
|
||||
if err != nil {
|
||||
// Handle database errors, such as unique constraint violation
|
||||
http.Error(w, "Error creating user", http.StatusInternalServerError)
|
||||
return err
|
||||
}
|
||||
|
||||
// Return 201 created with user type
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
|
||||
// Send user a greeting
|
||||
err := email.SendGreetingEmail(context.Background(), user)
|
||||
|
||||
if err != nil {
|
||||
// can't return an error, since header is already set
|
||||
fmt.Println(err)
|
||||
}
|
||||
|
||||
// ... other post-signup operations
|
||||
}
|
||||
```
|
||||
|
||||
At some point, you realize all of these background operations don't really belong in the handler -- when they're part of the handler, they're more difficult to monitor and observe, difficult to retry (especially if a third-party service goes down), and bloat your handlers (which could cause goroutine leakage or memory issues).
|
||||
|
||||
This is where a service like Hatchet suited for background/task processing comes in.
|
||||
|
||||
## I'd Like to Contribute
|
||||
|
||||
Hatchet is still in very early development -- as a result, there are very few development docs. However, please feel free to reach out on the #contributing channel on [Slack](https://join.slack.com/t/hatchet-co/signup) to shape the direction of the project.
|
||||
68
Taskfile.yaml
Normal file
68
Taskfile.yaml
Normal file
@@ -0,0 +1,68 @@
|
||||
version: "3"
|
||||
|
||||
tasks:
|
||||
write-default-env:
|
||||
cmds:
|
||||
- sh ./hack/dev/write-default-env.sh
|
||||
set-etc-hosts:
|
||||
cmds:
|
||||
- sudo sh ./hack/dev/manage-hosts.sh add 127.0.0.1 app.dev.hatchet-tools.com
|
||||
prisma-migrate:
|
||||
cmds:
|
||||
- sh ./hack/dev/run-go-with-env.sh run github.com/steebchen/prisma-client-go migrate dev
|
||||
seed-dev:
|
||||
cmds:
|
||||
- sh ./hack/dev/run-npx-with-env.sh prisma db push --force-reset
|
||||
- sh ./hack/dev/run-go-with-env.sh run ./cmd/seed
|
||||
start-dev:
|
||||
deps:
|
||||
- task: start-api
|
||||
- task: start-engine
|
||||
- task: start-frontend
|
||||
- task: prisma-studio
|
||||
start-api:
|
||||
cmds:
|
||||
- sh ./hack/dev/start-api.sh
|
||||
start-engine:
|
||||
cmds:
|
||||
- sh ./hack/dev/start-engine.sh
|
||||
start-frontend:
|
||||
cmds:
|
||||
- sh ./hack/dev/start-frontend.sh
|
||||
generate:
|
||||
cmds:
|
||||
- task: generate-api
|
||||
- task: generate-prisma
|
||||
- task: generate-proto
|
||||
- task: generate-sqlc
|
||||
generate-api:
|
||||
cmds:
|
||||
- task: generate-api-server
|
||||
- task: generate-api-client
|
||||
generate-certs:
|
||||
cmds:
|
||||
- sh ./hack/dev/generate-temporal-certs.sh ./hack/dev/certs
|
||||
generate-api-server:
|
||||
cmds:
|
||||
- sh ./hack/oas/generate-server.sh
|
||||
silent: true
|
||||
generate-api-client:
|
||||
cmds:
|
||||
- sh ./hack/oas/generate-client.sh
|
||||
silent: true
|
||||
generate-prisma:
|
||||
cmds:
|
||||
- go run github.com/steebchen/prisma-client-go generate
|
||||
generate-proto:
|
||||
cmds:
|
||||
- sh ./hack/dev/proto.sh
|
||||
generate-sqlc:
|
||||
cmds:
|
||||
- npx prisma migrate diff --from-empty --to-schema-datasource prisma/schema.prisma --script > internal/repository/prisma/dbsqlc/schema.sql
|
||||
- sqlc generate --file internal/repository/prisma/dbsqlc/sqlc.yaml
|
||||
kill-query-engines:
|
||||
cmds:
|
||||
- ps -A | grep 'prisma-query-engine-darwin-arm64' | grep -v grep | awk '{print $1}' | xargs kill -9 $1
|
||||
prisma-studio:
|
||||
cmds:
|
||||
- sh ./hack/dev/run-npx-with-env.sh prisma studio
|
||||
141
api-contracts/dispatcher/dispatcher.proto
Normal file
141
api-contracts/dispatcher/dispatcher.proto
Normal file
@@ -0,0 +1,141 @@
|
||||
syntax = "proto3";
|
||||
|
||||
option go_package = "github.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts";
|
||||
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
service Dispatcher {
|
||||
rpc Register(WorkerRegisterRequest) returns (WorkerRegisterResponse) {}
|
||||
|
||||
rpc Listen(WorkerListenRequest) returns (stream AssignedAction) {}
|
||||
|
||||
rpc SendActionEvent(ActionEvent) returns (ActionEventResponse) {}
|
||||
|
||||
rpc Unsubscribe(WorkerUnsubscribeRequest) returns (WorkerUnsubscribeResponse) {}
|
||||
}
|
||||
|
||||
message WorkerRegisterRequest {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the name of the worker
|
||||
string workerName = 2;
|
||||
|
||||
// a list of actions that this worker can run
|
||||
repeated string actions = 3;
|
||||
}
|
||||
|
||||
message WorkerRegisterResponse {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
|
||||
// the name of the worker
|
||||
string workerName = 3;
|
||||
}
|
||||
|
||||
enum ActionType {
|
||||
START_STEP_RUN = 0;
|
||||
CANCEL_STEP_RUN = 1;
|
||||
}
|
||||
|
||||
message AssignedAction {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the job id
|
||||
string jobId = 2;
|
||||
|
||||
// the job name
|
||||
string jobName = 3;
|
||||
|
||||
// the job run id
|
||||
string jobRunId = 4;
|
||||
|
||||
// the step id
|
||||
string stepId = 5;
|
||||
|
||||
// the step run id
|
||||
string stepRunId = 6;
|
||||
|
||||
// the action id
|
||||
string actionId = 7;
|
||||
|
||||
// the action type
|
||||
ActionType actionType = 8;
|
||||
|
||||
// the action payload
|
||||
string actionPayload = 9;
|
||||
}
|
||||
|
||||
message WorkerListenRequest {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
}
|
||||
|
||||
message WorkerUnsubscribeRequest {
|
||||
// the tenant id to unsubscribe from
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
}
|
||||
|
||||
message WorkerUnsubscribeResponse {
|
||||
// the tenant id to unsubscribe from
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
}
|
||||
|
||||
enum ActionEventType {
|
||||
STEP_EVENT_TYPE_UNKNOWN = 0;
|
||||
STEP_EVENT_TYPE_STARTED = 1;
|
||||
STEP_EVENT_TYPE_COMPLETED = 2;
|
||||
STEP_EVENT_TYPE_FAILED = 3;
|
||||
}
|
||||
|
||||
message ActionEvent {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
|
||||
// the id of the job
|
||||
string jobId = 3;
|
||||
|
||||
// the job run id
|
||||
string jobRunId = 4;
|
||||
|
||||
// the id of the step
|
||||
string stepId = 5;
|
||||
|
||||
// the step run id
|
||||
string stepRunId = 6;
|
||||
|
||||
// the action id
|
||||
string actionId = 7;
|
||||
|
||||
google.protobuf.Timestamp eventTimestamp = 8;
|
||||
|
||||
// the step event type
|
||||
ActionEventType eventType = 9;
|
||||
|
||||
// the event payload
|
||||
string eventPayload = 10;
|
||||
}
|
||||
|
||||
message ActionEventResponse {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the worker
|
||||
string workerId = 2;
|
||||
}
|
||||
68
api-contracts/events/events.proto
Normal file
68
api-contracts/events/events.proto
Normal file
@@ -0,0 +1,68 @@
|
||||
syntax = "proto3";
|
||||
|
||||
option go_package = "github.com/hatchet-dev/hatchet/internal/services/dispatcher/contracts";
|
||||
|
||||
import "google/protobuf/timestamp.proto";
|
||||
|
||||
service EventsService {
|
||||
rpc Push(PushEventRequest) returns (Event) {}
|
||||
|
||||
rpc List(ListEventRequest) returns (ListEventResponse) {}
|
||||
|
||||
rpc ReplaySingleEvent(ReplayEventRequest) returns (Event) {}
|
||||
}
|
||||
|
||||
message Event {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the id of the event
|
||||
string eventId = 2;
|
||||
|
||||
// the key for the event
|
||||
string key = 3;
|
||||
|
||||
// the payload for the event
|
||||
string payload = 4;
|
||||
|
||||
// when the event was generated
|
||||
google.protobuf.Timestamp eventTimestamp = 5;
|
||||
}
|
||||
|
||||
message PushEventRequest {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the key for the event
|
||||
string key = 2;
|
||||
|
||||
// the payload for the event
|
||||
string payload = 3;
|
||||
|
||||
// when the event was generated
|
||||
google.protobuf.Timestamp eventTimestamp = 4;
|
||||
}
|
||||
|
||||
message ListEventRequest {
|
||||
// (required) the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// (optional) the number of events to skip
|
||||
int32 offset = 2;
|
||||
|
||||
// (optional) the key for the event
|
||||
string key = 3;
|
||||
}
|
||||
|
||||
message ListEventResponse {
|
||||
// the events
|
||||
repeated Event events = 1;
|
||||
}
|
||||
|
||||
message ReplayEventRequest {
|
||||
// the tenant id
|
||||
string tenantId = 1;
|
||||
|
||||
// the event id to replay
|
||||
string eventId = 2;
|
||||
}
|
||||
88
api-contracts/openapi/components/schemas/_index.yaml
Normal file
88
api-contracts/openapi/components/schemas/_index.yaml
Normal file
@@ -0,0 +1,88 @@
|
||||
APIErrors:
|
||||
$ref: "./metadata.yaml#/APIErrors"
|
||||
APIError:
|
||||
$ref: "./metadata.yaml#/APIError"
|
||||
PaginationResponse:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
APIResourceMeta:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
User:
|
||||
$ref: "./user.yaml#/User"
|
||||
UserTenantPublic:
|
||||
$ref: "./user.yaml#/UserTenantPublic"
|
||||
UserLoginRequest:
|
||||
$ref: "./user.yaml#/UserLoginRequest"
|
||||
UserRegisterRequest:
|
||||
$ref: "./user.yaml#/UserRegisterRequest"
|
||||
UserTenantMembershipsList:
|
||||
$ref: "./user.yaml#/UserTenantMembershipsList"
|
||||
Tenant:
|
||||
$ref: "./tenant.yaml#/Tenant"
|
||||
TenantMember:
|
||||
$ref: "./tenant.yaml#/TenantMember"
|
||||
TenantMemberRole:
|
||||
$ref: "./tenant.yaml#/TenantMemberRole"
|
||||
TenantList:
|
||||
$ref: "./tenant.yaml#/TenantList"
|
||||
CreateTenantRequest:
|
||||
$ref: "./tenant.yaml#/CreateTenantRequest"
|
||||
Event:
|
||||
$ref: "./event.yaml#/Event"
|
||||
EventData:
|
||||
$ref: "./event.yaml#/EventData"
|
||||
EventWorkflowRunSummary:
|
||||
$ref: "./event.yaml#/EventWorkflowRunSummary"
|
||||
EventOrderByField:
|
||||
$ref: "./event.yaml#/EventOrderByField"
|
||||
EventOrderByDirection:
|
||||
$ref: "./event.yaml#/EventOrderByDirection"
|
||||
EventKeyList:
|
||||
$ref: "./event.yaml#/EventKeyList"
|
||||
EventKey:
|
||||
$ref: "./event.yaml#/EventKey"
|
||||
EventList:
|
||||
$ref: "./event.yaml#/EventList"
|
||||
ReplayEventRequest:
|
||||
$ref: "./event.yaml#/ReplayEventRequest"
|
||||
Workflow:
|
||||
$ref: "./workflow.yaml#/Workflow"
|
||||
WorkflowVersionMeta:
|
||||
$ref: "./workflow.yaml#/WorkflowVersionMeta"
|
||||
WorkflowVersion:
|
||||
$ref: "./workflow.yaml#/WorkflowVersion"
|
||||
WorkflowVersionDefinition:
|
||||
$ref: "./workflow.yaml#/WorkflowVersionDefinition"
|
||||
WorkflowTag:
|
||||
$ref: "./workflow.yaml#/WorkflowTag"
|
||||
WorkflowList:
|
||||
$ref: "./workflow.yaml#/WorkflowList"
|
||||
WorkflowTriggers:
|
||||
$ref: "./workflow.yaml#/WorkflowTriggers"
|
||||
WorkflowTriggerEventRef:
|
||||
$ref: "./workflow.yaml#/WorkflowTriggerEventRef"
|
||||
WorkflowTriggerCronRef:
|
||||
$ref: "./workflow.yaml#/WorkflowTriggerCronRef"
|
||||
Job:
|
||||
$ref: "./workflow.yaml#/Job"
|
||||
Step:
|
||||
$ref: "./workflow.yaml#/Step"
|
||||
WorkflowRun:
|
||||
$ref: "./workflow_run.yaml#/WorkflowRun"
|
||||
WorkflowRunList:
|
||||
$ref: "./workflow_run.yaml#/WorkflowRunList"
|
||||
WorkflowRunStatus:
|
||||
$ref: "./workflow_run.yaml#/WorkflowRunStatus"
|
||||
JobRunStatus:
|
||||
$ref: "./workflow_run.yaml#/JobRunStatus"
|
||||
StepRunStatus:
|
||||
$ref: "./workflow_run.yaml#/StepRunStatus"
|
||||
JobRun:
|
||||
$ref: "./workflow_run.yaml#/JobRun"
|
||||
WorkflowRunTriggeredBy:
|
||||
$ref: "./workflow_run.yaml#/WorkflowRunTriggeredBy"
|
||||
StepRun:
|
||||
$ref: "./workflow_run.yaml#/StepRun"
|
||||
WorkerList:
|
||||
$ref: "./worker.yaml#/WorkerList"
|
||||
Worker:
|
||||
$ref: "./worker.yaml#/Worker"
|
||||
93
api-contracts/openapi/components/schemas/event.yaml
Normal file
93
api-contracts/openapi/components/schemas/event.yaml
Normal file
@@ -0,0 +1,93 @@
|
||||
Event:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
key:
|
||||
type: string
|
||||
description: The key for the event.
|
||||
tenant:
|
||||
$ref: "./_index.yaml#/Tenant"
|
||||
description: The tenant associated with this event.
|
||||
tenantId:
|
||||
type: string
|
||||
description: The ID of the tenant associated with this event.
|
||||
workflowRunSummary:
|
||||
$ref: "#/EventWorkflowRunSummary"
|
||||
description: The workflow run summary for this event.
|
||||
required:
|
||||
- metadata
|
||||
- key
|
||||
- tenantId
|
||||
|
||||
EventData:
|
||||
properties:
|
||||
data:
|
||||
type: string
|
||||
description: The data for the event (JSON bytes).
|
||||
required:
|
||||
- data
|
||||
|
||||
ReplayEventRequest:
|
||||
properties:
|
||||
eventIds:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
example: bb214807-246e-43a5-a25d-41761d1cff9e
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
format: uuid
|
||||
required:
|
||||
- eventIds
|
||||
|
||||
EventWorkflowRunSummary:
|
||||
properties:
|
||||
pending:
|
||||
type: integer
|
||||
format: int64
|
||||
description: The number of pending runs.
|
||||
running:
|
||||
type: integer
|
||||
format: int64
|
||||
description: The number of running runs.
|
||||
succeeded:
|
||||
type: integer
|
||||
format: int64
|
||||
description: The number of succeeded runs.
|
||||
failed:
|
||||
type: integer
|
||||
format: int64
|
||||
description: The number of failed runs.
|
||||
|
||||
EventKeyList:
|
||||
properties:
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
rows:
|
||||
items:
|
||||
$ref: "#/EventKey"
|
||||
type: array
|
||||
|
||||
EventKey:
|
||||
type: string
|
||||
description: The key for the event.
|
||||
|
||||
EventList:
|
||||
properties:
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
rows:
|
||||
items:
|
||||
$ref: "#/Event"
|
||||
type: array
|
||||
|
||||
EventOrderByField:
|
||||
type: string
|
||||
enum:
|
||||
- createdAt
|
||||
|
||||
EventOrderByDirection:
|
||||
type: string
|
||||
enum:
|
||||
- asc
|
||||
- desc
|
||||
77
api-contracts/openapi/components/schemas/metadata.yaml
Normal file
77
api-contracts/openapi/components/schemas/metadata.yaml
Normal file
@@ -0,0 +1,77 @@
|
||||
APIError:
|
||||
type: object
|
||||
properties:
|
||||
code:
|
||||
type: integer
|
||||
description: a custom Hatchet error code
|
||||
format: uint64
|
||||
example: 1400
|
||||
field:
|
||||
type: string
|
||||
description: the field that this error is associated with, if applicable
|
||||
example: name
|
||||
description:
|
||||
type: string
|
||||
description: a description for this error
|
||||
example: A descriptive error message
|
||||
docs_link:
|
||||
type: string
|
||||
description: "a link to the documentation for this error, if it exists"
|
||||
example: github.com/hatchet-dev/hatchet
|
||||
required:
|
||||
- description
|
||||
APIErrors:
|
||||
type: object
|
||||
properties:
|
||||
errors:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/APIError"
|
||||
required:
|
||||
- errors
|
||||
APIResourceMeta:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
description: "the id of this resource, in UUID format"
|
||||
example: bb214807-246e-43a5-a25d-41761d1cff9e
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
format: uuid
|
||||
createdAt:
|
||||
type: string
|
||||
description: the time that this resource was created
|
||||
format: date-time
|
||||
example: 2022-12-13T15:06:48.888358-05:00
|
||||
updatedAt:
|
||||
type: string
|
||||
description: the time that this resource was last updated
|
||||
format: date-time
|
||||
example: 2022-12-13T15:06:48.888358-05:00
|
||||
required:
|
||||
- id
|
||||
- createdAt
|
||||
- updatedAt
|
||||
PaginationResponse:
|
||||
type: object
|
||||
properties:
|
||||
current_page:
|
||||
type: integer
|
||||
description: the current page
|
||||
format: int64
|
||||
example: 2
|
||||
next_page:
|
||||
type: integer
|
||||
description: the next page
|
||||
format: int64
|
||||
example: 3
|
||||
num_pages:
|
||||
type: integer
|
||||
description: the total number of pages for listing
|
||||
format: int64
|
||||
example: 10
|
||||
example:
|
||||
next_page: 3
|
||||
num_pages: 10
|
||||
current_page: 2
|
||||
68
api-contracts/openapi/components/schemas/tenant.yaml
Normal file
68
api-contracts/openapi/components/schemas/tenant.yaml
Normal file
@@ -0,0 +1,68 @@
|
||||
Tenant:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
name:
|
||||
type: string
|
||||
description: The name of the tenant.
|
||||
slug:
|
||||
type: string
|
||||
description: The slug of the tenant.
|
||||
required:
|
||||
- metadata
|
||||
- name
|
||||
- slug
|
||||
type: object
|
||||
|
||||
CreateTenantRequest:
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: The name of the tenant.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required"
|
||||
slug:
|
||||
type: string
|
||||
description: The slug of the tenant.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required,hatchetName"
|
||||
required:
|
||||
- name
|
||||
- slug
|
||||
type: object
|
||||
|
||||
TenantMember:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
user:
|
||||
$ref: "./_index.yaml#/UserTenantPublic"
|
||||
description: The user associated with this tenant member.
|
||||
role:
|
||||
$ref: "#/TenantMemberRole"
|
||||
description: The role of the user in the tenant.
|
||||
tenant:
|
||||
$ref: "#/Tenant"
|
||||
description: The tenant associated with this tenant member.
|
||||
required:
|
||||
- metadata
|
||||
- user
|
||||
- role
|
||||
type: object
|
||||
|
||||
TenantMemberRole:
|
||||
enum:
|
||||
- "OWNER"
|
||||
- "ADMIN"
|
||||
- "MEMBER"
|
||||
type: string
|
||||
|
||||
TenantList:
|
||||
properties:
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
rows:
|
||||
items:
|
||||
$ref: "#/Tenant"
|
||||
type: array
|
||||
x-go-name: Rows
|
||||
82
api-contracts/openapi/components/schemas/user.yaml
Normal file
82
api-contracts/openapi/components/schemas/user.yaml
Normal file
@@ -0,0 +1,82 @@
|
||||
User:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
name:
|
||||
type: string
|
||||
description: The display name of the user.
|
||||
email:
|
||||
type: string
|
||||
format: email
|
||||
description: The email address of the user.
|
||||
emailVerified:
|
||||
type: boolean
|
||||
description: Whether the user has verified their email address.
|
||||
required:
|
||||
- metadata
|
||||
- email
|
||||
- emailVerified
|
||||
type: object
|
||||
|
||||
UserTenantPublic:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
format: email
|
||||
description: The email address of the user.
|
||||
name:
|
||||
type: string
|
||||
description: The display name of the user.
|
||||
required:
|
||||
- email
|
||||
type: object
|
||||
|
||||
UserLoginRequest:
|
||||
properties:
|
||||
email:
|
||||
type: string
|
||||
format: email
|
||||
description: The email address of the user.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required,email"
|
||||
password:
|
||||
type: string
|
||||
description: The password of the user.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required,password"
|
||||
required:
|
||||
- email
|
||||
- password
|
||||
type: object
|
||||
|
||||
UserRegisterRequest:
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: The name of the user.
|
||||
email:
|
||||
type: string
|
||||
format: email
|
||||
description: The email address of the user.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required,email"
|
||||
password:
|
||||
type: string
|
||||
description: The password of the user.
|
||||
x-oapi-codegen-extra-tags:
|
||||
validate: "required,password"
|
||||
required:
|
||||
- name
|
||||
- email
|
||||
- password
|
||||
type: object
|
||||
|
||||
UserTenantMembershipsList:
|
||||
properties:
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
rows:
|
||||
items:
|
||||
$ref: "./_index.yaml#/TenantMember"
|
||||
type: array
|
||||
x-go-name: Rows
|
||||
35
api-contracts/openapi/components/schemas/worker.yaml
Normal file
35
api-contracts/openapi/components/schemas/worker.yaml
Normal file
@@ -0,0 +1,35 @@
|
||||
Worker:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
name:
|
||||
type: string
|
||||
description: The name of the worker.
|
||||
lastHeartbeatAt:
|
||||
type: string
|
||||
description: The time this worker last sent a heartbeat.
|
||||
format: date-time
|
||||
example: 2022-12-13T15:06:48.888358-05:00
|
||||
actions:
|
||||
type: array
|
||||
description: The actions this worker can perform.
|
||||
items:
|
||||
type: string
|
||||
recentStepRuns:
|
||||
type: array
|
||||
description: The recent step runs for this worker.
|
||||
items:
|
||||
$ref: "./_index.yaml#/StepRun"
|
||||
required:
|
||||
- metadata
|
||||
- name
|
||||
type: object
|
||||
|
||||
WorkerList:
|
||||
properties:
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
rows:
|
||||
items:
|
||||
$ref: "#/Worker"
|
||||
type: array
|
||||
201
api-contracts/openapi/components/schemas/workflow.yaml
Normal file
201
api-contracts/openapi/components/schemas/workflow.yaml
Normal file
@@ -0,0 +1,201 @@
|
||||
Workflow:
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
name:
|
||||
type: string
|
||||
description: The name of the workflow.
|
||||
description:
|
||||
type: string
|
||||
description: The description of the workflow.
|
||||
versions:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/WorkflowVersionMeta"
|
||||
tags:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/WorkflowTag"
|
||||
description: The tags of the workflow.
|
||||
lastRun:
|
||||
$ref: "./_index.yaml#/WorkflowRun"
|
||||
jobs:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/Job"
|
||||
description: The jobs of the workflow.
|
||||
required:
|
||||
- metadata
|
||||
- name
|
||||
type: object
|
||||
|
||||
WorkflowTag:
|
||||
type: object
|
||||
properties:
|
||||
name:
|
||||
type: string
|
||||
description: The name of the workflow tag.
|
||||
color:
|
||||
type: string
|
||||
description: The color of the workflow tag.
|
||||
required:
|
||||
- name
|
||||
- color
|
||||
|
||||
WorkflowList:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
rows:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/Workflow"
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
|
||||
WorkflowVersionMeta:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
version:
|
||||
type: string
|
||||
description: The version of the workflow.
|
||||
order:
|
||||
type: integer
|
||||
format: int32
|
||||
workflowId:
|
||||
type: string
|
||||
workflow:
|
||||
$ref: "#/Workflow"
|
||||
required:
|
||||
- metadata
|
||||
- version
|
||||
- order
|
||||
- workflowId
|
||||
|
||||
WorkflowVersion:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
version:
|
||||
type: string
|
||||
description: The version of the workflow.
|
||||
order:
|
||||
type: integer
|
||||
format: int32
|
||||
workflowId:
|
||||
type: string
|
||||
workflow:
|
||||
$ref: "#/Workflow"
|
||||
triggers:
|
||||
$ref: "#/WorkflowTriggers"
|
||||
jobs:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/Job"
|
||||
required:
|
||||
- metadata
|
||||
- version
|
||||
- order
|
||||
- workflowId
|
||||
|
||||
WorkflowVersionDefinition:
|
||||
type: object
|
||||
properties:
|
||||
rawDefinition:
|
||||
type: string
|
||||
description: The raw YAML definition of the workflow.
|
||||
required:
|
||||
- rawDefinition
|
||||
|
||||
WorkflowTriggers:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
workflow_version_id:
|
||||
type: string
|
||||
tenant_id:
|
||||
type: string
|
||||
events:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/WorkflowTriggerEventRef"
|
||||
crons:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/WorkflowTriggerCronRef"
|
||||
|
||||
WorkflowTriggerEventRef:
|
||||
type: object
|
||||
properties:
|
||||
parent_id:
|
||||
type: string
|
||||
event_key:
|
||||
type: string
|
||||
|
||||
WorkflowTriggerCronRef:
|
||||
type: object
|
||||
properties:
|
||||
parent_id:
|
||||
type: string
|
||||
cron:
|
||||
type: string
|
||||
|
||||
Job:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
tenantId:
|
||||
type: string
|
||||
versionId:
|
||||
type: string
|
||||
name:
|
||||
type: string
|
||||
description:
|
||||
type: string
|
||||
description: The description of the job.
|
||||
steps:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/Step"
|
||||
timeout:
|
||||
type: string
|
||||
description: The timeout of the job.
|
||||
required:
|
||||
- metadata
|
||||
- tenantId
|
||||
- versionId
|
||||
- name
|
||||
- steps
|
||||
|
||||
Step:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
readableId:
|
||||
type: string
|
||||
description: The readable id of the step.
|
||||
tenantId:
|
||||
type: string
|
||||
jobId:
|
||||
type: string
|
||||
action:
|
||||
type: string
|
||||
timeout:
|
||||
type: string
|
||||
description: The timeout of the step.
|
||||
nextId:
|
||||
type: string
|
||||
required:
|
||||
- metadata
|
||||
- readableId
|
||||
- tenantId
|
||||
- jobId
|
||||
- action
|
||||
- nextId
|
||||
193
api-contracts/openapi/components/schemas/workflow_run.yaml
Normal file
193
api-contracts/openapi/components/schemas/workflow_run.yaml
Normal file
@@ -0,0 +1,193 @@
|
||||
WorkflowRun:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
tenantId:
|
||||
type: string
|
||||
workflowVersionId:
|
||||
type: string
|
||||
workflowVersion:
|
||||
$ref: "./_index.yaml#/WorkflowVersion"
|
||||
status:
|
||||
$ref: "#/WorkflowRunStatus"
|
||||
jobRuns:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/JobRun"
|
||||
triggeredBy:
|
||||
$ref: "#/WorkflowRunTriggeredBy"
|
||||
input:
|
||||
type: object
|
||||
additionalProperties: true
|
||||
error:
|
||||
type: string
|
||||
startedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
finishedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
required:
|
||||
- metadata
|
||||
- tenantId
|
||||
- workflowVersionId
|
||||
- status
|
||||
- triggeredBy
|
||||
|
||||
WorkflowRunList:
|
||||
type: object
|
||||
properties:
|
||||
rows:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/WorkflowRun"
|
||||
pagination:
|
||||
$ref: "./metadata.yaml#/PaginationResponse"
|
||||
|
||||
StepRunStatus:
|
||||
type: string
|
||||
enum:
|
||||
- PENDING
|
||||
- PENDING_ASSIGNMENT
|
||||
- ASSIGNED
|
||||
- RUNNING
|
||||
- SUCCEEDED
|
||||
- FAILED
|
||||
- CANCELLED
|
||||
|
||||
JobRunStatus:
|
||||
type: string
|
||||
enum:
|
||||
- PENDING
|
||||
- RUNNING
|
||||
- SUCCEEDED
|
||||
- FAILED
|
||||
- CANCELLED
|
||||
|
||||
WorkflowRunStatus:
|
||||
type: string
|
||||
enum:
|
||||
- PENDING
|
||||
- RUNNING
|
||||
- SUCCEEDED
|
||||
- FAILED
|
||||
- CANCELLED
|
||||
|
||||
WorkflowRunTriggeredBy:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
parentId:
|
||||
type: string
|
||||
eventId:
|
||||
type: string
|
||||
event:
|
||||
$ref: "./_index.yaml#/Event"
|
||||
cronParentId:
|
||||
type: string
|
||||
cronSchedule:
|
||||
type: string
|
||||
required:
|
||||
- metadata
|
||||
- parentId
|
||||
|
||||
JobRun:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
tenantId:
|
||||
type: string
|
||||
workflowRunId:
|
||||
type: string
|
||||
jobId:
|
||||
type: string
|
||||
job:
|
||||
$ref: "./_index.yaml#/Job"
|
||||
tickerId:
|
||||
type: string
|
||||
stepRuns:
|
||||
type: array
|
||||
items:
|
||||
$ref: "#/StepRun"
|
||||
status:
|
||||
$ref: "#/JobRunStatus"
|
||||
result:
|
||||
type: object
|
||||
startedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
finishedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
timeoutAt:
|
||||
type: string
|
||||
format: date-time
|
||||
cancelledAt:
|
||||
type: string
|
||||
format: date-time
|
||||
cancelledReason:
|
||||
type: string
|
||||
cancelledError:
|
||||
type: string
|
||||
required:
|
||||
- metadata
|
||||
- tenantId
|
||||
- workflowRunId
|
||||
- jobId
|
||||
- status
|
||||
|
||||
StepRun:
|
||||
type: object
|
||||
properties:
|
||||
metadata:
|
||||
$ref: "./metadata.yaml#/APIResourceMeta"
|
||||
tenantId:
|
||||
type: string
|
||||
jobRunId:
|
||||
type: string
|
||||
stepId:
|
||||
type: string
|
||||
step:
|
||||
$ref: "./_index.yaml#/Step"
|
||||
nextId:
|
||||
type: string
|
||||
prevId:
|
||||
type: string
|
||||
workerId:
|
||||
type: string
|
||||
status:
|
||||
$ref: "#/StepRunStatus"
|
||||
input:
|
||||
type: object
|
||||
requeueAfter:
|
||||
type: string
|
||||
format: date-time
|
||||
result:
|
||||
type: object
|
||||
error:
|
||||
type: string
|
||||
startedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
finishedAt:
|
||||
type: string
|
||||
format: date-time
|
||||
timeoutAt:
|
||||
type: string
|
||||
format: date-time
|
||||
cancelledAt:
|
||||
type: string
|
||||
format: date-time
|
||||
cancelledReason:
|
||||
type: string
|
||||
cancelledError:
|
||||
type: string
|
||||
required:
|
||||
- metadata
|
||||
- tenantId
|
||||
- jobRunId
|
||||
- stepId
|
||||
- status
|
||||
192
api-contracts/openapi/paths/event/event.yaml
Normal file
192
api-contracts/openapi/paths/event/event.yaml
Normal file
@@ -0,0 +1,192 @@
|
||||
withTenant:
|
||||
get:
|
||||
x-resources: ["tenant"]
|
||||
description: Lists all events for a tenant.
|
||||
operationId: event:list
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The number to skip
|
||||
in: query
|
||||
name: offset
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
- description: The number to limit by
|
||||
in: query
|
||||
name: limit
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
- description: A list of keys to filter by
|
||||
in: query
|
||||
name: keys
|
||||
required: false
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventKey"
|
||||
- description: What to order by
|
||||
in: query
|
||||
name: orderByField
|
||||
required: false
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventOrderByField"
|
||||
- description: The order direction
|
||||
in: query
|
||||
name: orderByDirection
|
||||
required: false
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventOrderByDirection"
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventList"
|
||||
description: Successfully listed the events
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: List events
|
||||
tags:
|
||||
- Event
|
||||
|
||||
eventData:
|
||||
get:
|
||||
x-resources: ["tenant", "event"]
|
||||
description: Get the data for an event.
|
||||
operationId: event-data:get
|
||||
parameters:
|
||||
- description: The event id
|
||||
in: path
|
||||
name: event
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventData"
|
||||
description: Successfully retrieved the event data
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get event data
|
||||
tags:
|
||||
- Event
|
||||
|
||||
keys:
|
||||
get:
|
||||
x-resources: ["tenant"]
|
||||
description: Lists all event keys for a tenant.
|
||||
operationId: event-key:list
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventKeyList"
|
||||
description: Successfully listed the event keys
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: List event keys
|
||||
tags:
|
||||
- Event
|
||||
|
||||
replayEvents:
|
||||
post:
|
||||
x-resources: ["tenant"]
|
||||
description: Replays a list of events.
|
||||
operationId: event:update:replay
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/ReplayEventRequest"
|
||||
description: The event ids to replay
|
||||
required: true
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/EventList"
|
||||
description: Successfully replayed the events
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Replay events
|
||||
tags:
|
||||
- Event
|
||||
34
api-contracts/openapi/paths/tenant/tenant.yaml
Normal file
34
api-contracts/openapi/paths/tenant/tenant.yaml
Normal file
@@ -0,0 +1,34 @@
|
||||
tenants:
|
||||
post:
|
||||
x-resources: []
|
||||
description: Creates a new tenant
|
||||
operationId: tenant:create
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/CreateTenantRequest"
|
||||
description: The tenant to create
|
||||
required: true
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/Tenant"
|
||||
description: Successfully created the tenant
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Create tenant
|
||||
tags:
|
||||
- Tenant
|
||||
172
api-contracts/openapi/paths/user/user.yaml
Normal file
172
api-contracts/openapi/paths/user/user.yaml
Normal file
@@ -0,0 +1,172 @@
|
||||
login:
|
||||
post:
|
||||
description: Logs in a user.
|
||||
operationId: user:update:login
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/UserLoginRequest"
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/User"
|
||||
description: Successfully logged in
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Unauthorized
|
||||
"405":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Method not allowed
|
||||
security: []
|
||||
summary: Login user
|
||||
tags:
|
||||
- User
|
||||
current:
|
||||
get:
|
||||
description: Gets the current user
|
||||
operationId: user:get:current
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/User"
|
||||
description: Successfully retrieved the user
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Unauthorized
|
||||
"405":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Method not allowed
|
||||
security:
|
||||
- cookieAuth: []
|
||||
summary: Get current user
|
||||
tags:
|
||||
- User
|
||||
register:
|
||||
post:
|
||||
description: Registers a user.
|
||||
operationId: user:create
|
||||
requestBody:
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/UserRegisterRequest"
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/User"
|
||||
description: Successfully registered the user
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Unauthorized
|
||||
"405":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Method not allowed
|
||||
security: []
|
||||
summary: Register user
|
||||
tags:
|
||||
- User
|
||||
logout:
|
||||
post:
|
||||
description: Logs out a user.
|
||||
operationId: user:update:logout
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/User"
|
||||
description: Successfully logged out
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"401":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Unauthorized
|
||||
"405":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Method not allowed
|
||||
security:
|
||||
- cookieAuth: []
|
||||
summary: Logout user
|
||||
tags:
|
||||
- User
|
||||
memberships:
|
||||
get:
|
||||
description: Lists all tenant memberships for the current user
|
||||
operationId: tenant-memberships:list
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/UserTenantMembershipsList"
|
||||
description: Successfully listed the tenant memberships
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
security:
|
||||
- cookieAuth: []
|
||||
summary: List tenant memberships
|
||||
tags:
|
||||
- User
|
||||
75
api-contracts/openapi/paths/worker/worker.yaml
Normal file
75
api-contracts/openapi/paths/worker/worker.yaml
Normal file
@@ -0,0 +1,75 @@
|
||||
withTenant:
|
||||
get:
|
||||
x-resources: ["tenant"]
|
||||
description: Get all workers for a tenant
|
||||
operationId: worker:list
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkerList"
|
||||
description: Successfully retrieved the workers
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get workers
|
||||
tags:
|
||||
- Worker
|
||||
|
||||
withWorker:
|
||||
get:
|
||||
x-resources: ["tenant", "worker"]
|
||||
description: Get a worker
|
||||
operationId: worker:get
|
||||
parameters:
|
||||
- description: The worker id
|
||||
in: path
|
||||
name: worker
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/Worker"
|
||||
description: Successfully retrieved the worker
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get worker
|
||||
tags:
|
||||
- Worker
|
||||
293
api-contracts/openapi/paths/workflow/workflow.yaml
Normal file
293
api-contracts/openapi/paths/workflow/workflow.yaml
Normal file
@@ -0,0 +1,293 @@
|
||||
withTenant:
|
||||
get:
|
||||
x-resources: ["tenant"]
|
||||
description: Get all workflows for a tenant
|
||||
operationId: workflow:list
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkflowList"
|
||||
description: Successfully retrieved the workflows
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get workflows
|
||||
tags:
|
||||
- Workflow
|
||||
withWorkflow:
|
||||
get:
|
||||
x-resources: ["tenant", "workflow"]
|
||||
description: Get a workflow for a tenant
|
||||
operationId: workflow:get
|
||||
parameters:
|
||||
- description: The workflow id
|
||||
in: path
|
||||
name: workflow
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/Workflow"
|
||||
description: Successfully retrieved the workflow
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get workflow
|
||||
tags:
|
||||
- Workflow
|
||||
workflowVersion:
|
||||
get:
|
||||
x-resources: ["tenant", "workflow"]
|
||||
description: Get a workflow version for a tenant
|
||||
operationId: workflow-version:get
|
||||
parameters:
|
||||
- description: The workflow id
|
||||
in: path
|
||||
name: workflow
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The workflow version. If not supplied, the latest version is fetched.
|
||||
in: query
|
||||
name: version
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkflowVersion"
|
||||
description: Successfully retrieved the workflow version
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Not found
|
||||
summary: Get workflow version
|
||||
tags:
|
||||
- Workflow
|
||||
workflowVersionDefinition:
|
||||
get:
|
||||
x-resources: ["tenant", "workflow"]
|
||||
description: Get a workflow version definition for a tenant
|
||||
operationId: workflow-version:get:definition
|
||||
parameters:
|
||||
- description: The workflow id
|
||||
in: path
|
||||
name: workflow
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The workflow version. If not supplied, the latest version is fetched.
|
||||
in: query
|
||||
name: version
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkflowVersionDefinition"
|
||||
description: Successfully retrieved the workflow version definition
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
"404":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Not found
|
||||
summary: Get workflow version definition
|
||||
tags:
|
||||
- Workflow
|
||||
workflowRuns:
|
||||
get:
|
||||
x-resources: ["tenant"]
|
||||
description: Get all workflow runs for a tenant
|
||||
operationId: workflow-run:list
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The number to skip
|
||||
in: query
|
||||
name: offset
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
- description: The number to limit by
|
||||
in: query
|
||||
name: limit
|
||||
required: false
|
||||
schema:
|
||||
type: integer
|
||||
format: int64
|
||||
- description: The event id to get runs for.
|
||||
in: query
|
||||
name: eventId
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The workflow id to get runs for.
|
||||
in: query
|
||||
name: workflowId
|
||||
required: false
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkflowRunList"
|
||||
description: Successfully retrieved the workflow runs
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get workflow runs
|
||||
tags:
|
||||
- Workflow
|
||||
workflowRun:
|
||||
get:
|
||||
x-resources: ["tenant", "workflow-run"]
|
||||
description: Get a workflow run for a tenant
|
||||
operationId: workflow-run:get
|
||||
parameters:
|
||||
- description: The tenant id
|
||||
in: path
|
||||
name: tenant
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
- description: The workflow run id
|
||||
in: path
|
||||
name: workflow-run
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
format: uuid
|
||||
minLength: 36
|
||||
maxLength: 36
|
||||
responses:
|
||||
"200":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/WorkflowRun"
|
||||
description: Successfully retrieved the workflow run
|
||||
"400":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: A malformed or bad request
|
||||
"403":
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: "../../components/schemas/_index.yaml#/APIErrors"
|
||||
description: Forbidden
|
||||
summary: Get workflow run
|
||||
tags:
|
||||
- Workflow
|
||||
144
api-contracts/workflows/workflows.proto
Normal file
144
api-contracts/workflows/workflows.proto
Normal file
@@ -0,0 +1,144 @@
|
||||
syntax = "proto3";

option go_package = "github.com/hatchet-dev/hatchet/internal/services/admin/contracts";

import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto"; // For optional fields

// WorkflowService represents a set of RPCs for managing workflows.
service WorkflowService {
    rpc ListWorkflows(ListWorkflowsRequest) returns (ListWorkflowsResponse);
    rpc PutWorkflow(PutWorkflowRequest) returns (WorkflowVersion);
    rpc GetWorkflowByName(GetWorkflowByNameRequest) returns (Workflow);
    rpc ListWorkflowsForEvent(ListWorkflowsForEventRequest) returns (ListWorkflowsResponse);
    rpc DeleteWorkflow(DeleteWorkflowRequest) returns (Workflow);
}

// PutWorkflowRequest is the request for PutWorkflow (create-or-update of a
// workflow version for a tenant).
message PutWorkflowRequest {
    string tenant_id = 1;
    CreateWorkflowVersionOpts opts = 2;
}

// CreateWorkflowVersionOpts represents options to create a workflow version.
message CreateWorkflowVersionOpts {
    string name = 1; // (required) the workflow name
    string description = 2; // (optional) the workflow description
    string version = 3; // (required) the workflow version
    repeated string event_triggers = 4; // (optional) event triggers for the workflow
    repeated string cron_triggers = 5; // (optional) cron triggers for the workflow
    repeated CreateWorkflowJobOpts jobs = 6; // (required) the workflow jobs
}

// CreateWorkflowJobOpts represents options to create a workflow job.
message CreateWorkflowJobOpts {
    string name = 1; // (required) the job name
    string description = 2; // (optional) the job description
    string timeout = 3; // (optional) the job timeout
    repeated CreateWorkflowStepOpts steps = 4; // (required) the job steps
}

// CreateWorkflowStepOpts represents options to create a workflow step.
message CreateWorkflowStepOpts {
    string readable_id = 1; // (required) the step name
    string action = 2; // (required) the step action id
    string timeout = 3; // (optional) the step timeout
    string inputs = 4; // (optional) the step inputs, assuming string representation of JSON
}

// ListWorkflowsRequest is the request for ListWorkflows.
message ListWorkflowsRequest {
    string tenant_id = 1;
}

// ListWorkflowsResponse is the response for ListWorkflows.
message ListWorkflowsResponse {
    repeated Workflow workflows = 1;
}

// ListWorkflowsForEventRequest is the request for ListWorkflowsForEvent.
message ListWorkflowsForEventRequest {
    string tenant_id = 1;
    string event_key = 2;
}

// Workflow represents the Workflow model.
// NOTE(review): field number 4 is skipped here (and in several messages
// below) — presumably reserved for a removed/planned field; confirm before
// reusing the number.
message Workflow {
    string id = 1;
    google.protobuf.Timestamp created_at = 2;
    google.protobuf.Timestamp updated_at = 3;
    string tenant_id = 5;
    string name = 6;
    google.protobuf.StringValue description = 7; // Optional
    repeated WorkflowVersion versions = 8;
}

// WorkflowVersion represents the WorkflowVersion model.
message WorkflowVersion {
    string id = 1;
    google.protobuf.Timestamp created_at = 2;
    google.protobuf.Timestamp updated_at = 3;
    string version = 5;
    int32 order = 6;
    string workflow_id = 7;
    WorkflowTriggers triggers = 8;
    repeated Job jobs = 9;
}

// WorkflowTriggers represents the WorkflowTriggers model.
message WorkflowTriggers {
    string id = 1;
    google.protobuf.Timestamp created_at = 2;
    google.protobuf.Timestamp updated_at = 3;
    string workflow_version_id = 5;
    string tenant_id = 6;
    repeated WorkflowTriggerEventRef events = 7;
    repeated WorkflowTriggerCronRef crons = 8;
}

// WorkflowTriggerEventRef represents the WorkflowTriggerEventRef model.
message WorkflowTriggerEventRef {
    string parent_id = 1;
    string event_key = 2;
}

// WorkflowTriggerCronRef represents the WorkflowTriggerCronRef model.
message WorkflowTriggerCronRef {
    string parent_id = 1;
    string cron = 2;
}

// Job represents the Job model.
message Job {
    string id = 1;
    google.protobuf.Timestamp created_at = 2;
    google.protobuf.Timestamp updated_at = 3;
    string tenant_id = 5;
    string workflow_version_id = 6;
    string name = 7;
    google.protobuf.StringValue description = 8; // Optional
    repeated Step steps = 9;
    google.protobuf.StringValue timeout = 10; // Optional
}

// Step represents the Step model.
message Step {
    string id = 1;
    google.protobuf.Timestamp created_at = 2;
    google.protobuf.Timestamp updated_at = 3;
    google.protobuf.StringValue readable_id = 5; // Optional
    string tenant_id = 6;
    string job_id = 7;
    string action = 8;
    google.protobuf.StringValue timeout = 9; // Optional
    string next_id = 10; // Optional
}

// DeleteWorkflowRequest is the request for DeleteWorkflow.
message DeleteWorkflowRequest {
    string tenant_id = 1;
    string workflow_id = 2;
}

// GetWorkflowByNameRequest is the request for GetWorkflowByName.
message GetWorkflowByNameRequest {
    string tenant_id = 1;
    string name = 2;
}
|
||||
138
api/v1/server/authn/middleware.go
Normal file
138
api/v1/server/authn/middleware.go
Normal file
@@ -0,0 +1,138 @@
|
||||
package authn
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/middleware"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/labstack/echo/v4"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
// AuthN is the authentication middleware: it resolves the caller's
// credentials for a route according to the route's declared security schemes.
type AuthN struct {
	config *server.ServerConfig

	// helpers wraps session read/write operations
	helpers *SessionHelpers

	l *zerolog.Logger
}

// NewAuthN constructs an AuthN middleware from the server config, reusing
// the config's logger.
func NewAuthN(config *server.ServerConfig) *AuthN {
	return &AuthN{
		config:  config,
		helpers: NewSessionHelpers(config),
		l:       config.Logger,
	}
}
|
||||
|
||||
func (a *AuthN) Middleware(r *middleware.RouteInfo) echo.HandlerFunc {
|
||||
return func(c echo.Context) error {
|
||||
err := a.authenticate(c, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func (a *AuthN) authenticate(c echo.Context, r *middleware.RouteInfo) error {
|
||||
// if security is optional, continue
|
||||
if r.Security.IsOptional() {
|
||||
return nil
|
||||
}
|
||||
|
||||
if r.Security.NoAuth() {
|
||||
return a.handleNoAuth(c)
|
||||
}
|
||||
|
||||
var err error
|
||||
|
||||
if r.Security.CookieAuth() {
|
||||
err = a.handleCookieAuth(c)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err != nil && r.Security.BearerAuth() {
|
||||
err = a.handleBearerAuth(c)
|
||||
}
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
// handleNoAuth enforces routes that must be accessed WITHOUT credentials
// (e.g. registration/login): a request carrying an already-authenticated
// session is rejected with 403.
func (a *AuthN) handleNoAuth(c echo.Context) error {
	forbidden := echo.NewHTTPError(http.StatusForbidden, "Please provide valid credentials")

	store := a.config.SessionStore

	session, err := store.Get(c.Request(), store.GetName())

	// an unreadable session is treated as a rejection, not a server error
	if err != nil {
		a.l.Debug().Err(err).Msg("error getting session")

		return forbidden
	}

	if auth, ok := session.Values["authenticated"].(bool); ok && auth {
		a.l.Debug().Msgf("user was authenticated when no security schemes permit auth")

		return forbidden
	}

	// set unauthenticated session in context
	c.Set("session", session)

	return nil
}
|
||||
|
||||
// handleCookieAuth authenticates the request from its session cookie. On
// success it places the loaded user and session into the echo context; every
// failure mode returns a generic 403 so no information leaks to the caller.
func (a *AuthN) handleCookieAuth(c echo.Context) error {
	forbidden := echo.NewHTTPError(http.StatusForbidden, "Please provide valid credentials")

	store := a.config.SessionStore

	session, err := store.Get(c.Request(), store.GetName())
	if err != nil {
		// the session is unreadable/corrupt: reset it so the client gets a
		// clean cookie on the response
		if saveErr := a.helpers.SaveUnauthenticated(c); saveErr != nil {
			a.l.Error().Err(saveErr).Msg("error saving unauthenticated session")
		}

		return forbidden
	}

	if auth, ok := session.Values["authenticated"].(bool); !ok || !auth {
		// if the session is new, make sure we write a Set-Cookie header to the response
		if session.IsNew {
			// NOTE(fix): the save error was previously discarded silently
			if saveErr := saveNewSession(c, session); saveErr != nil {
				a.l.Error().Err(saveErr).Msg("error saving new session")
			}

			c.Set("session", session)
		}

		return forbidden
	}

	// read the user id in the token
	userID, ok := session.Values["user_id"].(string)

	if !ok {
		a.l.Debug().Msgf("could not cast user_id to string")

		return forbidden
	}

	user, err := a.config.Repository.User().GetUserByID(userID)

	// NOTE(fix): this error was previously ignored, which could place a nil
	// user in the context and panic downstream handlers that assert *db.UserModel.
	if err != nil {
		a.l.Debug().Err(err).Msg("error getting user by id")

		return forbidden
	}

	// set the user and session in context
	c.Set("user", user)
	c.Set("session", session)

	return nil
}
|
||||
|
||||
// handleBearerAuth validates an Authorization bearer token.
// NOTE(review): not implemented yet — any request that reaches this path
// panics. Implement (or return an explicit error) before enabling bearer
// auth on any route.
func (a *AuthN) handleBearerAuth(c echo.Context) error {
	panic("implement me")
}
|
||||
56
api/v1/server/authn/session_helpers.go
Normal file
56
api/v1/server/authn/session_helpers.go
Normal file
@@ -0,0 +1,56 @@
|
||||
package authn
|
||||
|
||||
import (
|
||||
"github.com/gorilla/sessions"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// SessionHelpers bundles session read/write operations over the server's
// configured session store.
type SessionHelpers struct {
	// config provides access to the SessionStore
	config *server.ServerConfig
}

// NewSessionHelpers constructs a SessionHelpers from the server config.
func NewSessionHelpers(config *server.ServerConfig) *SessionHelpers {
	return &SessionHelpers{
		config: config,
	}
}
|
||||
|
||||
func (s *SessionHelpers) SaveAuthenticated(c echo.Context, user *db.UserModel) error {
|
||||
session, err := s.config.SessionStore.Get(c.Request(), s.config.SessionStore.GetName())
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
session.Values["authenticated"] = true
|
||||
session.Values["user_id"] = user.ID
|
||||
|
||||
return session.Save(c.Request(), c.Response())
|
||||
}
|
||||
|
||||
// SaveUnauthenticated clears the request's session and expires its cookie.
// Used on logout and when an invalid/corrupt session must be reset.
func (s *SessionHelpers) SaveUnauthenticated(c echo.Context) error {
	session, err := s.config.SessionStore.Get(c.Request(), s.config.SessionStore.GetName())

	if err != nil {
		return err
	}

	// unset all values
	session.Values = make(map[interface{}]interface{})
	session.Values["authenticated"] = false

	// we set the maxage of the session so that the session gets deleted. This avoids cases
	// where the same cookie can get re-authed to a different user, which would be problematic
	// if the session values weren't properly cleared on logout.
	session.Options.MaxAge = -1

	return session.Save(c.Request(), c.Response())
}
|
||||
|
||||
// saveNewSession persists a brand-new, unauthenticated session so the client
// receives a Set-Cookie header on this response.
func saveNewSession(c echo.Context, session *sessions.Session) error {
	session.Values["authenticated"] = false

	return session.Save(c.Request(), c.Response())
}
|
||||
73
api/v1/server/authz/middleware.go
Normal file
73
api/v1/server/authz/middleware.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package authz
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/middleware"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
"github.com/rs/zerolog"
|
||||
)
|
||||
|
||||
type AuthZ struct {
|
||||
config *server.ServerConfig
|
||||
|
||||
l *zerolog.Logger
|
||||
}
|
||||
|
||||
func NewAuthZ(config *server.ServerConfig) *AuthZ {
|
||||
return &AuthZ{
|
||||
config: config,
|
||||
}
|
||||
}
|
||||
|
||||
func (a *AuthZ) Middleware(r *middleware.RouteInfo) echo.HandlerFunc {
|
||||
return func(c echo.Context) error {
|
||||
err := a.authorize(c, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// authorize verifies the caller may access the route's resources. Today the
// only check is tenant membership: when an upstream middleware has placed a
// tenant in the context, the authenticated user must be a member of it, and
// the membership record is stored in the context for downstream handlers.
func (a *AuthZ) authorize(c echo.Context, r *middleware.RouteInfo) error {
	// routes with optional or no auth have nothing to authorize
	if r.Security.IsOptional() || r.Security.NoAuth() {
		return nil
	}

	unauthorized := echo.NewHTTPError(http.StatusUnauthorized, "Not authorized to view this resource")

	// if tenant is set in the context, verify that the user is a member of the tenant
	if tenant, ok := c.Get("tenant").(*db.TenantModel); ok {
		user, ok := c.Get("user").(*db.UserModel)

		if !ok {
			a.l.Debug().Msgf("user not found in context")

			return unauthorized
		}

		// check if the user is a member of the tenant
		tenantMember, err := a.config.Repository.Tenant().GetTenantMemberByUserID(tenant.ID, user.ID)

		// any lookup failure is treated as "not a member" rather than a 500
		if err != nil {
			a.l.Debug().Err(err).Msgf("error getting tenant member")

			return unauthorized
		}

		if tenantMember == nil {
			a.l.Debug().Msgf("user is not a member of the tenant")

			return unauthorized
		}

		// set the tenant member in the context
		c.Set("tenant-member", tenantMember)
	}

	return nil
}
|
||||
25
api/v1/server/handlers/events/get.go
Normal file
25
api/v1/server/handlers/events/get.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package events
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *EventService) EventDataGet(ctx echo.Context, request gen.EventDataGetRequestObject) (gen.EventDataGetResponseObject, error) {
|
||||
event := ctx.Get("event").(*db.EventModel)
|
||||
|
||||
var dataStr string
|
||||
|
||||
if dataType, ok := event.Data(); ok {
|
||||
dataStr = string(json.RawMessage(dataType))
|
||||
}
|
||||
|
||||
return gen.EventDataGet200JSONResponse(
|
||||
gen.EventData{
|
||||
Data: dataStr,
|
||||
},
|
||||
), nil
|
||||
}
|
||||
78
api/v1/server/handlers/events/list.go
Normal file
78
api/v1/server/handlers/events/list.go
Normal file
@@ -0,0 +1,78 @@
|
||||
package events
|
||||
|
||||
import (
|
||||
"math"
|
||||
"strings"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// EventList returns a paginated list of events for the tenant resolved by the
// middleware, honoring the optional key-filter, ordering, limit and offset
// query parameters.
func (t *EventService) EventList(ctx echo.Context, request gen.EventListRequestObject) (gen.EventListResponseObject, error) {
	tenant := ctx.Get("tenant").(*db.TenantModel)

	// defaults; overridden by query params below. listOpts points at these
	// variables, so reassigning them updates the query options too.
	limit := 50
	offset := 0

	listOpts := &repository.ListEventOpts{
		Limit:  &limit,
		Offset: &offset,
	}

	if request.Params.Keys != nil {
		listOpts.Keys = *request.Params.Keys
	}

	if request.Params.OrderByField != nil {
		listOpts.OrderBy = repository.StringPtr(string(*request.Params.OrderByField))
	}

	if request.Params.OrderByDirection != nil {
		listOpts.OrderDirection = repository.StringPtr(strings.ToUpper(string(*request.Params.OrderByDirection)))
	}

	// NOTE(fix): the original assigned the request values to freshly shadowed
	// locals, so the pagination math below always used the defaults (50/0)
	// regardless of what the client asked for. A non-positive limit is also
	// rejected here, since it would divide by zero in the page math.
	if request.Params.Limit != nil && *request.Params.Limit > 0 {
		limit = int(*request.Params.Limit)
	}

	if request.Params.Offset != nil && *request.Params.Offset >= 0 {
		offset = int(*request.Params.Offset)
	}

	listRes, err := t.config.Repository.Event().ListEvents(tenant.ID, listOpts)

	if err != nil {
		return nil, err
	}

	rows := make([]gen.Event, len(listRes.Rows))

	for i, event := range listRes.Rows {
		rows[i] = *transformers.ToEventFromSQLC(event)
	}

	// use the total rows and limit to calculate the total pages
	totalPages := int64(math.Ceil(float64(listRes.Count) / float64(limit)))
	currPage := 1 + int64(math.Ceil(float64(offset)/float64(limit)))
	nextPage := currPage + 1

	// NOTE(fix): was '==', which let nextPage run past the end whenever the
	// requested offset already placed currPage beyond the last page.
	if currPage >= totalPages {
		nextPage = currPage
	}

	return gen.EventList200JSONResponse(
		gen.EventList{
			Rows: &rows,
			Pagination: &gen.PaginationResponse{
				NumPages:    &totalPages,
				NextPage:    &nextPage,
				CurrentPage: &currPage,
			},
		},
	), nil
}
|
||||
29
api/v1/server/handlers/events/list_keys.go
Normal file
29
api/v1/server/handlers/events/list_keys.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package events
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *EventService) EventKeyList(ctx echo.Context, request gen.EventKeyListRequestObject) (gen.EventKeyListResponseObject, error) {
|
||||
tenant := ctx.Get("tenant").(*db.TenantModel)
|
||||
|
||||
eventKeys, err := t.config.Repository.Event().ListEventKeys(tenant.ID)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows := make([]gen.EventKey, len(eventKeys))
|
||||
|
||||
for i, eventKey := range eventKeys {
|
||||
rows[i] = gen.EventKey(eventKey)
|
||||
}
|
||||
|
||||
return gen.EventKeyList200JSONResponse(
|
||||
gen.EventKeyList{
|
||||
Rows: &rows,
|
||||
},
|
||||
), nil
|
||||
}
|
||||
57
api/v1/server/handlers/events/replay.go
Normal file
57
api/v1/server/handlers/events/replay.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package events
|
||||
|
||||
import (
|
||||
"github.com/hashicorp/go-multierror"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *EventService) EventUpdateReplay(ctx echo.Context, request gen.EventUpdateReplayRequestObject) (gen.EventUpdateReplayResponseObject, error) {
|
||||
tenant := ctx.Get("tenant").(*db.TenantModel)
|
||||
|
||||
eventIds := make([]string, len(request.Body.EventIds))
|
||||
|
||||
for i := range request.Body.EventIds {
|
||||
eventIds[i] = request.Body.EventIds[i].String()
|
||||
}
|
||||
|
||||
events, err := t.config.Repository.Event().ListEventsById(tenant.ID, eventIds)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
newEvents := make([]db.EventModel, len(events))
|
||||
|
||||
var allErrs error
|
||||
|
||||
for i := range events {
|
||||
event := events[i]
|
||||
|
||||
newEvent, err := t.config.Ingestor.IngestReplayedEvent(tenant.ID, &event)
|
||||
|
||||
if err != nil {
|
||||
allErrs = multierror.Append(allErrs, err)
|
||||
}
|
||||
|
||||
newEvents[i] = *newEvent
|
||||
}
|
||||
|
||||
if allErrs != nil {
|
||||
return nil, allErrs
|
||||
}
|
||||
|
||||
rows := make([]gen.Event, len(newEvents))
|
||||
|
||||
for i := range newEvents {
|
||||
rows[i] = *transformers.ToEvent(&newEvents[i])
|
||||
}
|
||||
|
||||
return gen.EventUpdateReplay200JSONResponse(
|
||||
gen.EventList{
|
||||
Rows: &rows,
|
||||
},
|
||||
), nil
|
||||
}
|
||||
15
api/v1/server/handlers/events/service.go
Normal file
15
api/v1/server/handlers/events/service.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package events
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
)
|
||||
|
||||
// EventService implements the event-related API handlers (list, get data,
// list keys, replay).
type EventService struct {
	// config provides access to the repository, ingestor and validator
	config *server.ServerConfig
}

// NewEventService constructs an EventService backed by the given server config.
func NewEventService(config *server.ServerConfig) *EventService {
	return &EventService{
		config: config,
	}
}
|
||||
63
api/v1/server/handlers/tenants/create.go
Normal file
63
api/v1/server/handlers/tenants/create.go
Normal file
@@ -0,0 +1,63 @@
|
||||
package tenants
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// TenantCreate creates a new tenant owned by the calling user. Returns 400
// when validation fails or a tenant with the requested slug already exists;
// on success the caller is added to the tenant with the OWNER role.
func (t *TenantService) TenantCreate(ctx echo.Context, request gen.TenantCreateRequestObject) (gen.TenantCreateResponseObject, error) {
	user := ctx.Get("user").(*db.UserModel)

	// validate the request
	if apiErrors, err := t.config.Validator.ValidateAPI(request.Body); err != nil {
		return nil, err
	} else if apiErrors != nil {
		return gen.TenantCreate400JSONResponse(*apiErrors), nil
	}

	// determine if a tenant with the slug already exists
	// NOTE(review): this check-then-create is racy — a concurrent request
	// with the same slug could slip through; confirm a unique constraint
	// exists on the slug column.
	existingTenant, err := t.config.Repository.Tenant().GetTenantBySlug(string(request.Body.Slug))

	// ErrNotFound is the expected "slug is free" outcome; anything else is fatal
	if err != nil && !errors.Is(err, db.ErrNotFound) {
		return nil, err
	}

	if existingTenant != nil {
		// just return bad request
		return gen.TenantCreate400JSONResponse(
			apierrors.NewAPIErrors("Tenant with the slug already exists."),
		), nil
	}

	createOpts := &repository.CreateTenantOpts{
		Slug: string(request.Body.Slug),
		Name: string(request.Body.Name),
	}

	// write the user to the db
	tenant, err := t.config.Repository.Tenant().CreateTenant(createOpts)

	if err != nil {
		return nil, err
	}

	// add the user as an owner of the tenant
	_, err = t.config.Repository.Tenant().CreateTenantMember(tenant.ID, &repository.CreateTenantMemberOpts{
		UserId: user.ID,
		Role:   "OWNER",
	})

	if err != nil {
		return nil, err
	}

	return gen.TenantCreate200JSONResponse(
		*transformers.ToTenant(tenant),
	), nil
}
|
||||
15
api/v1/server/handlers/tenants/service.go
Normal file
15
api/v1/server/handlers/tenants/service.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package tenants
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
)
|
||||
|
||||
// TenantService implements the tenant-related API handlers.
type TenantService struct {
	// config provides access to the repository and validator
	config *server.ServerConfig
}

// NewTenantService constructs a TenantService backed by the given server config.
func NewTenantService(config *server.ServerConfig) *TenantService {
	return &TenantService{
		config: config,
	}
}
|
||||
66
api/v1/server/handlers/users/create.go
Normal file
66
api/v1/server/handlers/users/create.go
Normal file
@@ -0,0 +1,66 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/authn"
|
||||
)
|
||||
|
||||
// UserCreate registers a new user: validates the payload, rejects duplicate
// emails, hashes the password, persists the user, and logs them in by saving
// an authenticated session on the response.
func (u *UserService) UserCreate(ctx echo.Context, request gen.UserCreateRequestObject) (gen.UserCreateResponseObject, error) {
	// validate the request
	if apiErrors, err := u.config.Validator.ValidateAPI(request.Body); err != nil {
		return nil, err
	} else if apiErrors != nil {
		return gen.UserCreate400JSONResponse(*apiErrors), nil
	}

	// determine if the user exists before attempting to write the user
	existingUser, err := u.config.Repository.User().GetUserByEmail(string(request.Body.Email))

	// ErrNotFound is the expected "email is free" outcome; anything else is fatal
	if err != nil && !errors.Is(err, db.ErrNotFound) {
		return nil, err
	}

	if existingUser != nil {
		// just return bad request
		return gen.UserCreate400JSONResponse(
			apierrors.NewAPIErrors("Email is already registered."),
		), nil
	}

	// never store the plaintext password
	hashedPw, err := repository.HashPassword(request.Body.Password)

	if err != nil {
		return nil, err
	}

	createOpts := &repository.CreateUserOpts{
		Email:         string(request.Body.Email),
		EmailVerified: repository.BoolPtr(false),
		Name:          repository.StringPtr(request.Body.Name),
		Password:      *hashedPw,
	}

	// write the user to the db
	user, err := u.config.Repository.User().CreateUser(createOpts)
	if err != nil {
		return nil, err
	}

	// log the new user in immediately by marking the session authenticated
	err = authn.NewSessionHelpers(u.config).SaveAuthenticated(ctx, user)

	if err != nil {
		return nil, err
	}

	return gen.UserCreate200JSONResponse(
		*transformers.ToUser(user),
	), nil
}
|
||||
16
api/v1/server/handlers/users/get_current.go
Normal file
16
api/v1/server/handlers/users/get_current.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (u *UserService) UserGetCurrent(ctx echo.Context, request gen.UserGetCurrentRequestObject) (gen.UserGetCurrentResponseObject, error) {
|
||||
user := ctx.Get("user").(*db.UserModel)
|
||||
|
||||
return gen.UserGetCurrent200JSONResponse(
|
||||
*transformers.ToUser(user),
|
||||
), nil
|
||||
}
|
||||
31
api/v1/server/handlers/users/list_memberships.go
Normal file
31
api/v1/server/handlers/users/list_memberships.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *UserService) TenantMembershipsList(ctx echo.Context, request gen.TenantMembershipsListRequestObject) (gen.TenantMembershipsListResponseObject, error) {
|
||||
user := ctx.Get("user").(*db.UserModel)
|
||||
|
||||
memberships, err := t.config.Repository.User().ListTenantMemberships(user.ID)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows := make([]gen.TenantMember, len(memberships))
|
||||
|
||||
for i, membership := range memberships {
|
||||
membershipCp := membership
|
||||
rows[i] = *transformers.ToTenantMember(&membershipCp)
|
||||
}
|
||||
|
||||
return gen.TenantMembershipsList200JSONResponse(
|
||||
gen.UserTenantMembershipsList{
|
||||
Rows: &rows,
|
||||
},
|
||||
), nil
|
||||
}
|
||||
15
api/v1/server/handlers/users/service.go
Normal file
15
api/v1/server/handlers/users/service.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
)
|
||||
|
||||
// UserService implements the user-related API handlers (registration, login,
// logout, current-user and membership lookups). It holds the shared server
// configuration for repository and auth access.
type UserService struct {
	config *server.ServerConfig
}

// NewUserService constructs a UserService backed by the given server config.
func NewUserService(config *server.ServerConfig) *UserService {
	return &UserService{
		config: config,
	}
}
|
||||
67
api/v1/server/handlers/users/update_login.go
Normal file
67
api/v1/server/handlers/users/update_login.go
Normal file
@@ -0,0 +1,67 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/authn"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
// UserUpdateLogin performs a local (email + password) login: it validates the
// request, looks up the user, verifies the password against the stored hash,
// and writes an authenticated session on success.
//
// Returns 405 when basic auth is disabled server-wide and 400 for validation
// failures, unknown users, or bad passwords.
func (u *UserService) UserUpdateLogin(ctx echo.Context, request gen.UserUpdateLoginRequestObject) (gen.UserUpdateLoginResponseObject, error) {
	// check that the server supports local registration
	if !u.config.Auth.BasicAuthEnabled {
		return gen.UserUpdateLogin405JSONResponse(
			apierrors.NewAPIErrors("local registration is not enabled"),
		), nil
	}

	// validate the request
	if apiErrors, err := u.config.Validator.ValidateAPI(request.Body); err != nil {
		return nil, err
	} else if apiErrors != nil {
		return gen.UserUpdateLogin400JSONResponse(*apiErrors), nil
	}

	// determine if the user exists before attempting to write the user.
	// NOTE(review): "user not found" and "invalid password" are
	// distinguishable responses, which allows account enumeration — consider
	// returning the same message for both.
	existingUser, err := u.config.Repository.User().GetUserByEmail(string(request.Body.Email))
	if err != nil {
		if errors.Is(err, db.ErrNotFound) {
			return gen.UserUpdateLogin400JSONResponse(apierrors.NewAPIErrors("user not found")), nil
		}

		return nil, err
	}

	// the password hash is stored separately from the user row
	userPass, err := u.config.Repository.User().GetUserPassword(existingUser.ID)

	if err != nil {
		return nil, fmt.Errorf("could not get user password: %w", err)
	}

	// a verification error is treated the same as a mismatch: reject with 400
	if verified, err := repository.VerifyPassword(userPass.Hash, request.Body.Password); !verified || err != nil {
		return gen.UserUpdateLogin400JSONResponse(apierrors.NewAPIErrors("invalid password")), nil
	}

	// credentials are valid: persist the authenticated session
	err = authn.NewSessionHelpers(u.config).SaveAuthenticated(ctx, existingUser)

	if err != nil {
		return nil, err
	}

	return gen.UserUpdateLogin200JSONResponse(
		*transformers.ToUser(existingUser),
	), nil
}
|
||||
|
||||
// verifyPassword compares the bcrypt hash stored on the user model against a
// plaintext password, returning (true, nil) on a match and (false, err)
// otherwise.
//
// NOTE(review): nothing in this file calls verifyPassword — UserUpdateLogin
// uses repository.VerifyPassword instead — so this appears to be dead code;
// confirm and consider removing it (and the bcrypt import).
func verifyPassword(u *db.UserModel, pw string) (bool, error) {
	err := bcrypt.CompareHashAndPassword([]byte(u.RelationsUser.Password.Hash), []byte(pw))

	return err == nil, err
}
|
||||
21
api/v1/server/handlers/users/update_logout.go
Normal file
21
api/v1/server/handlers/users/update_logout.go
Normal file
@@ -0,0 +1,21 @@
|
||||
package users
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/authn"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (u *UserService) UserUpdateLogout(ctx echo.Context, request gen.UserUpdateLogoutRequestObject) (gen.UserUpdateLogoutResponseObject, error) {
|
||||
user := ctx.Get("user").(*db.UserModel)
|
||||
|
||||
if err := authn.NewSessionHelpers(u.config).SaveUnauthenticated(ctx); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return gen.UserUpdateLogout200JSONResponse(
|
||||
*transformers.ToUser(user),
|
||||
), nil
|
||||
}
|
||||
30
api/v1/server/handlers/workers/get.go
Normal file
30
api/v1/server/handlers/workers/get.go
Normal file
@@ -0,0 +1,30 @@
|
||||
package workers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkerService) WorkerGet(ctx echo.Context, request gen.WorkerGetRequestObject) (gen.WorkerGetResponseObject, error) {
|
||||
worker := ctx.Get("worker").(*db.WorkerModel)
|
||||
|
||||
stepRuns, err := t.config.Repository.Worker().ListRecentWorkerStepRuns(worker.TenantID, worker.ID)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
respStepRuns := make([]gen.StepRun, len(stepRuns))
|
||||
|
||||
for i := range stepRuns {
|
||||
respStepRuns[i] = *transformers.ToStepRun(&stepRuns[i])
|
||||
}
|
||||
|
||||
workerResp := *transformers.ToWorker(worker)
|
||||
|
||||
workerResp.RecentStepRuns = &respStepRuns
|
||||
|
||||
return gen.WorkerGet200JSONResponse(workerResp), nil
|
||||
}
|
||||
32
api/v1/server/handlers/workers/list.go
Normal file
32
api/v1/server/handlers/workers/list.go
Normal file
@@ -0,0 +1,32 @@
|
||||
package workers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkerService) WorkerList(ctx echo.Context, request gen.WorkerListRequestObject) (gen.WorkerListResponseObject, error) {
|
||||
tenant := ctx.Get("tenant").(*db.TenantModel)
|
||||
|
||||
workers, err := t.config.Repository.Worker().ListWorkers(tenant.ID, &repository.ListWorkersOpts{})
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows := make([]gen.Worker, len(workers))
|
||||
|
||||
for i, worker := range workers {
|
||||
workerCp := worker
|
||||
rows[i] = *transformers.ToWorker(workerCp.Worker)
|
||||
}
|
||||
|
||||
return gen.WorkerList200JSONResponse(
|
||||
gen.WorkerList{
|
||||
Rows: &rows,
|
||||
},
|
||||
), nil
|
||||
}
|
||||
15
api/v1/server/handlers/workers/service.go
Normal file
15
api/v1/server/handlers/workers/service.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package workers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
)
|
||||
|
||||
// WorkerService implements the worker-related API handlers. It holds the
// shared server configuration for repository access.
type WorkerService struct {
	config *server.ServerConfig
}

// NewWorkerService constructs a WorkerService backed by the given server config.
func NewWorkerService(config *server.ServerConfig) *WorkerService {
	return &WorkerService{
		config: config,
	}
}
|
||||
20
api/v1/server/handlers/workflows/get.go
Normal file
20
api/v1/server/handlers/workflows/get.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkflowService) WorkflowGet(ctx echo.Context, request gen.WorkflowGetRequestObject) (gen.WorkflowGetResponseObject, error) {
|
||||
workflow := ctx.Get("workflow").(*db.WorkflowModel)
|
||||
|
||||
resp, err := transformers.ToWorkflow(workflow, nil)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return gen.WorkflowGet200JSONResponse(*resp), nil
|
||||
}
|
||||
54
api/v1/server/handlers/workflows/get_definition.go
Normal file
54
api/v1/server/handlers/workflows/get_definition.go
Normal file
@@ -0,0 +1,54 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// WorkflowVersionGetDefinition returns the raw YAML definition of a workflow
// version. The version is taken from the ?version query parameter when
// present; otherwise the first entry of the workflow's versions is used
// (presumably the latest — TODO confirm the ordering guarantee of
// workflow.Versions()).
//
// Returns 400 when the workflow has no versions and 404 when the requested
// version does not exist for the tenant.
func (t *WorkflowService) WorkflowVersionGetDefinition(ctx echo.Context, request gen.WorkflowVersionGetDefinitionRequestObject) (gen.WorkflowVersionGetDefinitionResponseObject, error) {
	tenant := ctx.Get("tenant").(*db.TenantModel)
	workflow := ctx.Get("workflow").(*db.WorkflowModel)

	var workflowVersionId string

	if request.Params.Version != nil {
		workflowVersionId = request.Params.Version.String()
	} else {
		versions := workflow.Versions()

		if len(versions) == 0 {
			return gen.WorkflowVersionGetDefinition400JSONResponse(
				apierrors.NewAPIErrors("workflow has no versions"),
			), nil
		}

		// default to the first listed version
		workflowVersionId = versions[0].ID
	}

	// fetch scoped to the tenant so versions from other tenants 404
	workflowVersion, err := t.config.Repository.Workflow().GetWorkflowVersionById(tenant.ID, workflowVersionId)

	if err != nil {
		if errors.Is(err, db.ErrNotFound) {
			return gen.WorkflowVersionGetDefinition404JSONResponse(
				apierrors.NewAPIErrors("version not found"),
			), nil
		}

		return nil, err
	}

	// render the stored version back into its YAML representation
	rawDefinition, err := transformers.ToWorkflowYAMLBytes(workflow, workflowVersion)

	if err != nil {
		return nil, err
	}

	return gen.WorkflowVersionGetDefinition200JSONResponse(gen.WorkflowVersionDefinition{
		RawDefinition: string(rawDefinition),
	}), nil
}
|
||||
22
api/v1/server/handlers/workflows/get_run.go
Normal file
22
api/v1/server/handlers/workflows/get_run.go
Normal file
@@ -0,0 +1,22 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkflowService) WorkflowRunGet(ctx echo.Context, request gen.WorkflowRunGetRequestObject) (gen.WorkflowRunGetResponseObject, error) {
|
||||
run := ctx.Get("workflow-run").(*db.WorkflowRunModel)
|
||||
|
||||
resp, err := transformers.ToWorkflowRun(run)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return gen.WorkflowRunGet200JSONResponse(
|
||||
*resp,
|
||||
), nil
|
||||
}
|
||||
52
api/v1/server/handlers/workflows/get_version.go
Normal file
52
api/v1/server/handlers/workflows/get_version.go
Normal file
@@ -0,0 +1,52 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"errors"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// WorkflowVersionGet returns a single workflow version. The version is taken
// from the ?version query parameter when present; otherwise the first entry
// of the workflow's versions is used (presumably the latest — TODO confirm
// the ordering guarantee of workflow.Versions()).
//
// Returns 400 when the workflow has no versions and 404 when the requested
// version does not exist for the tenant.
func (t *WorkflowService) WorkflowVersionGet(ctx echo.Context, request gen.WorkflowVersionGetRequestObject) (gen.WorkflowVersionGetResponseObject, error) {
	tenant := ctx.Get("tenant").(*db.TenantModel)
	workflow := ctx.Get("workflow").(*db.WorkflowModel)

	var workflowVersionId string

	if request.Params.Version != nil {
		workflowVersionId = request.Params.Version.String()
	} else {
		versions := workflow.Versions()

		if len(versions) == 0 {
			return gen.WorkflowVersionGet400JSONResponse(
				apierrors.NewAPIErrors("workflow has no versions"),
			), nil
		}

		// default to the first listed version
		workflowVersionId = versions[0].ID
	}

	// fetch scoped to the tenant so versions from other tenants 404
	workflowVersion, err := t.config.Repository.Workflow().GetWorkflowVersionById(tenant.ID, workflowVersionId)

	if err != nil {
		if errors.Is(err, db.ErrNotFound) {
			return gen.WorkflowVersionGet404JSONResponse(
				apierrors.NewAPIErrors("version not found"),
			), nil
		}

		return nil, err
	}

	resp, err := transformers.ToWorkflowVersion(workflow, workflowVersion)

	if err != nil {
		return nil, err
	}

	return gen.WorkflowVersionGet200JSONResponse(*resp), nil
}
|
||||
61
api/v1/server/handlers/workflows/list.go
Normal file
61
api/v1/server/handlers/workflows/list.go
Normal file
@@ -0,0 +1,61 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"math"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkflowService) WorkflowList(ctx echo.Context, request gen.WorkflowListRequestObject) (gen.WorkflowListResponseObject, error) {
|
||||
tenant := ctx.Get("tenant").(*db.TenantModel)
|
||||
|
||||
limit := 50
|
||||
offset := 0
|
||||
|
||||
listOpts := &repository.ListWorkflowsOpts{
|
||||
Limit: &limit,
|
||||
Offset: &offset,
|
||||
}
|
||||
|
||||
listResp, err := t.config.Repository.Workflow().ListWorkflows(tenant.ID, listOpts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows := make([]gen.Workflow, len(listResp.Rows))
|
||||
|
||||
for i := range listResp.Rows {
|
||||
workflow, err := transformers.ToWorkflow(listResp.Rows[i].WorkflowModel, listResp.Rows[i].LatestRun)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows[i] = *workflow
|
||||
}
|
||||
|
||||
// use the total rows and limit to calculate the total pages
|
||||
totalPages := int64(math.Ceil(float64(listResp.Count) / float64(limit)))
|
||||
currPage := 1 + int64(math.Ceil(float64(offset)/float64(limit)))
|
||||
nextPage := currPage + 1
|
||||
|
||||
if currPage == totalPages {
|
||||
nextPage = currPage
|
||||
}
|
||||
|
||||
return gen.WorkflowList200JSONResponse(
|
||||
gen.WorkflowList{
|
||||
Rows: &rows,
|
||||
Pagination: &gen.PaginationResponse{
|
||||
NumPages: &totalPages,
|
||||
CurrentPage: &currPage,
|
||||
NextPage: &nextPage,
|
||||
},
|
||||
},
|
||||
), nil
|
||||
}
|
||||
76
api/v1/server/handlers/workflows/list_runs.go
Normal file
76
api/v1/server/handlers/workflows/list_runs.go
Normal file
@@ -0,0 +1,76 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"math"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
func (t *WorkflowService) WorkflowRunList(ctx echo.Context, request gen.WorkflowRunListRequestObject) (gen.WorkflowRunListResponseObject, error) {
|
||||
tenant := ctx.Get("tenant").(*db.TenantModel)
|
||||
|
||||
limit := 50
|
||||
offset := 0
|
||||
|
||||
listOpts := &repository.ListWorkflowRunsOpts{
|
||||
Limit: &limit,
|
||||
Offset: &offset,
|
||||
}
|
||||
|
||||
if request.Params.Limit != nil {
|
||||
limit := int(*request.Params.Limit)
|
||||
listOpts.Limit = &limit
|
||||
}
|
||||
|
||||
if request.Params.Offset != nil {
|
||||
offset := int(*request.Params.Offset)
|
||||
listOpts.Offset = &offset
|
||||
}
|
||||
|
||||
if request.Params.WorkflowId != nil {
|
||||
workflowIdStr := request.Params.WorkflowId.String()
|
||||
listOpts.WorkflowId = &workflowIdStr
|
||||
}
|
||||
|
||||
if request.Params.EventId != nil {
|
||||
eventIdStr := request.Params.EventId.String()
|
||||
listOpts.EventId = &eventIdStr
|
||||
}
|
||||
|
||||
workflowRuns, err := t.config.Repository.WorkflowRun().ListWorkflowRuns(tenant.ID, listOpts)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
rows := make([]gen.WorkflowRun, len(workflowRuns.Rows))
|
||||
|
||||
for i, workflow := range workflowRuns.Rows {
|
||||
workflowCp := workflow
|
||||
rows[i] = *transformers.ToWorkflowRunFromSQLC(workflowCp)
|
||||
}
|
||||
|
||||
// use the total rows and limit to calculate the total pages
|
||||
totalPages := int64(math.Ceil(float64(workflowRuns.Count) / float64(limit)))
|
||||
currPage := 1 + int64(math.Ceil(float64(offset)/float64(limit)))
|
||||
nextPage := currPage + 1
|
||||
|
||||
if currPage == totalPages {
|
||||
nextPage = currPage
|
||||
}
|
||||
|
||||
return gen.WorkflowRunList200JSONResponse(
|
||||
gen.WorkflowRunList{
|
||||
Rows: &rows,
|
||||
Pagination: &gen.PaginationResponse{
|
||||
NumPages: &totalPages,
|
||||
CurrentPage: &currPage,
|
||||
NextPage: &nextPage,
|
||||
},
|
||||
},
|
||||
), nil
|
||||
}
|
||||
15
api/v1/server/handlers/workflows/service.go
Normal file
15
api/v1/server/handlers/workflows/service.go
Normal file
@@ -0,0 +1,15 @@
|
||||
package workflows
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
)
|
||||
|
||||
// WorkflowService implements the workflow-related API handlers. It holds the
// shared server configuration for repository access.
type WorkflowService struct {
	config *server.ServerConfig
}

// NewWorkflowService constructs a WorkflowService backed by the given server config.
func NewWorkflowService(config *server.ServerConfig) *WorkflowService {
	return &WorkflowService{
		config: config,
	}
}
|
||||
191
api/v1/server/middleware/middleware.go
Normal file
191
api/v1/server/middleware/middleware.go
Normal file
@@ -0,0 +1,191 @@
|
||||
package middleware
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/getkin/kin-openapi/openapi3"
|
||||
"github.com/getkin/kin-openapi/routers"
|
||||
"github.com/getkin/kin-openapi/routers/gorillamux"
|
||||
lru "github.com/hashicorp/golang-lru/v2"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// SecurityRequirement describes the authentication requirements of a matched
// route, as derived from the OpenAPI spec.
type SecurityRequirement interface {
	// IsOptional reports whether auth may be skipped (x-security-optional extension).
	IsOptional() bool
	// NoAuth reports whether the route declares no security requirements at all.
	NoAuth() bool
	// CookieAuth reports whether cookie-based authentication is accepted.
	CookieAuth() bool
	// BearerAuth reports whether bearer-token authentication is accepted.
	BearerAuth() bool
}
|
||||
|
||||
// RouteInfo is the cached, pre-resolved description of an API route: its
// OpenAPI operation id, its security requirements, and the resource keys
// (from the x-resources extension) the populator should load.
type RouteInfo struct {
	OperationID string
	Security    SecurityRequirement
	Resources   []string
}
|
||||
|
||||
// securityRequirement is the concrete SecurityRequirement backed by the raw
// OpenAPI security requirements of a route plus the x-security-optional flag.
type securityRequirement struct {
	requirements []openapi3.SecurityRequirement

	// true when the route's x-security-optional extension was set
	xSecurityOptional bool
}
|
||||
|
||||
// IsOptional reports whether the route marked authentication as optional via
// the x-security-optional extension.
func (s *securityRequirement) IsOptional() bool {
	return s.xSecurityOptional
}
|
||||
|
||||
func (s *securityRequirement) NoAuth() bool {
|
||||
return s.requirements == nil || len(s.requirements) == 0
|
||||
}
|
||||
|
||||
func (s *securityRequirement) CookieAuth() bool {
|
||||
if s.NoAuth() {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, requirement := range s.requirements {
|
||||
if _, ok := requirement["cookieAuth"]; ok {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
func (s *securityRequirement) BearerAuth() bool {
|
||||
if s.NoAuth() {
|
||||
return false
|
||||
}
|
||||
|
||||
for _, requirement := range s.requirements {
|
||||
if _, ok := requirement["bearerAuth"]; ok {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
|
||||
// MiddlewareFunc builds an echo handler for a request given its resolved
// route information.
type MiddlewareFunc func(r *RouteInfo) echo.HandlerFunc
|
||||
|
||||
// MiddlewareHandler resolves each incoming request against the OpenAPI spec
// and runs the registered route-aware middleware functions before the handler.
type MiddlewareHandler struct {
	// cache for route info, since we don't want to parse the openapi spec on every request
	cache *lru.Cache[string, *RouteInfo]

	// openapi spec
	swagger *openapi3.T

	// registered middlewares, run in registration order
	mws []MiddlewareFunc
}
|
||||
|
||||
func NewMiddlewareHandler(swagger *openapi3.T) (*MiddlewareHandler, error) {
|
||||
cache, err := lru.New[string, *RouteInfo](1000)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return &MiddlewareHandler{
|
||||
cache: cache,
|
||||
swagger: swagger,
|
||||
mws: make([]MiddlewareFunc, 0),
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Use registers a middleware function; registered middlewares run in order
// for every matched request.
func (m *MiddlewareHandler) Use(mw MiddlewareFunc) {
	m.mws = append(m.mws, mw)
}
|
||||
|
||||
// Middleware returns an echo middleware that, for each request:
//  1. resolves the request against the OpenAPI spec (via a gorillamux router),
//     caching the resulting RouteInfo by method+path,
//  2. extracts the route's security requirements and x-security-optional /
//     x-resources extensions, and
//  3. runs every registered MiddlewareFunc with that RouteInfo before
//     invoking the next handler.
//
// Requests that match no route get a 400; unexpected router errors get a 500.
func (m *MiddlewareHandler) Middleware() (echo.MiddlewareFunc, error) {
	router, err := gorillamux.NewRouter(m.swagger)

	if err != nil {
		return nil, err
	}

	f := func(next echo.HandlerFunc) echo.HandlerFunc {
		return func(c echo.Context) error {
			req := c.Request()

			var routeInfo *RouteInfo
			var ok bool

			// check the cache for a match, otherwise parse the openapi spec.
			// NOTE(review): the cache key is the concrete request path, not the
			// route template, so paths with ids populate many entries — the LRU
			// bounds this at 1000.
			if routeInfo, ok = m.cache.Get(getCacheKey(req)); !ok {
				route, _, err := router.FindRoute(req)

				// We failed to find a matching route for the request.
				if err != nil {
					switch e := err.(type) {
					case *routers.RouteError:
						// We've got a bad request, the path requested doesn't match
						// either server, or path, or something.
						return echo.NewHTTPError(http.StatusBadRequest, e.Reason)
					default:
						// This should never happen today, but if our upstream code changes,
						// we don't want to crash the server, so handle the unexpected error.
						return echo.NewHTTPError(http.StatusInternalServerError,
							fmt.Sprintf("error validating route: %s", err.Error()))
					}
				}

				security := route.Operation.Security

				// If there aren't any security requirements for the operation, use the global security requirements
				if security == nil {
					// Use the global security requirements.
					security = &route.Spec.Security
				}

				var isOptional bool

				// read x-security-optional from the operation's extensions;
				// the extension value is assumed to be a bool when present
				xSecurityOptional := route.Operation.Extensions["x-security-optional"]

				if xSecurityOptional != nil {
					isOptional = xSecurityOptional.(bool)
				}

				// read resources from x-resources (a list of resource-key strings)
				var resources []string
				resourcesInt := route.Operation.Extensions["x-resources"]

				if resourcesInt != nil {
					resourcesIntArr := resourcesInt.([]interface{})

					resources = make([]string, len(resourcesIntArr))

					for i, v := range resourcesIntArr {
						resources[i] = v.(string)
					}
				}

				routeInfo = &RouteInfo{
					OperationID: route.Operation.OperationID,
					Security: &securityRequirement{
						requirements:      *security,
						xSecurityOptional: isOptional,
					},
					Resources: resources,
				}

				m.cache.Add(getCacheKey(req), routeInfo)
			}

			// run each registered middleware with the resolved route info;
			// the first error short-circuits the chain
			for _, m := range m.mws {
				if err := m(routeInfo)(c); err != nil {
					return err
				}
			}

			return next(c)
		}
	}

	return f, nil
}
|
||||
|
||||
func getCacheKey(req *http.Request) string {
|
||||
return req.Method + ":" + req.URL.Path
|
||||
}
|
||||
174
api/v1/server/middleware/populator/populator.go
Normal file
174
api/v1/server/middleware/populator/populator.go
Normal file
@@ -0,0 +1,174 @@
|
||||
package populator
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/middleware"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// ResourceGetterFunc fetches a resource by id (optionally scoped by its
// parent's id) and returns the resource together with the canonical parent
// id, which the populator uses to resolve ancestors that were not present in
// the URL.
type ResourceGetterFunc func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error)
|
||||
|
||||
// Populator loads the resources named by a route's x-resources extension and
// attaches them to the echo context, so handlers can read fully-resolved
// models (tenant, workflow, worker, ...) instead of raw path params.
type Populator struct {
	// getters is a map of resource keys to getter methods
	getters map[string]ResourceGetterFunc

	// config is passed through to every getter
	config *server.ServerConfig
}
|
||||
|
||||
func NewPopulator(config *server.ServerConfig) *Populator {
|
||||
return &Populator{
|
||||
getters: make(map[string]ResourceGetterFunc),
|
||||
config: config,
|
||||
}
|
||||
}
|
||||
|
||||
// RegisterGetter associates a resource key (as used in x-resources) with the
// getter that loads it. A later registration for the same key replaces the
// earlier one.
func (p *Populator) RegisterGetter(resourceKey string, getter ResourceGetterFunc) {
	p.getters[resourceKey] = getter
}
|
||||
|
||||
func (p *Populator) Middleware(r *middleware.RouteInfo) echo.HandlerFunc {
|
||||
return func(c echo.Context) error {
|
||||
err := p.populate(c, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// populate builds a linear parent→child chain of resource nodes from the
// route's declared resource keys (each node gets its id from the matching
// path parameter, when present), resolves every node via traverseNode, and
// finally attaches each resolved resource to the echo context under its
// resource key.
//
// The chain is strictly linear: each node has at most one child, in the
// order the keys appear in x-resources.
func (p *Populator) populate(c echo.Context, r *middleware.RouteInfo) error {
	if len(r.Resources) == 0 {
		return nil
	}

	// Create a map of keys to identifiers in the form of strings
	keysToIds := make(map[string]string)

	// Add params to the map
	for _, paramName := range c.ParamNames() {
		keysToIds[paramName] = c.Param(paramName)
	}

	rootResource := &resource{}
	currResource := rootResource
	var prevResource *resource

	// loop through the requested resources, adding params as ids and linking
	// each node to its predecessor
	for _, resourceKey := range r.Resources {
		currResource.ResourceKey = resourceKey

		if resourceId, exists := keysToIds[resourceKey]; exists && resourceId != "" {
			currResource.ResourceID = resourceId
		}

		if prevResource != nil {
			currResource.ParentID = prevResource.ResourceID
			prevResource.Children = append(prevResource.Children, currResource)
		}

		prevResource = currResource
		// pre-allocate the next node; the final spare is simply discarded
		currResource = &resource{}
	}

	err := p.traverseNode(c, rootResource)

	if err != nil {
		return err
	}

	// loop through the resources again and add them to the context
	// (traverseNode also sets resolved resources, so this is a safety pass
	// over the linear chain)
	currResource = rootResource

	for {
		if currResource.Resource != nil {
			c.Set(currResource.ResourceKey, currResource.Resource)
		}

		if len(currResource.Children) == 0 {
			break
		}

		currResource = currResource.Children[0]
	}

	return nil
}
|
||||
|
||||
// resource is one node in the populator's parent→child resolution chain.
type resource struct {
	// ResourceKey is the x-resources key (and the echo context key)
	ResourceKey string
	// ResourceID is the identifier from the path parameter, when present
	ResourceID string
	// Resource holds the model returned by the getter once resolved
	Resource interface{}
	// ParentID is the (possibly getter-discovered) id of the parent resource
	ParentID string
	// Children are the dependent resources (at most one in practice)
	Children []*resource
}
|
||||
|
||||
// traverseNode resolves a resource node and its descendants. A node with its
// own ResourceID is resolved directly. A node without one is resolved
// bottom-up: its child is traversed first, and the unique parent id the
// child's getter reported is then used to fetch this node. If neither path
// yields a resolution, an error is returned.
//
// Resolved resources are also set on the echo context here, in addition to
// the final pass in populate.
func (p *Populator) traverseNode(c echo.Context, node *resource) error {
	populated := false

	// determine if we have a resource locator to populate the node
	if node.ResourceID != "" {
		err := p.callGetter(node, node.ParentID, node.ResourceID)

		if err != nil {
			return fmt.Errorf("could not populate resource %s: %w", node.ResourceKey, err)
		}

		populated = true
	}

	if node.Children != nil {
		for _, child := range node.Children {
			// a freshly-populated parent passes its id down before descending
			if populated {
				child.ParentID = node.ResourceID
			}

			err := p.traverseNode(c, child)

			if err != nil {
				return err
			}

			if !populated && child.ParentID != "" {
				// use the parent locator to populate the resource: the child's
				// getter reported the unique id of this (parent) resource
				err = p.callGetter(node, node.ParentID, child.ParentID)

				if err != nil {
					return err
				}

				populated = true
			}
		}
	}

	// if parent is not populated at this point, throw an error
	if !populated {
		return fmt.Errorf("resource %s could not be populated", node.ResourceKey)
	}

	// if the resource is not nil, add to the context
	if node.Resource != nil {
		c.Set(node.ResourceKey, node.Resource)
	}

	return nil
}
|
||||
|
||||
func (p *Populator) callGetter(node *resource, parentId, id string) error {
|
||||
if _, exists := p.getters[node.ResourceKey]; !exists {
|
||||
return nil
|
||||
}
|
||||
|
||||
res, uniqueParentId, err := p.getters[node.ResourceKey](p.config, parentId, id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
node.Resource = res
|
||||
node.ParentID = uniqueParentId
|
||||
|
||||
return nil
|
||||
}
|
||||
77
api/v1/server/middleware/populator/populator_test.go
Normal file
77
api/v1/server/middleware/populator/populator_test.go
Normal file
@@ -0,0 +1,77 @@
|
||||
package populator
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/middleware"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/labstack/echo/v4"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// oneToManyResource is a test fixture representing a child resource that
// belongs to a parent.
type oneToManyResource struct {
	ID       string `json:"id"`
	ParentID string `json:"parent_id"`
}
|
||||
|
||||
func oneToManyResourceGetter(config *server.ServerConfig, parentId, id string) (interface{}, string, error) {
|
||||
if parentId == "" {
|
||||
parentId := uuid.NewString()
|
||||
|
||||
return &oneToManyResource{
|
||||
ID: id,
|
||||
ParentID: parentId,
|
||||
}, parentId, nil
|
||||
}
|
||||
|
||||
return &oneToManyResource{
|
||||
ID: id,
|
||||
ParentID: parentId,
|
||||
}, parentId, nil
|
||||
}
|
||||
|
||||
// topLevelResource is a test fixture representing a resource with no parent.
type topLevelResource struct {
	ID string `json:"id"`
}
|
||||
|
||||
func topLevelResourceGetter(config *server.ServerConfig, parentId, id string) (interface{}, string, error) {
|
||||
return &topLevelResource{
|
||||
ID: id,
|
||||
}, "", nil
|
||||
}
|
||||
|
||||
func TestPopulatorMiddleware(t *testing.T) {
|
||||
e := echo.New()
|
||||
req := httptest.NewRequest(http.MethodGet, "/", nil)
|
||||
rec := httptest.NewRecorder()
|
||||
c := e.NewContext(req, rec)
|
||||
|
||||
// Mock RouteInfo
|
||||
routeInfo := &middleware.RouteInfo{
|
||||
Resources: []string{"resource1", "resource2"},
|
||||
}
|
||||
|
||||
resource2Id := uuid.New().String()
|
||||
|
||||
// Setting params for the context
|
||||
c.SetParamNames("resource2")
|
||||
c.SetParamValues(resource2Id)
|
||||
|
||||
// Creating Populator with mock getter function
|
||||
populator := NewPopulator(&server.ServerConfig{})
|
||||
|
||||
populator.RegisterGetter("resource1", topLevelResourceGetter)
|
||||
populator.RegisterGetter("resource2", oneToManyResourceGetter)
|
||||
|
||||
// Using the Populator middleware
|
||||
middlewareFunc := populator.Middleware(routeInfo)
|
||||
err := middlewareFunc(c)
|
||||
|
||||
// Assertions
|
||||
assert.NoError(t, err)
|
||||
assert.NotNil(t, c.Get("resource1"))
|
||||
assert.NotNil(t, c.Get("resource2"))
|
||||
}
|
||||
13
api/v1/server/oas/apierrors/apierrors.go
Normal file
13
api/v1/server/oas/apierrors/apierrors.go
Normal file
@@ -0,0 +1,13 @@
|
||||
package apierrors
|
||||
|
||||
import "github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
|
||||
func NewAPIErrors(description string) gen.APIErrors {
|
||||
return gen.APIErrors{
|
||||
Errors: []gen.APIError{
|
||||
{
|
||||
Description: description,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
235
api/v1/server/oas/gen/codegen.yaml
Normal file
235
api/v1/server/oas/gen/codegen.yaml
Normal file
@@ -0,0 +1,235 @@
|
||||
package: gen
|
||||
output: ./api/v1/server/oas/gen/openapi.gen.go
|
||||
generate:
|
||||
models: true
|
||||
echo-server: true
|
||||
strict-server: true
|
||||
embedded-spec: true
|
||||
output-options:
|
||||
user-templates:
|
||||
"strict/strict-interface.tmpl": >-
|
||||
{{range .}}
|
||||
{{$opid := .OperationId -}}
|
||||
type {{$opid | ucFirst}}RequestObject struct {
|
||||
{{range .PathParams -}}
|
||||
{{.GoName | ucFirst}} {{.TypeDef}} {{.JsonTag}}
|
||||
{{end -}}
|
||||
{{if .RequiresParamObject -}}
|
||||
Params {{$opid}}Params
|
||||
{{end -}}
|
||||
{{if .HasMaskedRequestContentTypes -}}
|
||||
ContentType string
|
||||
{{end -}}
|
||||
{{$multipleBodies := gt (len .Bodies) 1 -}}
|
||||
{{range .Bodies -}}
|
||||
{{if $multipleBodies}}{{.NameTag}}{{end}}Body {{if eq .NameTag "Multipart"}}*multipart.Reader{{else if ne .NameTag ""}}*{{$opid}}{{.NameTag}}RequestBody{{else}}io.Reader{{end}}
|
||||
{{end -}}
|
||||
}
|
||||
|
||||
type {{$opid | ucFirst}}ResponseObject interface {
|
||||
Visit{{$opid}}Response(w http.ResponseWriter) error
|
||||
}
|
||||
|
||||
{{range .Responses}}
|
||||
{{$statusCode := .StatusCode -}}
|
||||
{{$hasHeaders := ne 0 (len .Headers) -}}
|
||||
{{$fixedStatusCode := .HasFixedStatusCode -}}
|
||||
{{$isRef := .IsRef -}}
|
||||
{{$ref := .Ref | ucFirstWithPkgName -}}
|
||||
{{$headers := .Headers -}}
|
||||
|
||||
{{if (and $hasHeaders (not $isRef)) -}}
|
||||
type {{$opid}}{{$statusCode}}ResponseHeaders struct {
|
||||
{{range .Headers -}}
|
||||
{{.GoName}} {{.Schema.TypeDecl}}
|
||||
{{end -}}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
{{range .Contents}}
|
||||
{{$receiverTypeName := printf "%s%s%s%s" $opid $statusCode .NameTagOrContentType "Response"}}
|
||||
{{if and $fixedStatusCode $isRef -}}
|
||||
type {{$receiverTypeName}} struct{ {{$ref}}{{.NameTagOrContentType}}Response }
|
||||
{{else if and (not $hasHeaders) ($fixedStatusCode) (.IsSupported) -}}
|
||||
type {{$receiverTypeName}} {{if eq .NameTag "Multipart"}}func(writer *multipart.Writer)error{{else if .IsSupported}}{{if .Schema.IsRef}}={{end}} {{.Schema.TypeDecl}}{{else}}io.Reader{{end}}
|
||||
{{else -}}
|
||||
type {{$receiverTypeName}} struct {
|
||||
Body {{if eq .NameTag "Multipart"}}func(writer *multipart.Writer)error{{else if .IsSupported}}{{.Schema.TypeDecl}}{{else}}io.Reader{{end}}
|
||||
{{if $hasHeaders -}}
|
||||
Headers {{if $isRef}}{{$ref}}{{else}}{{$opid}}{{$statusCode}}{{end}}ResponseHeaders
|
||||
{{end -}}
|
||||
|
||||
{{if not $fixedStatusCode -}}
|
||||
StatusCode int
|
||||
{{end -}}
|
||||
|
||||
{{if not .HasFixedContentType -}}
|
||||
ContentType string
|
||||
{{end -}}
|
||||
|
||||
{{if not .IsSupported -}}
|
||||
ContentLength int64
|
||||
{{end -}}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
func (response {{$receiverTypeName}}) Visit{{$opid}}Response(w http.ResponseWriter) error {
|
||||
{{range $headers -}}
|
||||
w.Header().Set("{{.Name}}", fmt.Sprint(response.Headers.{{.GoName}}))
|
||||
{{end -}}
|
||||
{{if eq .NameTag "Multipart" -}}
|
||||
writer := multipart.NewWriter(w)
|
||||
{{end -}}
|
||||
w.Header().Set("Content-Type", {{if eq .NameTag "Multipart"}}writer.FormDataContentType(){{else if .HasFixedContentType }}"{{.ContentType}}"{{else}}response.ContentType{{end}})
|
||||
{{if not .IsSupported -}}
|
||||
if response.ContentLength != 0 {
|
||||
w.Header().Set("Content-Length", fmt.Sprint(response.ContentLength))
|
||||
}
|
||||
{{end -}}
|
||||
w.WriteHeader({{if $fixedStatusCode}}{{$statusCode}}{{else}}response.StatusCode{{end}})
|
||||
{{$hasBodyVar := or ($hasHeaders) (not $fixedStatusCode) (not .IsSupported)}}
|
||||
{{if eq .NameTag "JSON" -}}
|
||||
return json.NewEncoder(w).Encode({{if $hasBodyVar}}response.Body{{else}}response{{end}})
|
||||
{{else if eq .NameTag "Text" -}}
|
||||
_, err := w.Write([]byte({{if $hasBodyVar}}response.Body{{else}}response{{end}}))
|
||||
return err
|
||||
{{else if eq .NameTag "Formdata" -}}
|
||||
if form, err := runtime.MarshalForm({{if $hasBodyVar}}response.Body{{else}}response{{end}}, nil); err != nil {
|
||||
return err
|
||||
} else {
|
||||
_, err := w.Write([]byte(form.Encode()))
|
||||
return err
|
||||
}
|
||||
{{else if eq .NameTag "Multipart" -}}
|
||||
defer writer.Close()
|
||||
return {{if $hasBodyVar}}response.Body{{else}}response{{end}}(writer);
|
||||
{{else -}}
|
||||
if closer, ok := response.Body.(io.ReadCloser); ok {
|
||||
defer closer.Close()
|
||||
}
|
||||
_, err := io.Copy(w, response.Body)
|
||||
return err
|
||||
{{end}}{{/* if eq .NameTag "JSON" */ -}}
|
||||
}
|
||||
{{end}}
|
||||
|
||||
{{if eq 0 (len .Contents) -}}
|
||||
{{if and $fixedStatusCode $isRef -}}
|
||||
type {{$opid}}{{$statusCode}}Response = {{$ref}}Response
|
||||
{{else -}}
|
||||
type {{$opid}}{{$statusCode}}Response struct {
|
||||
{{if $hasHeaders -}}
|
||||
Headers {{if $isRef}}{{$ref}}{{else}}{{$opid}}{{$statusCode}}{{end}}ResponseHeaders
|
||||
{{end}}
|
||||
{{if not $fixedStatusCode -}}
|
||||
StatusCode int
|
||||
{{end -}}
|
||||
}
|
||||
{{end -}}
|
||||
func (response {{$opid}}{{$statusCode}}Response) Visit{{$opid}}Response(w http.ResponseWriter) error {
|
||||
{{range $headers -}}
|
||||
w.Header().Set("{{.Name}}", fmt.Sprint(response.Headers.{{.GoName}}))
|
||||
{{end -}}
|
||||
w.WriteHeader({{if $fixedStatusCode}}{{$statusCode}}{{else}}response.StatusCode{{end}})
|
||||
return nil
|
||||
}
|
||||
{{end}}
|
||||
{{end}}
|
||||
{{end}}
|
||||
|
||||
type StrictServerInterface interface {
|
||||
{{range .}}
|
||||
{{$opid := .OperationId -}}
|
||||
{{$opid}}(ctx echo.Context, request {{$opid | ucFirst}}RequestObject) ({{$opid | ucFirst}}ResponseObject, error)
|
||||
{{end}}
|
||||
}
|
||||
"strict/strict-echo.tmpl": >-
|
||||
type StrictHandlerFunc func(ctx echo.Context, args interface{}) (interface{}, error)
|
||||
|
||||
type StrictMiddlewareFunc func(f StrictHandlerFunc, operationID string) StrictHandlerFunc
|
||||
|
||||
func NewStrictHandler(ssi StrictServerInterface, middlewares []StrictMiddlewareFunc) ServerInterface {
|
||||
return &strictHandler{ssi: ssi, middlewares: middlewares}
|
||||
}
|
||||
|
||||
type strictHandler struct {
|
||||
ssi StrictServerInterface
|
||||
middlewares []StrictMiddlewareFunc
|
||||
}
|
||||
|
||||
{{range .}}
|
||||
{{$opid := .OperationId}}
|
||||
// {{$opid}} operation middleware
|
||||
func (sh *strictHandler) {{.OperationId}}(ctx echo.Context{{genParamArgs .PathParams}}{{if .RequiresParamObject}}, params {{.OperationId}}Params{{end}}) error {
|
||||
var request {{$opid | ucFirst}}RequestObject
|
||||
|
||||
{{range .PathParams -}}
|
||||
{{$varName := .GoVariableName -}}
|
||||
request.{{$varName | ucFirst}} = {{$varName}}
|
||||
{{end -}}
|
||||
|
||||
{{if .RequiresParamObject -}}
|
||||
request.Params = params
|
||||
{{end -}}
|
||||
|
||||
{{ if .HasMaskedRequestContentTypes -}}
|
||||
request.ContentType = ctx.Request().Header.Get("Content-Type")
|
||||
{{end -}}
|
||||
|
||||
{{$multipleBodies := gt (len .Bodies) 1 -}}
|
||||
{{range .Bodies -}}
|
||||
{{if $multipleBodies}}if strings.HasPrefix(ctx.Request().Header.Get("Content-Type"), "{{.ContentType}}") { {{end}}
|
||||
{{if eq .NameTag "JSON" -}}
|
||||
var body {{$opid}}{{.NameTag}}RequestBody
|
||||
if err := ctx.Bind(&body); err != nil {
|
||||
return err
|
||||
}
|
||||
request.{{if $multipleBodies}}{{.NameTag}}{{end}}Body = &body
|
||||
{{else if eq .NameTag "Formdata" -}}
|
||||
if form, err := ctx.FormParams(); err == nil {
|
||||
var body {{$opid}}{{.NameTag}}RequestBody
|
||||
if err := runtime.BindForm(&body, form, nil, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
request.{{if $multipleBodies}}{{.NameTag}}{{end}}Body = &body
|
||||
} else {
|
||||
return err
|
||||
}
|
||||
{{else if eq .NameTag "Multipart" -}}
|
||||
if reader, err := ctx.Request().MultipartReader(); err != nil {
|
||||
return err
|
||||
} else {
|
||||
request.{{if $multipleBodies}}{{.NameTag}}{{end}}Body = reader
|
||||
}
|
||||
{{else if eq .NameTag "Text" -}}
|
||||
data, err := io.ReadAll(ctx.Request().Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
body := {{$opid}}{{.NameTag}}RequestBody(data)
|
||||
request.{{if $multipleBodies}}{{.NameTag}}{{end}}Body = &body
|
||||
{{else -}}
|
||||
request.{{if $multipleBodies}}{{.NameTag}}{{end}}Body = ctx.Request().Body
|
||||
{{end}}{{/* if eq .NameTag "JSON" */ -}}
|
||||
{{if $multipleBodies}}}{{end}}
|
||||
{{end}}{{/* range .Bodies */}}
|
||||
|
||||
handler := func(ctx echo.Context, request interface{}) (interface{}, error){
|
||||
return sh.ssi.{{.OperationId}}(ctx, request.({{$opid | ucFirst}}RequestObject))
|
||||
}
|
||||
for _, middleware := range sh.middlewares {
|
||||
handler = middleware(handler, "{{.OperationId}}")
|
||||
}
|
||||
|
||||
response, err := handler(ctx, request)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
} else if validResponse, ok := response.({{$opid | ucFirst}}ResponseObject); ok {
|
||||
return validResponse.Visit{{$opid}}Response(ctx.Response())
|
||||
} else if response != nil {
|
||||
return fmt.Errorf("Unexpected response type: %T", response)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
{{end}}
|
||||
2381
api/v1/server/oas/gen/openapi.gen.go
Normal file
2381
api/v1/server/oas/gen/openapi.gen.go
Normal file
File diff suppressed because it is too large
Load Diff
44
api/v1/server/oas/transformers/event.go
Normal file
44
api/v1/server/oas/transformers/event.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc"
|
||||
|
||||
"github.com/jackc/pgx/v5/pgtype"
|
||||
)
|
||||
|
||||
func ToEvent(event *db.EventModel) *gen.Event {
|
||||
res := &gen.Event{
|
||||
Metadata: *toAPIMetadata(event.ID, event.CreatedAt, event.UpdatedAt),
|
||||
Key: event.Key,
|
||||
TenantId: event.TenantID,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func ToEventFromSQLC(eventRow *dbsqlc.ListEventsRow) *gen.Event {
|
||||
event := eventRow.Event
|
||||
|
||||
res := &gen.Event{
|
||||
Metadata: *toAPIMetadata(pgUUIDToStr(event.ID), event.CreatedAt.Time, event.UpdatedAt.Time),
|
||||
Key: event.Key,
|
||||
TenantId: pgUUIDToStr(event.TenantId),
|
||||
}
|
||||
|
||||
res.WorkflowRunSummary = &gen.EventWorkflowRunSummary{
|
||||
Failed: &eventRow.Failedruns,
|
||||
Running: &eventRow.Runningruns,
|
||||
Succeeded: &eventRow.Succeededruns,
|
||||
Pending: &eventRow.Pendingruns,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func pgUUIDToStr(uuid pgtype.UUID) string {
|
||||
return fmt.Sprintf("%x-%x-%x-%x-%x", uuid.Bytes[0:4], uuid.Bytes[4:6], uuid.Bytes[6:8], uuid.Bytes[8:10], uuid.Bytes[10:16])
|
||||
}
|
||||
16
api/v1/server/oas/transformers/metadata.go
Normal file
16
api/v1/server/oas/transformers/metadata.go
Normal file
@@ -0,0 +1,16 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/google/uuid"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
)
|
||||
|
||||
func toAPIMetadata(id string, createdAt, updatedAt time.Time) *gen.APIResourceMeta {
|
||||
return &gen.APIResourceMeta{
|
||||
Id: uuid.MustParse(id),
|
||||
CreatedAt: createdAt,
|
||||
UpdatedAt: updatedAt,
|
||||
}
|
||||
}
|
||||
14
api/v1/server/oas/transformers/tenant.go
Normal file
14
api/v1/server/oas/transformers/tenant.go
Normal file
@@ -0,0 +1,14 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
)
|
||||
|
||||
func ToTenant(tenant *db.TenantModel) *gen.Tenant {
|
||||
return &gen.Tenant{
|
||||
Metadata: *toAPIMetadata(tenant.ID, tenant.CreatedAt, tenant.UpdatedAt),
|
||||
Name: tenant.Name,
|
||||
Slug: tenant.Slug,
|
||||
}
|
||||
}
|
||||
49
api/v1/server/oas/transformers/user.go
Normal file
49
api/v1/server/oas/transformers/user.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/oapi-codegen/runtime/types"
|
||||
)
|
||||
|
||||
func ToUser(user *db.UserModel) *gen.User {
|
||||
var name *string
|
||||
|
||||
if dbName, ok := user.Name(); ok {
|
||||
name = &dbName
|
||||
}
|
||||
|
||||
return &gen.User{
|
||||
Metadata: *toAPIMetadata(user.ID, user.CreatedAt, user.UpdatedAt),
|
||||
Email: types.Email(user.Email),
|
||||
EmailVerified: user.EmailVerified,
|
||||
Name: name,
|
||||
}
|
||||
}
|
||||
|
||||
func ToUserTenantPublic(user *db.UserModel) *gen.UserTenantPublic {
|
||||
var name *string
|
||||
|
||||
if dbName, ok := user.Name(); ok {
|
||||
name = &dbName
|
||||
}
|
||||
|
||||
return &gen.UserTenantPublic{
|
||||
Email: types.Email(user.Email),
|
||||
Name: name,
|
||||
}
|
||||
}
|
||||
|
||||
// ToTenantMember converts a Prisma tenant-member model into its API
// representation, embedding the member's public user info and, when the
// relation was loaded, the tenant itself.
//
// NOTE(review): tenantMember.User() is dereferenced unconditionally —
// presumably callers always fetch members with the user relation loaded;
// confirm, otherwise this panics on a nil user.
func ToTenantMember(tenantMember *db.TenantMemberModel) *gen.TenantMember {
	res := &gen.TenantMember{
		Metadata: *toAPIMetadata(tenantMember.ID, tenantMember.CreatedAt, tenantMember.UpdatedAt),
		User:     *ToUserTenantPublic(tenantMember.User()),
		Role:     gen.TenantMemberRole(tenantMember.Role),
	}

	// the tenant relation is optional; only attach it when loaded
	if tenantMember.Tenant() != nil {
		res.Tenant = ToTenant(tenantMember.Tenant())
	}

	return res
}
|
||||
31
api/v1/server/oas/transformers/worker.go
Normal file
31
api/v1/server/oas/transformers/worker.go
Normal file
@@ -0,0 +1,31 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
)
|
||||
|
||||
func ToWorker(worker *db.WorkerModel) *gen.Worker {
|
||||
res := &gen.Worker{
|
||||
Metadata: *toAPIMetadata(worker.ID, worker.CreatedAt, worker.UpdatedAt),
|
||||
Name: worker.Name,
|
||||
}
|
||||
|
||||
if lastHeartbeatAt, ok := worker.LastHeartbeatAt(); ok {
|
||||
res.LastHeartbeatAt = &lastHeartbeatAt
|
||||
}
|
||||
|
||||
if worker.RelationsWorker.Actions != nil {
|
||||
if actions := worker.Actions(); actions != nil {
|
||||
apiActions := make([]string, len(actions))
|
||||
|
||||
for i, action := range actions {
|
||||
apiActions[i] = action.ID
|
||||
}
|
||||
|
||||
res.Actions = &apiActions
|
||||
}
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
313
api/v1/server/oas/transformers/workflow.go
Normal file
313
api/v1/server/oas/transformers/workflow.go
Normal file
@@ -0,0 +1,313 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/datautils"
|
||||
"github.com/hatchet-dev/hatchet/internal/iter"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client/types"
|
||||
)
|
||||
|
||||
func ToWorkflow(workflow *db.WorkflowModel, lastRun *db.WorkflowRunModel) (*gen.Workflow, error) {
|
||||
res := &gen.Workflow{
|
||||
Metadata: *toAPIMetadata(workflow.ID, workflow.CreatedAt, workflow.UpdatedAt),
|
||||
Name: workflow.Name,
|
||||
}
|
||||
|
||||
if lastRun != nil {
|
||||
var err error
|
||||
res.LastRun, err = ToWorkflowRun(lastRun)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if description, ok := workflow.Description(); ok {
|
||||
res.Description = &description
|
||||
}
|
||||
|
||||
if workflow.RelationsWorkflow.Tags != nil {
|
||||
if tags := workflow.Tags(); tags != nil {
|
||||
apiTags := make([]gen.WorkflowTag, len(tags))
|
||||
|
||||
for i, tag := range tags {
|
||||
apiTags[i] = gen.WorkflowTag{
|
||||
Name: tag.Name,
|
||||
Color: tag.Color,
|
||||
}
|
||||
}
|
||||
|
||||
res.Tags = &apiTags
|
||||
}
|
||||
}
|
||||
|
||||
if workflow.RelationsWorkflow.Versions != nil {
|
||||
if versions := workflow.Versions(); versions != nil {
|
||||
apiVersions := make([]gen.WorkflowVersionMeta, len(versions))
|
||||
|
||||
for i, version := range versions {
|
||||
versionCp := version
|
||||
apiVersions[i] = *ToWorkflowVersionMeta(&versionCp)
|
||||
}
|
||||
|
||||
res.Versions = &apiVersions
|
||||
}
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func ToWorkflowVersionMeta(version *db.WorkflowVersionModel) *gen.WorkflowVersionMeta {
|
||||
res := &gen.WorkflowVersionMeta{
|
||||
Metadata: *toAPIMetadata(version.ID, version.CreatedAt, version.UpdatedAt),
|
||||
Version: version.Version,
|
||||
WorkflowId: version.WorkflowID,
|
||||
Order: int32(version.Order),
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func ToWorkflowVersion(workflow *db.WorkflowModel, version *db.WorkflowVersionModel) (*gen.WorkflowVersion, error) {
|
||||
res := &gen.WorkflowVersion{
|
||||
Metadata: *toAPIMetadata(version.ID, version.CreatedAt, version.UpdatedAt),
|
||||
Version: version.Version,
|
||||
WorkflowId: version.WorkflowID,
|
||||
Order: int32(version.Order),
|
||||
}
|
||||
|
||||
if version.RelationsWorkflowVersion.Jobs != nil {
|
||||
if jobs := version.Jobs(); jobs != nil {
|
||||
apiJobs := make([]gen.Job, len(jobs))
|
||||
|
||||
for i, job := range jobs {
|
||||
jobCp := job
|
||||
apiJob, err := ToJob(&jobCp)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
apiJobs[i] = *apiJob
|
||||
}
|
||||
|
||||
res.Jobs = &apiJobs
|
||||
}
|
||||
}
|
||||
|
||||
if workflow == nil {
|
||||
workflow = version.RelationsWorkflowVersion.Workflow
|
||||
}
|
||||
|
||||
if workflow != nil {
|
||||
var err error
|
||||
res.Workflow, err = ToWorkflow(workflow, nil)
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
// ToWorkflowYAMLBytes serializes a workflow version back into the
// declarative YAML workflow format used by clients. It copies the
// workflow's name/description, the version's cron and event triggers, and
// every job with its steps, preferring a step's readable ID over its
// database ID in the output.
func ToWorkflowYAMLBytes(workflow *db.WorkflowModel, version *db.WorkflowVersionModel) ([]byte, error) {
	res := &types.Workflow{
		Name:    workflow.Name,
		Version: version.Version,
	}

	if description, ok := workflow.Description(); ok {
		res.Description = description
	}

	if triggers, ok := version.Triggers(); ok && triggers != nil {
		triggersResp := types.WorkflowTriggers{}

		if crons := triggers.Crons(); crons != nil && len(crons) > 0 {
			triggersResp.Cron = make([]string, len(crons))

			for i, cron := range crons {
				triggersResp.Cron[i] = cron.Cron
			}
		}

		if events := triggers.Events(); events != nil && len(events) > 0 {
			triggersResp.Events = make([]string, len(events))

			for i, event := range events {
				triggersResp.Events[i] = event.EventKey
			}
		}

		res.Triggers = triggersResp
	}

	if jobs := version.Jobs(); jobs != nil {
		res.Jobs = make(map[string]types.WorkflowJob, len(jobs))

		for _, job := range jobs {
			// copy the loop variable before calling methods that capture it
			jobCp := job

			jobRes := types.WorkflowJob{}

			if description, ok := jobCp.Description(); ok {
				jobRes.Description = description
			}

			if timeout, ok := jobCp.Timeout(); ok {
				jobRes.Timeout = timeout
			}

			if steps := jobCp.Steps(); steps != nil {
				jobRes.Steps = make([]types.WorkflowStep, 0)

				// iterate steps in linked-list order rather than storage order
				// NOTE(review): the local name shadows the imported iter package
				iter, err := iter.New(steps)

				if err != nil {
					return nil, fmt.Errorf("could not create step iterator: %w", err)
				}

				for {
					step, ok := iter.Next()

					if !ok {
						break
					}

					stepRes := types.WorkflowStep{
						ID:       step.ID,
						ActionID: step.ActionID,
					}

					// prefer the human-readable step ID when one was set
					if readableId, ok := step.ReadableID(); ok {
						stepRes.ID = readableId
					}

					if timeout, ok := step.Timeout(); ok {
						stepRes.Timeout = timeout
					}

					// step inputs are stored as JSON; decode into a plain map
					if inputs, ok := step.Inputs(); ok {
						withMap := map[string]interface{}{}
						err := datautils.FromJSONType(&inputs, &withMap)

						if err != nil {
							return nil, err
						}

						stepRes.With = withMap
					}

					jobRes.Steps = append(jobRes.Steps, stepRes)
				}

				// NOTE(review): this assignment sits inside the steps != nil
				// branch, so a job with no steps is omitted from the output
				// map entirely — confirm that is intended
				res.Jobs[jobCp.Name] = jobRes
			}
		}
	}

	return types.ToYAML(context.Background(), res)
}
|
||||
|
||||
func ToJob(job *db.JobModel) (*gen.Job, error) {
|
||||
res := &gen.Job{
|
||||
Metadata: *toAPIMetadata(job.ID, job.CreatedAt, job.UpdatedAt),
|
||||
Name: job.Name,
|
||||
TenantId: job.TenantID,
|
||||
VersionId: job.WorkflowVersionID,
|
||||
}
|
||||
|
||||
if description, ok := job.Description(); ok {
|
||||
res.Description = &description
|
||||
}
|
||||
|
||||
if timeout, ok := job.Timeout(); ok {
|
||||
res.Timeout = &timeout
|
||||
}
|
||||
|
||||
if steps := job.Steps(); steps != nil {
|
||||
apiSteps := make([]gen.Step, 0)
|
||||
|
||||
iter, err := iter.New(steps)
|
||||
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("could not create step iterator: %w", err)
|
||||
}
|
||||
|
||||
for {
|
||||
step, ok := iter.Next()
|
||||
|
||||
if !ok {
|
||||
break
|
||||
}
|
||||
|
||||
apiSteps = append(apiSteps, *ToStep(step))
|
||||
}
|
||||
|
||||
res.Steps = apiSteps
|
||||
}
|
||||
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func ToStep(step *db.StepModel) *gen.Step {
|
||||
res := &gen.Step{
|
||||
Metadata: *toAPIMetadata(step.ID, step.CreatedAt, step.UpdatedAt),
|
||||
Action: step.ActionID,
|
||||
JobId: step.JobID,
|
||||
TenantId: step.TenantID,
|
||||
}
|
||||
|
||||
if readableId, ok := step.ReadableID(); ok {
|
||||
res.ReadableId = readableId
|
||||
}
|
||||
|
||||
if timeout, ok := step.Timeout(); ok {
|
||||
res.Timeout = &timeout
|
||||
}
|
||||
|
||||
if next, ok := step.NextID(); ok {
|
||||
res.NextId = next
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func ToWorkflowFromSQLC(row *dbsqlc.Workflow) *gen.Workflow {
|
||||
res := &gen.Workflow{
|
||||
Metadata: *toAPIMetadata(pgUUIDToStr(row.ID), row.CreatedAt.Time, row.UpdatedAt.Time),
|
||||
Name: row.Name,
|
||||
Description: &row.Description.String,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func ToWorkflowVersionMetaFromSQLC(row *dbsqlc.WorkflowVersion, workflow *gen.Workflow) *gen.WorkflowVersionMeta {
|
||||
res := &gen.WorkflowVersionMeta{
|
||||
Metadata: *toAPIMetadata(pgUUIDToStr(row.ID), row.CreatedAt.Time, row.UpdatedAt.Time),
|
||||
Version: row.Version,
|
||||
WorkflowId: pgUUIDToStr(row.WorkflowId),
|
||||
Order: int32(row.Order),
|
||||
Workflow: workflow,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
func ToWorkflowVersionFromSQLC(row *dbsqlc.WorkflowVersion, workflow *gen.Workflow) *gen.WorkflowVersion {
|
||||
res := &gen.WorkflowVersion{
|
||||
Metadata: *toAPIMetadata(pgUUIDToStr(row.ID), row.CreatedAt.Time, row.UpdatedAt.Time),
|
||||
Version: row.Version,
|
||||
WorkflowId: pgUUIDToStr(row.WorkflowId),
|
||||
Order: int32(row.Order),
|
||||
Workflow: workflow,
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
240
api/v1/server/oas/transformers/workflow_run.go
Normal file
240
api/v1/server/oas/transformers/workflow_run.go
Normal file
@@ -0,0 +1,240 @@
|
||||
package transformers
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/dbsqlc"
|
||||
)
|
||||
|
||||
// ToWorkflowRun converts a Prisma workflow run into the API type. Optional
// timestamps are attached only when set and non-zero; the trigger record,
// workflow version, and job runs are converted only when their relations
// were loaded on the model.
func ToWorkflowRun(run *db.WorkflowRunModel) (*gen.WorkflowRun, error) {
	res := &gen.WorkflowRun{
		Metadata:          *toAPIMetadata(run.ID, run.CreatedAt, run.UpdatedAt),
		TenantId:          run.TenantID,
		Status:            gen.WorkflowRunStatus(run.Status),
		WorkflowVersionId: run.WorkflowVersionID,
	}

	// zero timestamps mean "not yet started/finished" and are omitted
	if startedAt, ok := run.StartedAt(); ok && !startedAt.IsZero() {
		res.StartedAt = &startedAt
	}

	if finishedAt, ok := run.FinishedAt(); ok && !finishedAt.IsZero() {
		res.FinishedAt = &finishedAt
	}

	if runErr, ok := run.Error(); ok {
		res.Error = &runErr
	}

	if run.RelationsWorkflowRun.TriggeredBy != nil {
		if triggeredBy, ok := run.TriggeredBy(); ok {
			res.TriggeredBy = *ToWorkflowRunTriggeredBy(triggeredBy)
		}
	}

	if run.RelationsWorkflowRun.WorkflowVersion != nil {
		workflowVersion := run.WorkflowVersion()

		resWorkflowVersion, err := ToWorkflowVersion(nil, workflowVersion)

		if err != nil {
			return nil, err
		}

		res.WorkflowVersion = resWorkflowVersion
	}

	if run.RelationsWorkflowRun.JobRuns != nil {
		jobRuns := make([]gen.JobRun, 0)

		for _, jobRun := range run.JobRuns() {
			// copy the loop variable before taking its address
			jobRunCp := jobRun

			genJobRun, err := ToJobRun(&jobRunCp)

			if err != nil {
				return nil, err
			}

			jobRuns = append(jobRuns, *genJobRun)
		}

		res.JobRuns = &jobRuns
	}

	return res, nil
}
|
||||
|
||||
// ToJobRun converts a Prisma job run into the API type. Timestamps and
// cancellation details are attached only when present (and, for times,
// non-zero). Step runs are always converted, sorted by their Order field.
func ToJobRun(jobRun *db.JobRunModel) (*gen.JobRun, error) {
	res := &gen.JobRun{
		Metadata: *toAPIMetadata(jobRun.ID, jobRun.CreatedAt, jobRun.UpdatedAt),
		Status:   gen.JobRunStatus(jobRun.Status),
		JobId:    jobRun.JobID,
		TenantId: jobRun.TenantID,
	}

	if startedAt, ok := jobRun.StartedAt(); ok && !startedAt.IsZero() {
		res.StartedAt = &startedAt
	}

	if finishedAt, ok := jobRun.FinishedAt(); ok && !finishedAt.IsZero() {
		res.FinishedAt = &finishedAt
	}

	if cancelledAt, ok := jobRun.CancelledAt(); ok && !cancelledAt.IsZero() {
		res.CancelledAt = &cancelledAt
	}

	if cancelledError, ok := jobRun.CancelledError(); ok {
		res.CancelledError = &cancelledError
	}

	if cancelledReason, ok := jobRun.CancelledReason(); ok {
		res.CancelledReason = &cancelledReason
	}

	if timeoutAt, ok := jobRun.TimeoutAt(); ok && !timeoutAt.IsZero() {
		res.TimeoutAt = &timeoutAt
	}

	// only convert the job when its relation was loaded
	if jobRun.RelationsJobRun.Job != nil {
		var err error
		job := jobRun.Job()
		res.Job, err = ToJob(job)

		if err != nil {
			return nil, err
		}
	}

	// sort step runs by their declared order before converting
	// NOTE(review): SliceStable mutates the slice returned by StepRuns()
	// in place — confirm the model does not share this slice elsewhere
	orderedStepRuns := jobRun.StepRuns()

	sort.SliceStable(orderedStepRuns, func(i, j int) bool {
		return orderedStepRuns[i].Order < orderedStepRuns[j].Order
	})

	stepRuns := make([]gen.StepRun, 0)

	for _, stepRun := range orderedStepRuns {
		// copy the loop variable before taking its address
		stepRunCp := stepRun
		stepRuns = append(stepRuns, *ToStepRun(&stepRunCp))
	}

	res.StepRuns = &stepRuns

	return res, nil
}
|
||||
|
||||
// ToStepRun converts a Prisma step run into the API type. Optional
// timestamps (started/finished/cancelled/timeout), cancellation details,
// the error, and the worker ID are attached only when present — times
// additionally must be non-zero. The step itself is converted only when
// its relation was loaded.
func ToStepRun(stepRun *db.StepRunModel) *gen.StepRun {
	res := &gen.StepRun{
		Metadata: *toAPIMetadata(stepRun.ID, stepRun.CreatedAt, stepRun.UpdatedAt),
		Status:   gen.StepRunStatus(stepRun.Status),
		StepId:   stepRun.StepID,
		TenantId: stepRun.TenantID,
		JobRunId: stepRun.JobRunID,
	}

	if startedAt, ok := stepRun.StartedAt(); ok && !startedAt.IsZero() {
		res.StartedAt = &startedAt
	}

	if finishedAt, ok := stepRun.FinishedAt(); ok && !finishedAt.IsZero() {
		res.FinishedAt = &finishedAt
	}

	if cancelledAt, ok := stepRun.CancelledAt(); ok && !cancelledAt.IsZero() {
		res.CancelledAt = &cancelledAt
	}

	if cancelledError, ok := stepRun.CancelledError(); ok {
		res.CancelledError = &cancelledError
	}

	if cancelledReason, ok := stepRun.CancelledReason(); ok {
		res.CancelledReason = &cancelledReason
	}

	if runErr, ok := stepRun.Error(); ok {
		res.Error = &runErr
	}

	if timeoutAt, ok := stepRun.TimeoutAt(); ok && !timeoutAt.IsZero() {
		res.TimeoutAt = &timeoutAt
	}

	if workerId, ok := stepRun.WorkerID(); ok {
		res.WorkerId = &workerId
	}

	if stepRun.RelationsStepRun.Step != nil {
		step := stepRun.Step()

		res.Step = ToStep(step)
	}

	return res
}
|
||||
|
||||
// ToWorkflowRunTriggeredBy converts the trigger record of a workflow run,
// attaching the originating event when one is present.
func ToWorkflowRunTriggeredBy(triggeredBy *db.WorkflowRunTriggeredByModel) *gen.WorkflowRunTriggeredBy {
	res := &gen.WorkflowRunTriggeredBy{
		Metadata: *toAPIMetadata(triggeredBy.ID, triggeredBy.CreatedAt, triggeredBy.UpdatedAt),
		ParentId: triggeredBy.ParentID,
	}

	if event, ok := triggeredBy.Event(); ok {
		res.Event = ToEvent(event)
	}

	return res
}
|
||||
|
||||
// ToWorkflowRunFromSQLC converts a sqlc ListWorkflowRuns row — which
// carries the run, its trigger record, the workflow, the workflow version,
// and optionally the triggering event — into the API workflow-run type.
func ToWorkflowRunFromSQLC(row *dbsqlc.ListWorkflowRunsRow) *gen.WorkflowRun {
	run := row.WorkflowRun
	runTriggeredBy := row.WorkflowRunTriggeredBy

	workflow := ToWorkflowFromSQLC(&row.Workflow)
	workflowVersion := ToWorkflowVersionFromSQLC(&row.WorkflowVersion, workflow)

	// zero timestamps mean the run has not started/finished yet
	var startedAt *time.Time

	if !run.StartedAt.Time.IsZero() {
		startedAt = &run.StartedAt.Time
	}

	var finishedAt *time.Time

	if !run.FinishedAt.Time.IsZero() {
		finishedAt = &run.FinishedAt.Time
	}

	// the event columns are nullable on this row (presumably an outer
	// join — confirm against the query); both must be valid for an event
	// to exist
	var event *gen.Event

	if row.ID.Valid && row.Key.Valid {
		event = &gen.Event{
			Key:      row.Key.String,
			Metadata: *toAPIMetadata(pgUUIDToStr(row.ID), row.CreatedAt.Time, row.UpdatedAt.Time),
		}
	}

	triggeredBy := &gen.WorkflowRunTriggeredBy{
		Metadata: *toAPIMetadata(pgUUIDToStr(runTriggeredBy.ID), runTriggeredBy.CreatedAt.Time, runTriggeredBy.UpdatedAt.Time),
		ParentId: runTriggeredBy.ParentId,
	}

	if event != nil {
		triggeredBy.Event = event
	}

	res := &gen.WorkflowRun{
		Metadata:          *toAPIMetadata(run.ID, run.CreatedAt.Time, run.UpdatedAt.Time),
		TenantId:          pgUUIDToStr(run.TenantId),
		StartedAt:         startedAt,
		FinishedAt:        finishedAt,
		Status:            gen.WorkflowRunStatus(run.Status),
		WorkflowVersionId: pgUUIDToStr(run.WorkflowVersionId),
		WorkflowVersion:   workflowVersion,
		TriggeredBy:       *triggeredBy,
	}

	return res
}
|
||||
167
api/v1/server/run/run.go
Normal file
167
api/v1/server/run/run.go
Normal file
@@ -0,0 +1,167 @@
|
||||
package run
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/authn"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/authz"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/handlers/events"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/handlers/tenants"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/handlers/users"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/handlers/workers"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/handlers/workflows"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/middleware/populator"
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/server"
|
||||
"github.com/labstack/echo/v4"
|
||||
"github.com/labstack/echo/v4/middleware"
|
||||
|
||||
hatchetmiddleware "github.com/hatchet-dev/hatchet/api/v1/server/middleware"
|
||||
)
|
||||
|
||||
// apiService aggregates every generated-API handler group into a single
// implementation of the generated strict server interface via struct
// embedding; each embedded service contributes its own handler methods.
type apiService struct {
	*users.UserService
	*tenants.TenantService
	*events.EventService
	*workflows.WorkflowService
	*workers.WorkerService
}
|
||||
|
||||
func newAPIService(config *server.ServerConfig) *apiService {
|
||||
return &apiService{
|
||||
UserService: users.NewUserService(config),
|
||||
TenantService: tenants.NewTenantService(config),
|
||||
EventService: events.NewEventService(config),
|
||||
WorkflowService: workflows.NewWorkflowService(config),
|
||||
WorkerService: workers.NewWorkerService(config),
|
||||
}
|
||||
}
|
||||
|
||||
// APIServer owns the HTTP API process; it holds the server configuration
// used to wire middleware, handlers, and the listen port in Run.
type APIServer struct {
	config *server.ServerConfig
}
|
||||
|
||||
func NewAPIServer(config *server.ServerConfig) *APIServer {
|
||||
return &APIServer{
|
||||
config: config,
|
||||
}
|
||||
}
|
||||
|
||||
// Run builds and starts the echo HTTP server. It registers resource "getters"
// on the populator middleware (which resolve URL parameters like tenant or
// workflow IDs into loaded resources), layers authn/authz on top, mounts the
// generated strict handlers, and then blocks serving on the configured port.
// The error returned by echo's Start is always non-nil once the server stops.
func (t *APIServer) Run() error {
	oaspec, err := gen.GetSwagger()
	if err != nil {
		return err
	}

	e := echo.New()

	// application middleware
	populatorMW := populator.NewPopulator(t.config)

	// Tenants are top-level resources, so no parent ID is returned.
	populatorMW.RegisterGetter("tenant", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) {
		tenant, err := config.Repository.Tenant().GetTenantByID(id)

		if err != nil {
			return nil, "", err
		}

		return tenant, "", nil
	})

	// Each scoped getter returns the owning tenant ID as the unique parent so
	// the populator can verify resource ownership.
	populatorMW.RegisterGetter("workflow", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) {
		workflow, err := config.Repository.Workflow().GetWorkflowById(id)

		if err != nil {
			return nil, "", err
		}

		return workflow, workflow.TenantID, nil
	})

	// Workflow runs are looked up under their parent (NOTE(review): parentId
	// here appears to be the tenant ID — confirm against the populator).
	populatorMW.RegisterGetter("workflow-run", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) {
		workflowRun, err := config.Repository.WorkflowRun().GetWorkflowRunById(parentId, id)

		if err != nil {
			return nil, "", err
		}

		return workflowRun, workflowRun.TenantID, nil
	})

	populatorMW.RegisterGetter("event", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) {
		event, err := config.Repository.Event().GetEventById(id)

		if err != nil {
			return nil, "", err
		}

		return event, event.TenantID, nil
	})

	populatorMW.RegisterGetter("worker", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) {
		worker, err := config.Repository.Worker().GetWorkerById(id)

		if err != nil {
			return nil, "", err
		}

		return worker, worker.TenantID, nil
	})

	authnMW := authn.NewAuthN(t.config)
	authzMW := authz.NewAuthZ(t.config)

	mw, err := hatchetmiddleware.NewMiddlewareHandler(oaspec)

	if err != nil {
		return err
	}

	// Order matters: resources must be populated before authn/authz run.
	mw.Use(populatorMW.Middleware)
	mw.Use(authnMW.Middleware)
	mw.Use(authzMW.Middleware)

	allHatchetMiddleware, err := mw.Middleware()

	if err != nil {
		return err
	}

	// register echo middleware
	e.Use(
		middleware.Logger(),
		middleware.Recover(),
		allHatchetMiddleware,
	)

	service := newAPIService(t.config)

	myStrictApiHandler := gen.NewStrictHandler(service, []gen.StrictMiddlewareFunc{})

	gen.RegisterHandlers(e, myStrictApiHandler)

	// Blocks until the server stops.
	if err := e.Start(fmt.Sprintf(":%d", t.config.Runtime.Port)); err != nil {
		return err
	}

	return nil
}
|
||||
|
||||
// func IDGetter[T any](getter func(id string) (T, error), parentGetter func(val T) string) populator.PopulatorFunc {
|
||||
// return func(config *server.ServerConfig, parent *populator.PopulatedResourceNode, id string) (res *populator.PopulatorResult, err error) {
|
||||
// gotVal, err := getter(id)
|
||||
// if err != nil {
|
||||
// return nil, err
|
||||
// }
|
||||
|
||||
// res = &populator.PopulatorResult{
|
||||
// Resource: gotVal,
|
||||
// }
|
||||
|
||||
// if parentGetter != nil {
|
||||
// res.ParentID = parentGetter(gotVal)
|
||||
// }
|
||||
|
||||
// return res, nil
|
||||
// }
|
||||
// }
|
||||
73
api/v1/server/serverutils/param_context.go
Normal file
73
api/v1/server/serverutils/param_context.go
Normal file
@@ -0,0 +1,73 @@
|
||||
package serverutils
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/labstack/echo/v4"
|
||||
)
|
||||
|
||||
// ParamContext represents a subset of echo.Context to make testing easier.
// It exposes only key/value storage and path-parameter access, so handlers
// and middleware can be exercised without a real HTTP request.
type ParamContext interface {
	// Get retrieves data from the context.
	Get(key string) interface{}

	// Set saves data in the context.
	Set(key string, val interface{})

	// Param returns path parameter by name.
	Param(name string) string

	// ParamNames returns path parameter names.
	ParamNames() []string

	// ParamValues returns path parameter values.
	ParamValues() []string
}
|
||||
|
||||
// RequestContext wraps echo.Context but Get and Set set values on the request
// context, rather than the echo context. This is necessary when we use the
// oapi-gen StrictServerInterface, since the implemented methods are passed a
// request context, not an echo context. Thus, our middleware needs to write to
// the request context as well.
type RequestContext struct {
	echo.Context
}
|
||||
|
||||
func NewRequestContext(ctx echo.Context) *RequestContext {
|
||||
return &RequestContext{ctx}
|
||||
}
|
||||
|
||||
// Get retrieves a value from the underlying request's context (not the echo
// context), mirroring where Set stores it.
func (e *RequestContext) Get(key string) interface{} {
	return e.Request().Context().Value(key)
}
|
||||
|
||||
func (e *RequestContext) Set(key string, val interface{}) {
|
||||
r := e.Request()
|
||||
|
||||
ctx := context.WithValue(e.Request().Context(), key, val)
|
||||
|
||||
e.SetRequest(r.Clone(ctx))
|
||||
}
|
||||
|
||||
// GoContext is a context.Context that also consults an echo context on Value
// lookups, so values stored on either side are visible through one interface.
type GoContext struct {
	context.Context

	// echoContext is searched first for string keys in Value.
	echoContext echo.Context
}
|
||||
|
||||
func NewGoContext(ctx context.Context, echoContext echo.Context) *GoContext {
|
||||
return &GoContext{ctx, echoContext}
|
||||
}
|
||||
|
||||
// Value looks up key first in the echo context (string keys only), then falls
// back to the embedded Go context. Note that a non-nil echo value shadows any
// value stored under the same string key in the Go context.
func (g *GoContext) Value(key any) interface{} {
	// first search echo context
	if keyStr, ok := key.(string); ok {
		val := g.echoContext.Get(keyStr)

		if val != nil {
			return val
		}
	}

	// then search go context
	return g.Context.Value(key)
}
|
||||
53
api/v1/server/serverutils/testutils/param_context.go
Normal file
53
api/v1/server/serverutils/testutils/param_context.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package testutils
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/serverutils"
|
||||
)
|
||||
|
||||
// TestContext is an in-memory ParamContext implementation for unit tests:
// Get/Set delegate to a plain context.Context and path parameters come from a
// fixed map.
type TestContext struct {
	// ctx backs Get/Set; replaced on every Set via context.WithValue.
	ctx context.Context
	// params maps path-parameter names to values.
	params map[string]string
}
|
||||
|
||||
func GetTestContext(params map[string]string) serverutils.ParamContext {
|
||||
return &TestContext{context.Background(), params}
|
||||
}
|
||||
|
||||
// Get retrieves data from the context previously stored via Set.
func (t *TestContext) Get(key string) any {
	return t.ctx.Value(key)
}
|
||||
|
||||
// Set saves data in the context by wrapping the stored context with a new
// value layer (the receiver's ctx field is replaced, so this is not safe for
// concurrent use).
func (t *TestContext) Set(key string, val any) {
	t.ctx = context.WithValue(t.ctx, key, val)
}
|
||||
|
||||
// Param returns the path parameter by name; the zero string for unknown names.
func (t *TestContext) Param(name string) string {
	return t.params[name]
}
|
||||
|
||||
// ParamNames returns path parameter names.
|
||||
func (t *TestContext) ParamNames() []string {
|
||||
names := []string{}
|
||||
|
||||
for name := range t.params {
|
||||
names = append(names, name)
|
||||
}
|
||||
|
||||
return names
|
||||
}
|
||||
|
||||
// ParamValues returns path parameter values.
|
||||
func (t *TestContext) ParamValues() []string {
|
||||
values := []string{}
|
||||
|
||||
for _, val := range t.params {
|
||||
values = append(values, val)
|
||||
}
|
||||
|
||||
return values
|
||||
}
|
||||
34
cmd/cmdutils/interrupt.go
Normal file
34
cmd/cmdutils/interrupt.go
Normal file
@@ -0,0 +1,34 @@
|
||||
// Adapted from: https://github.com/hatchet-dev/hatchet/blob/3c2c13168afa1af68d4baaf5ed02c9d49c5f0323/cmd/cmdutils/interrupt.go
|
||||
package cmdutils
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"os/signal"
|
||||
"syscall"
|
||||
)
|
||||
|
||||
// InterruptChan returns a channel that receives exactly one value — the first
// SIGINT/SIGTERM delivered to the process — and is then closed.
func InterruptChan() <-chan interface{} {
	signals := make(chan os.Signal, 1)
	signal.Notify(signals, os.Interrupt, syscall.SIGTERM)

	out := make(chan interface{}, 1)
	go func() {
		out <- <-signals
		close(out)
	}()

	return out
}
|
||||
|
||||
func InterruptContext(interruptChan <-chan interface{}) (context.Context, context.CancelFunc) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
|
||||
go func() {
|
||||
<-interruptChan
|
||||
cancel()
|
||||
}()
|
||||
|
||||
return ctx, cancel
|
||||
}
|
||||
25
cmd/hatchet-api/main.go
Normal file
25
cmd/hatchet-api/main.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"github.com/hatchet-dev/hatchet/api/v1/server/run"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/loader"
|
||||
)
|
||||
|
||||
// main loads the server configuration and runs the HTTP API server until it
// exits, panicking on any startup or serve failure.
func main() {
	// init the repository
	cf := &loader.ConfigLoader{}

	sc, err := cf.LoadServerConfig()

	if err != nil {
		panic(err)
	}

	runner := run.NewAPIServer(sc)

	// Run blocks serving HTTP; it only returns on error or shutdown.
	err = runner.Run()

	if err != nil {
		panic(err)
	}
}
|
||||
187
cmd/hatchet-engine/main.go
Normal file
187
cmd/hatchet-engine/main.go
Normal file
@@ -0,0 +1,187 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"sync"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/internal/config/loader"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/admin"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/dispatcher"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/eventscontroller"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/grpc"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/ingestor"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/jobscontroller"
|
||||
"github.com/hatchet-dev/hatchet/internal/services/ticker"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// init the repository
|
||||
cf := &loader.ConfigLoader{}
|
||||
|
||||
sc, err := cf.LoadServerConfig()
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
errCh := make(chan error)
|
||||
interruptChan := cmdutils.InterruptChan()
|
||||
ctx, cancel := cmdutils.InterruptContext(interruptChan)
|
||||
wg := sync.WaitGroup{}
|
||||
|
||||
if sc.HasService("grpc") {
|
||||
wg.Add(1)
|
||||
|
||||
// create the dispatcher
|
||||
d, err := dispatcher.New(
|
||||
dispatcher.WithTaskQueue(sc.TaskQueue),
|
||||
dispatcher.WithRepository(sc.Repository),
|
||||
dispatcher.WithLogger(sc.Logger),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
err := d.Start(ctx)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
// create the event ingestor
|
||||
ei, err := ingestor.NewIngestor(
|
||||
ingestor.WithEventRepository(
|
||||
sc.Repository.Event(),
|
||||
),
|
||||
ingestor.WithTaskQueue(sc.TaskQueue),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
adminSvc, err := admin.NewAdminService(
|
||||
admin.WithRepository(sc.Repository),
|
||||
admin.WithTaskQueue(sc.TaskQueue),
|
||||
)
|
||||
|
||||
// create the grpc server
|
||||
s, err := grpc.NewServer(
|
||||
grpc.WithIngestor(ei),
|
||||
grpc.WithDispatcher(d),
|
||||
grpc.WithAdmin(adminSvc),
|
||||
grpc.WithLogger(sc.Logger),
|
||||
grpc.WithTLSConfig(sc.TLSConfig),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
go func() {
|
||||
err = s.Start(ctx)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
if sc.HasService("eventscontroller") {
|
||||
// create separate events controller process
|
||||
go func() {
|
||||
ec, err := eventscontroller.New(
|
||||
eventscontroller.WithTaskQueue(sc.TaskQueue),
|
||||
eventscontroller.WithRepository(sc.Repository),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
err = ec.Start(ctx)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
if sc.HasService("jobscontroller") {
|
||||
// create separate jobs controller process
|
||||
go func() {
|
||||
jc, err := jobscontroller.New(
|
||||
jobscontroller.WithTaskQueue(sc.TaskQueue),
|
||||
jobscontroller.WithRepository(sc.Repository),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
err = jc.Start(ctx)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
if sc.HasService("ticker") {
|
||||
// create a ticker
|
||||
go func() {
|
||||
t, err := ticker.New(
|
||||
ticker.WithTaskQueue(sc.TaskQueue),
|
||||
ticker.WithRepository(sc.Repository),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
return
|
||||
}
|
||||
|
||||
err = t.Start(ctx)
|
||||
|
||||
if err != nil {
|
||||
errCh <- err
|
||||
}
|
||||
}()
|
||||
}
|
||||
|
||||
Loop:
|
||||
for {
|
||||
select {
|
||||
case err := <-errCh:
|
||||
fmt.Fprintf(os.Stderr, "%s", err)
|
||||
|
||||
// exit with non-zero exit code
|
||||
os.Exit(1)
|
||||
|
||||
break Loop
|
||||
case <-interruptChan:
|
||||
break Loop
|
||||
}
|
||||
}
|
||||
|
||||
cancel()
|
||||
|
||||
wg.Wait()
|
||||
|
||||
err = sc.Disconnect()
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
159
cmd/seed/main.go
Normal file
159
cmd/seed/main.go
Normal file
@@ -0,0 +1,159 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/internal/config/loader"
|
||||
"github.com/hatchet-dev/hatchet/internal/datautils"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository"
|
||||
"github.com/hatchet-dev/hatchet/internal/repository/prisma/db"
|
||||
)
|
||||
|
||||
// main seeds a development database: an admin user (admin@example.com), a
// "default" tenant owned by that user, a three-step echo workflow triggered
// by "user:create", and 600 synthetic events. Existing user/tenant rows are
// reused; any other failure panics.
func main() {
	// init the repository
	cf := &loader.ConfigLoader{}

	// load the config
	dc, err := cf.LoadDatabaseConfig()

	if err != nil {
		panic(err)
	}

	// seed an example user
	hashedPw, err := repository.HashPassword("Admin123!!")

	if err != nil {
		panic(err)
	}

	user, err := dc.Repository.User().GetUserByEmail("admin@example.com")

	// Only ErrNotFound falls through to creation; any other lookup error is fatal.
	if err != nil {
		if errors.Is(err, db.ErrNotFound) {
			user, err = dc.Repository.User().CreateUser(&repository.CreateUserOpts{
				Email:         "admin@example.com",
				Name:          repository.StringPtr("Admin"),
				EmailVerified: repository.BoolPtr(true),
				Password:      *hashedPw,
			})

			if err != nil {
				panic(err)
			}
		} else {
			panic(err)
		}
	}

	tenant, err := dc.Repository.Tenant().GetTenantBySlug("default")

	if err != nil {
		if errors.Is(err, db.ErrNotFound) {
			// seed an example tenant
			// initialize a tenant
			tenant, err = dc.Repository.Tenant().CreateTenant(&repository.CreateTenantOpts{
				Name: "Default",
				Slug: "default",
			})

			if err != nil {
				panic(err)
			}

			fmt.Println("created tenant", tenant.ID)

			// add the user to the tenant
			_, err = dc.Repository.Tenant().CreateTenantMember(tenant.ID, &repository.CreateTenantMemberOpts{
				Role:   "OWNER",
				UserId: user.ID,
			})

			if err != nil {
				panic(err)
			}
		} else {
			panic(err)
		}
	}

	// seed example workflows
	// NOTE(review): ToJSONType errors are intentionally ignored here (seed
	// data is static); consider checking them if the payloads ever vary.
	firstInput, _ := datautils.ToJSONType(map[string]interface{}{
		"message": "Username is {{ .username }}",
	})

	secondInput, _ := datautils.ToJSONType(map[string]interface{}{
		"message": "Above message is: {{ .steps.echo1.outputs.message }}",
	})

	thirdInput, _ := datautils.ToJSONType(map[string]interface{}{
		"message": "Above message is: {{ .steps.echo2.outputs.message }}",
	})

	_, err = dc.Repository.Workflow().CreateNewWorkflow(tenant.ID, &repository.CreateWorkflowVersionOpts{
		Name:        "test-workflow",
		Description: repository.StringPtr("This is a test workflow."),
		Version:     "v0.1.0",
		EventTriggers: []string{
			"user:create",
		},
		Tags: []repository.CreateWorkflowTagOpts{
			{
				Name: "Preview",
			},
		},
		Jobs: []repository.CreateWorkflowJobOpts{
			{
				Name: "job-name",
				Steps: []repository.CreateWorkflowStepOpts{
					{
						ReadableId: "echo1",
						Action:     "echo:echo",
						Inputs:     firstInput,
					},
					{
						ReadableId: "echo2",
						Action:     "echo:echo",
						Inputs:     secondInput,
					},
					{
						ReadableId: "echo3",
						Action:     "echo:echo",
						Inputs:     thirdInput,
					},
				},
			},
		},
	})

	if err != nil {
		panic(err)
	}

	workflows, err := dc.Repository.Workflow().ListWorkflowsForEvent(tenant.ID, "user:create")

	if err != nil {
		panic(err)
	}

	for _, workflow := range workflows {
		fmt.Println("created workflow", workflow.ID, workflow.Workflow().Name, workflow.Version)
	}

	// seed example events
	generateEvents(dc.Repository, tenant.ID)
}
|
||||
|
||||
func generateEvents(repo repository.Repository, tenantId string) {
|
||||
for i := 0; i < 600; i++ {
|
||||
_, err := repo.Event().CreateEvent(&repository.CreateEventOpts{
|
||||
TenantId: tenantId,
|
||||
Key: fmt.Sprintf("user-%d:create", i),
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
36
docker-compose.yml
Normal file
36
docker-compose.yml
Normal file
@@ -0,0 +1,36 @@
|
||||
version: "3.8"
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:latest
|
||||
command: postgres -c 'max_connections=200'
|
||||
restart: always
|
||||
environment:
|
||||
- POSTGRES_USER=hatchet
|
||||
- POSTGRES_PASSWORD=hatchet
|
||||
- POSTGRES_DB=hatchet
|
||||
ports:
|
||||
- "5433:5432"
|
||||
volumes:
|
||||
- hatchet_postgres_data:/var/lib/postgresql/data
|
||||
rabbitmq:
|
||||
image: "rabbitmq:3-management"
|
||||
hostname: "rabbitmq"
|
||||
ports:
|
||||
- "5672:5672" # RabbitMQ
|
||||
- "15672:15672" # Management UI
|
||||
environment:
|
||||
RABBITMQ_DEFAULT_USER: "user"
|
||||
RABBITMQ_DEFAULT_PASS: "password"
|
||||
volumes:
|
||||
- "hatchet_rabbitmq_data:/var/lib/rabbitmq"
|
||||
- "hatchet_rabbitmq.conf:/etc/rabbitmq/rabbitmq.conf" # Configuration file mount
|
||||
healthcheck:
|
||||
test: ["CMD", "rabbitmqctl", "status"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
|
||||
volumes:
|
||||
hatchet_postgres_data:
|
||||
hatchet_rabbitmq_data:
|
||||
hatchet_rabbitmq.conf:
|
||||
11
examples/cron/.hatchet/cron-workflow.yaml
Normal file
11
examples/cron/.hatchet/cron-workflow.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
name: "cron-workflow"
|
||||
version: v0.1.0
|
||||
triggers:
|
||||
crons:
|
||||
- "* * * * *"
|
||||
jobs:
|
||||
print-user:
|
||||
steps:
|
||||
- id: print1
|
||||
action: print:print
|
||||
timeout: 60s
|
||||
53
examples/cron/main.go
Normal file
53
examples/cron/main.go
Normal file
@@ -0,0 +1,53 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
// printInput is the (empty) payload for the print:print action.
type printInput struct{}
|
||||
|
||||
// main runs a worker that registers the print:print action used by the cron
// workflow in .hatchet/cron-workflow.yaml, and serves until interrupted.
func main() {
	client, err := client.New(
		client.InitWorkflows(),
	)

	if err != nil {
		panic(err)
	}

	// Create a worker using the dispatcher client from the Hatchet client
	// above. (The configuration — TLS, tenant, workflow files — is read from
	// .env and the .hatchet directory by client.New / InitWorkflows.)
	worker, err := worker.NewWorker(
		worker.WithDispatcherClient(
			client.Dispatcher(),
		),
	)

	if err != nil {
		panic(err)
	}

	err = worker.RegisterAction("print:print", func(ctx context.Context, input *printInput) (result any, err error) {
		fmt.Println("called print:print")

		return map[string]interface{}{}, nil
	})

	if err != nil {
		panic(err)
	}

	interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
	defer cancel()

	// Blocks until the context is cancelled by an interrupt signal.
	err = worker.Start(interruptCtx)

	if err != nil {
		panic(err)
	}
}
|
||||
11
examples/requeue/.hatchet/job-requeue-workflow.yaml
Normal file
11
examples/requeue/.hatchet/job-requeue-workflow.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
name: "test-step-requeue"
|
||||
version: v0.2.0
|
||||
triggers:
|
||||
events:
|
||||
- example:event
|
||||
jobs:
|
||||
requeue-job:
|
||||
steps:
|
||||
- id: requeue
|
||||
action: requeue:requeue
|
||||
timeout: 10s
|
||||
80
examples/requeue/main.go
Normal file
80
examples/requeue/main.go
Normal file
@@ -0,0 +1,80 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
// sampleEvent is the (empty) payload pushed as "example:event".
type sampleEvent struct{}

// requeueInput is the (empty) input for the requeue:requeue action.
type requeueInput struct{}
|
||||
|
||||
func main() {
|
||||
client, err := client.New(
|
||||
client.InitWorkflows(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Create a worker. This automatically reads in a TemporalClient from .env and workflow files from the .hatchet
|
||||
// directory, but this can be customized with the `worker.WithTemporalClient` and `worker.WithWorkflowFiles` options.
|
||||
worker, err := worker.NewWorker(
|
||||
worker.WithDispatcherClient(
|
||||
client.Dispatcher(),
|
||||
),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
err = worker.RegisterAction("requeue:requeue", func(ctx context.Context, input *requeueInput) (result any, err error) {
|
||||
return map[string]interface{}{}, nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
|
||||
defer cancel()
|
||||
|
||||
event := sampleEvent{}
|
||||
|
||||
// push an event
|
||||
err = client.Event().Push(
|
||||
context.Background(),
|
||||
"example:event",
|
||||
event,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
go func() {
|
||||
// wait to register the worker for 10 seconds, to let the requeuer kick in
|
||||
time.Sleep(10 * time.Second)
|
||||
|
||||
err = worker.Start(interruptCtx)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-interruptCtx.Done():
|
||||
return
|
||||
default:
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
}
|
||||
}
|
||||
23
examples/simple/.hatchet/sample-workflow.yaml
Normal file
23
examples/simple/.hatchet/sample-workflow.yaml
Normal file
@@ -0,0 +1,23 @@
|
||||
name: "post-user-sign-up"
|
||||
version: v0.2.0
|
||||
triggers:
|
||||
events:
|
||||
- user:create
|
||||
jobs:
|
||||
print-user:
|
||||
steps:
|
||||
- id: echo1
|
||||
action: echo:echo
|
||||
timeout: 60s
|
||||
with:
|
||||
message: "Username is {{ .input.username }}"
|
||||
- id: echo2
|
||||
action: echo:echo
|
||||
timeout: 60s
|
||||
with:
|
||||
message: "Above message is: {{ .steps.echo1.message }}"
|
||||
- id: echo3
|
||||
action: echo:echo
|
||||
timeout: 60s
|
||||
with:
|
||||
message: "Above message is: {{ .steps.echo2.message }}"
|
||||
38
examples/simple/README.md
Normal file
38
examples/simple/README.md
Normal file
@@ -0,0 +1,38 @@
|
||||
## Simple Workflow Example
|
||||
|
||||
This example runs the [sample-workflow.yaml](./.hatchet/sample-workflow.yaml).
|
||||
|
||||
## Explanation
|
||||
|
||||
This folder contains a demo example of a workflow that simply echoes the input message as an output. The workflow file showcases the following features:
|
||||
|
||||
- Running a simple job with a set of dependent steps
|
||||
- Variable references within step arguments -- each subsequent step in a workflow can call `.steps.<step_id>.outputs` to access output arguments
|
||||
|
||||
## How to run
|
||||
|
||||
Navigate to this directory and run the following steps:
|
||||
|
||||
1. Make sure you have a Hatchet server running (see the instructions [here](../../README.md)). After running `task seed`, grab the tenant ID which is output to the console.
|
||||
2. Set your environment variables -- if you're using the bundled development setup (certificates generated under `hack/dev/certs`), this will look like:
|
||||
|
||||
```sh
|
||||
cat > .env <<EOF
|
||||
HATCHET_CLIENT_TENANT_ID=<tenant-id-from-seed-command>
|
||||
HATCHET_CLIENT_TLS_ROOT_CA_FILE=../../hack/dev/certs/ca.cert
|
||||
HATCHET_CLIENT_TLS_CERT_FILE=../../hack/dev/certs/client-worker.pem
|
||||
HATCHET_CLIENT_TLS_KEY_FILE=../../hack/dev/certs/client-worker.key
|
||||
HATCHET_CLIENT_TLS_SERVER_NAME=cluster
|
||||
EOF
|
||||
```
|
||||
|
||||
3. Run the following within this directory:
|
||||
|
||||
```sh
|
||||
/bin/bash -c '
|
||||
set -a
|
||||
. .env
|
||||
set +a
|
||||
|
||||
go run main.go';
|
||||
```
|
||||
93
examples/simple/main.go
Normal file
93
examples/simple/main.go
Normal file
@@ -0,0 +1,93 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
// userCreateEvent is the JSON payload pushed as the "user:create" event.
type userCreateEvent struct {
	Username string            `json:"username"`
	UserId   string            `json:"user_id"`
	Data     map[string]string `json:"data"`
}

// actionInput is the input decoded for the echo:echo action; Message is the
// rendered template string from the workflow step's "with" block.
type actionInput struct {
	Message string `json:"message"`
}
|
||||
|
||||
func main() {
|
||||
client, err := client.New(
|
||||
client.InitWorkflows(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Create a worker. This automatically reads in a TemporalClient from .env and workflow files from the .hatchet
|
||||
// directory, but this can be customized with the `worker.WithTemporalClient` and `worker.WithWorkflowFiles` options.
|
||||
worker, err := worker.NewWorker(
|
||||
worker.WithDispatcherClient(
|
||||
client.Dispatcher(),
|
||||
),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
err = worker.RegisterAction("echo:echo", func(ctx context.Context, input *actionInput) (result any, err error) {
|
||||
return map[string]interface{}{
|
||||
"message": input.Message,
|
||||
}, nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
|
||||
defer cancel()
|
||||
|
||||
go func() {
|
||||
err = worker.Start(interruptCtx)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
cancel()
|
||||
}()
|
||||
|
||||
testEvent := userCreateEvent{
|
||||
Username: "echo-test",
|
||||
UserId: "1234",
|
||||
Data: map[string]string{
|
||||
"test": "test",
|
||||
},
|
||||
}
|
||||
|
||||
// push an event
|
||||
err = client.Event().Push(
|
||||
context.Background(),
|
||||
"user:create",
|
||||
testEvent,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-interruptCtx.Done():
|
||||
return
|
||||
default:
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
}
|
||||
}
|
||||
29
examples/slack/.hatchet/slack-channel.yaml
Normal file
29
examples/slack/.hatchet/slack-channel.yaml
Normal file
@@ -0,0 +1,29 @@
|
||||
name: "post-user-sign-up"
|
||||
version: v0.1.0
|
||||
triggers:
|
||||
events:
|
||||
- user:create
|
||||
jobs:
|
||||
create-slack-channel:
|
||||
steps:
|
||||
- name: Create onboarding channel
|
||||
action: slack:create-channel
|
||||
id: createChannel
|
||||
timeout: 60s
|
||||
with:
|
||||
channelName: "{{ .input.username }}-onboarding"
|
||||
- name: Add user to channel
|
||||
action: slack:add-users-to-channel
|
||||
id: addUserToChannel
|
||||
timeout: 60s
|
||||
with:
|
||||
channelId: "{{ .steps.createChannel.channelId }}"
|
||||
userIds:
|
||||
- "$SLACK_USER_ID"
|
||||
- name: Send message to channel
|
||||
action: slack:send-message
|
||||
id: sendMessageToChannel
|
||||
timeout: 60s
|
||||
with:
|
||||
channelId: "{{ .steps.createChannel.channelId }}"
|
||||
message: "Welcome to your dedicated onboarding channel, {{ .input.username }}!"
|
||||
47
examples/slack/README.md
Normal file
47
examples/slack/README.md
Normal file
@@ -0,0 +1,47 @@
|
||||
## Simple Workflow Example
|
||||
|
||||
This example runs the [slack-channel.yaml](./.hatchet/slack-channel.yaml).
|
||||
|
||||
## Explanation
|
||||
|
||||
This folder contains a demo example of a workflow that creates a Slack channel, adds a default user to that Slack channel, and sends an initial message to the channel. The workflow file showcases the following features:
|
||||
|
||||
- Running a simple job with a set of dependent steps
|
||||
- Variable references within step arguments -- each subsequent step in a workflow can call `.steps.<step_id>.outputs` to access output arguments
|
||||
|
||||
While the `main.go` file showcases the following features:
|
||||
|
||||
- Using an existing integration called `SlackIntegration` which provides several actions to perform
|
||||
- Providing a custom workflow file (as the workflow file needs to be populated with an env var `$SLACK_USER_ID`)
|
||||
|
||||
## How to run
|
||||
|
||||
Navigate to this directory and run the following steps:
|
||||
|
||||
1. Make sure you have a Hatchet server running (see the instructions [here](../../README.md)). After running `task seed`, grab the tenant ID which is output to the console.
|
||||
2. Set your environment variables -- if you're using the bundled development setup (certificates generated under `hack/dev/certs`), this will look like:
|
||||
|
||||
```sh
|
||||
cat > .env <<EOF
|
||||
SLACK_USER_ID=<TODO>
|
||||
SLACK_TOKEN=<TODO>
|
||||
SLACK_TEAM_ID=<TODO>
|
||||
|
||||
HATCHET_CLIENT_TENANT_ID=<tenant-id-from-seed-command>
|
||||
HATCHET_CLIENT_TLS_ROOT_CA_FILE=../../hack/dev/certs/ca.cert
|
||||
HATCHET_CLIENT_TLS_CERT_FILE=../../hack/dev/certs/client-worker.pem
|
||||
HATCHET_CLIENT_TLS_KEY_FILE=../../hack/dev/certs/client-worker.key
|
||||
HATCHET_CLIENT_TLS_SERVER_NAME=cluster
|
||||
EOF
|
||||
```
|
||||
|
||||
3. Run the following within this directory:
|
||||
|
||||
```sh
|
||||
/bin/bash -c '
|
||||
set -a
|
||||
. .env
|
||||
set +a
|
||||
|
||||
go run main.go';
|
||||
```
|
||||
118
examples/slack/main.go
Normal file
118
examples/slack/main.go
Normal file
@@ -0,0 +1,118 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"context"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client/types"
|
||||
"github.com/hatchet-dev/hatchet/pkg/integrations/slack"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
type userCreateEvent struct {
|
||||
Username string `json:"username"`
|
||||
}
|
||||
|
||||
type actionInput struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
//go:embed .hatchet/slack-channel.yaml
|
||||
var SlackChannelWorkflow []byte
|
||||
|
||||
func init() {
|
||||
// initialize the slack channel workflow with SLACK_USER_ID
|
||||
slackUserId := os.Getenv("SLACK_USER_ID")
|
||||
|
||||
if slackUserId == "" {
|
||||
panic("SLACK_USER_ID environment variable must be set")
|
||||
}
|
||||
|
||||
slackFileWithReplacedEnv := strings.Replace(string(SlackChannelWorkflow), "$SLACK_USER_ID", slackUserId, 1)
|
||||
|
||||
SlackChannelWorkflow = []byte(slackFileWithReplacedEnv)
|
||||
}
|
||||
|
||||
func main() {
|
||||
// read the slack workflow
|
||||
slackWorkflowFile, err := types.ParseYAML(context.Background(), SlackChannelWorkflow)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// render the slack workflow using the environment variable SLACK_USER_ID
|
||||
slackToken := os.Getenv("SLACK_TOKEN")
|
||||
slackTeamId := os.Getenv("SLACK_TEAM_ID")
|
||||
|
||||
if slackToken == "" {
|
||||
panic("SLACK_TOKEN environment variable must be set")
|
||||
}
|
||||
|
||||
if slackTeamId == "" {
|
||||
panic("SLACK_TEAM_ID environment variable must be set")
|
||||
}
|
||||
|
||||
slackInt := slack.NewSlackIntegration(slackToken, slackTeamId, true)
|
||||
|
||||
client, err := client.New(
|
||||
client.InitWorkflows(),
|
||||
client.WithWorkflows([]*types.Workflow{
|
||||
&slackWorkflowFile,
|
||||
}),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Create a worker. This automatically reads in a TemporalClient from .env and workflow files from the .hatchet
|
||||
// directory, but this can be customized with the `worker.WithTemporalClient` and `worker.WithWorkflowFiles` options.
|
||||
worker, err := worker.NewWorker(
|
||||
worker.WithDispatcherClient(
|
||||
client.Dispatcher(),
|
||||
),
|
||||
worker.WithIntegration(
|
||||
slackInt,
|
||||
),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
|
||||
defer cancel()
|
||||
|
||||
go worker.Start(interruptCtx)
|
||||
|
||||
testEvent := userCreateEvent{
|
||||
Username: "testing1233344",
|
||||
}
|
||||
|
||||
// push an event
|
||||
err = client.Event().Push(
|
||||
context.Background(),
|
||||
"user:create",
|
||||
testEvent,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-interruptCtx.Done():
|
||||
return
|
||||
default:
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
}
|
||||
}
|
||||
12
examples/timeout/.hatchet/job-timeout-workflow.yaml
Normal file
12
examples/timeout/.hatchet/job-timeout-workflow.yaml
Normal file
@@ -0,0 +1,12 @@
|
||||
name: "test-job-timeout"
|
||||
version: v0.1.0
|
||||
triggers:
|
||||
events:
|
||||
- user:create
|
||||
jobs:
|
||||
timeout-job:
|
||||
timeout: 3s
|
||||
steps:
|
||||
- id: timeout
|
||||
action: timeout:timeout
|
||||
timeout: 10s
|
||||
11
examples/timeout/.hatchet/step-timeout-workflow.yaml
Normal file
11
examples/timeout/.hatchet/step-timeout-workflow.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
name: "test-step-timeout"
|
||||
version: v0.1.0
|
||||
triggers:
|
||||
events:
|
||||
- user:create
|
||||
jobs:
|
||||
timeout-job:
|
||||
steps:
|
||||
- id: timeout
|
||||
action: timeout:timeout
|
||||
timeout: 5s
|
||||
83
examples/timeout/main.go
Normal file
83
examples/timeout/main.go
Normal file
@@ -0,0 +1,83 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/hatchet-dev/hatchet/cmd/cmdutils"
|
||||
"github.com/hatchet-dev/hatchet/pkg/client"
|
||||
"github.com/hatchet-dev/hatchet/pkg/worker"
|
||||
)
|
||||
|
||||
type sampleEvent struct{}
|
||||
|
||||
type timeoutInput struct{}
|
||||
|
||||
func main() {
|
||||
client, err := client.New(
|
||||
client.InitWorkflows(),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
// Create a worker. This automatically reads in a TemporalClient from .env and workflow files from the .hatchet
|
||||
// directory, but this can be customized with the `worker.WithTemporalClient` and `worker.WithWorkflowFiles` options.
|
||||
worker, err := worker.NewWorker(
|
||||
worker.WithDispatcherClient(
|
||||
client.Dispatcher(),
|
||||
),
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
err = worker.RegisterAction("timeout:timeout", func(ctx context.Context, input *timeoutInput) (result any, err error) {
|
||||
// wait for context done signal
|
||||
timeStart := time.Now()
|
||||
<-ctx.Done()
|
||||
fmt.Println("context cancelled in ", time.Since(timeStart).Seconds(), " seconds")
|
||||
|
||||
return map[string]interface{}{}, nil
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
interruptCtx, cancel := cmdutils.InterruptContext(cmdutils.InterruptChan())
|
||||
defer cancel()
|
||||
|
||||
go func() {
|
||||
err = worker.Start(interruptCtx)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}()
|
||||
|
||||
event := sampleEvent{}
|
||||
|
||||
// push an event
|
||||
err = client.Event().Push(
|
||||
context.Background(),
|
||||
"user:create",
|
||||
event,
|
||||
)
|
||||
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-interruptCtx.Done():
|
||||
return
|
||||
default:
|
||||
time.Sleep(time.Second)
|
||||
}
|
||||
}
|
||||
}
|
||||
18
frontend/app/.eslintrc.cjs
Normal file
18
frontend/app/.eslintrc.cjs
Normal file
@@ -0,0 +1,18 @@
|
||||
module.exports = {
|
||||
root: true,
|
||||
env: { browser: true, es2020: true },
|
||||
extends: [
|
||||
'eslint:recommended',
|
||||
'plugin:@typescript-eslint/recommended',
|
||||
'plugin:react-hooks/recommended',
|
||||
],
|
||||
ignorePatterns: ['dist', '.eslintrc.cjs'],
|
||||
parser: '@typescript-eslint/parser',
|
||||
plugins: ['react-refresh'],
|
||||
rules: {
|
||||
'react-refresh/only-export-components': [
|
||||
'warn',
|
||||
{ allowConstantExport: true },
|
||||
],
|
||||
},
|
||||
}
|
||||
24
frontend/app/.gitignore
vendored
Normal file
24
frontend/app/.gitignore
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
# Logs
|
||||
logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
lerna-debug.log*
|
||||
|
||||
node_modules
|
||||
dist
|
||||
dist-ssr
|
||||
*.local
|
||||
|
||||
# Editor directories and files
|
||||
.vscode/*
|
||||
!.vscode/extensions.json
|
||||
.idea
|
||||
.DS_Store
|
||||
*.suo
|
||||
*.ntvs*
|
||||
*.njsproj
|
||||
*.sln
|
||||
*.sw?
|
||||
30
frontend/app/README.md
Normal file
30
frontend/app/README.md
Normal file
@@ -0,0 +1,30 @@
|
||||
# React + TypeScript + Vite
|
||||
|
||||
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
|
||||
|
||||
Currently, two official plugins are available:
|
||||
|
||||
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react/README.md) uses [Babel](https://babeljs.io/) for Fast Refresh
|
||||
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
|
||||
|
||||
## Expanding the ESLint configuration
|
||||
|
||||
If you are developing a production application, we recommend updating the configuration to enable type aware lint rules:
|
||||
|
||||
- Configure the top-level `parserOptions` property like this:
|
||||
|
||||
```js
|
||||
export default {
|
||||
// other rules...
|
||||
parserOptions: {
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module',
|
||||
project: ['./tsconfig.json', './tsconfig.node.json'],
|
||||
tsconfigRootDir: __dirname,
|
||||
},
|
||||
}
|
||||
```
|
||||
|
||||
- Replace `plugin:@typescript-eslint/recommended` to `plugin:@typescript-eslint/recommended-type-checked` or `plugin:@typescript-eslint/strict-type-checked`
|
||||
- Optionally add `plugin:@typescript-eslint/stylistic-type-checked`
|
||||
- Install [eslint-plugin-react](https://github.com/jsx-eslint/eslint-plugin-react) and add `plugin:react/recommended` & `plugin:react/jsx-runtime` to the `extends` list
|
||||
16
frontend/app/components.json
Normal file
16
frontend/app/components.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"$schema": "https://ui.shadcn.com/schema.json",
|
||||
"style": "new-york",
|
||||
"rsc": false,
|
||||
"tsx": true,
|
||||
"tailwind": {
|
||||
"config": "tailwind.config.js",
|
||||
"css": "src/index.css",
|
||||
"baseColor": "slate",
|
||||
"cssVariables": true
|
||||
},
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/lib/utils"
|
||||
}
|
||||
}
|
||||
23
frontend/app/index.html
Normal file
23
frontend/app/index.html
Normal file
@@ -0,0 +1,23 @@
|
||||
<!doctype html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="UTF-8" />
|
||||
<title>Hatchet</title>
|
||||
<meta
|
||||
name="description"
|
||||
content="Infrastructure management tool."
|
||||
/>
|
||||
<link
|
||||
href="https://fonts.googleapis.com/css?family=Ubuntu:400,500,700"
|
||||
rel="stylesheet"
|
||||
/>
|
||||
<meta property="og:title" content="Hatchet" />
|
||||
<link rel="icon" type="image/png" href="/favicon.ico">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||
<title>Hatchet</title>
|
||||
</head>
|
||||
<body>
|
||||
<div id="root"></div>
|
||||
<script type="module" src="/src/main.tsx"></script>
|
||||
</body>
|
||||
</html>
|
||||
6292
frontend/app/package-lock.json
generated
Normal file
6292
frontend/app/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
78
frontend/app/package.json
Normal file
78
frontend/app/package.json
Normal file
@@ -0,0 +1,78 @@
|
||||
{
|
||||
"name": "app",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc && vite build",
|
||||
"lint": "eslint . --ext ts,tsx --report-unused-disable-directives --max-warnings 0",
|
||||
"preview": "vite preview"
|
||||
},
|
||||
"dependencies": {
|
||||
"@heroicons/react": "^2.0.18",
|
||||
"@hookform/resolvers": "^3.3.2",
|
||||
"@lukemorales/query-key-factory": "^1.3.2",
|
||||
"@radix-ui/react-accordion": "^1.1.2",
|
||||
"@radix-ui/react-avatar": "^1.0.4",
|
||||
"@radix-ui/react-checkbox": "^1.0.4",
|
||||
"@radix-ui/react-dialog": "^1.0.5",
|
||||
"@radix-ui/react-dropdown-menu": "^2.0.6",
|
||||
"@radix-ui/react-hover-card": "^1.0.7",
|
||||
"@radix-ui/react-icons": "^1.3.0",
|
||||
"@radix-ui/react-label": "^2.0.2",
|
||||
"@radix-ui/react-menubar": "^1.0.4",
|
||||
"@radix-ui/react-popover": "^1.0.7",
|
||||
"@radix-ui/react-select": "^2.0.0",
|
||||
"@radix-ui/react-separator": "^1.0.3",
|
||||
"@radix-ui/react-slot": "^1.0.2",
|
||||
"@radix-ui/react-toast": "^1.1.5",
|
||||
"@tanstack/react-query": "^5.12.1",
|
||||
"@tanstack/react-table": "^8.10.7",
|
||||
"@visx/axis": "^3.5.0",
|
||||
"@visx/brush": "^3.6.0",
|
||||
"@visx/curve": "^3.3.0",
|
||||
"@visx/gradient": "^3.3.0",
|
||||
"@visx/group": "^3.3.0",
|
||||
"@visx/mock-data": "^3.3.0",
|
||||
"@visx/pattern": "^3.3.0",
|
||||
"@visx/responsive": "^3.3.0",
|
||||
"@visx/scale": "^3.5.0",
|
||||
"@visx/shape": "^3.5.0",
|
||||
"@visx/vendor": "^3.5.0",
|
||||
"axios": "^1.6.2",
|
||||
"class-variance-authority": "^0.7.0",
|
||||
"clsx": "^2.0.0",
|
||||
"cmdk": "^0.2.0",
|
||||
"jotai": "^2.6.0",
|
||||
"prism-react-renderer": "^2.3.0",
|
||||
"qs": "^6.11.2",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0",
|
||||
"react-hook-form": "^7.48.2",
|
||||
"react-router-dom": "^6.20.0",
|
||||
"react-syntax-highlighter": "^15.5.0",
|
||||
"tailwind-merge": "^2.0.0",
|
||||
"tailwindcss-animate": "^1.0.7",
|
||||
"tiny-invariant": "^1.3.1",
|
||||
"zod": "^3.22.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/node": "^20.10.1",
|
||||
"@types/qs": "^6.9.10",
|
||||
"@types/react": "^18.2.37",
|
||||
"@types/react-dom": "^18.2.15",
|
||||
"@types/react-syntax-highlighter": "^15.5.11",
|
||||
"@typescript-eslint/eslint-plugin": "^6.10.0",
|
||||
"@typescript-eslint/parser": "^6.10.0",
|
||||
"@vitejs/plugin-react": "^4.2.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"eslint": "^8.53.0",
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.4",
|
||||
"postcss": "^8.4.31",
|
||||
"tailwindcss": "^3.3.5",
|
||||
"typescript": "^5.2.2",
|
||||
"vite": "^5.0.0"
|
||||
}
|
||||
}
|
||||
6
frontend/app/postcss.config.js
Normal file
6
frontend/app/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
export default {
|
||||
plugins: {
|
||||
tailwindcss: {},
|
||||
autoprefixer: {},
|
||||
},
|
||||
}
|
||||
BIN
frontend/app/public/favicon.ico
Normal file
BIN
frontend/app/public/favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 15 KiB |
BIN
frontend/app/src/assets/hatchet.png
Normal file
BIN
frontend/app/src/assets/hatchet.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.3 KiB |
BIN
frontend/app/src/assets/hatchet_logo.png
Normal file
BIN
frontend/app/src/assets/hatchet_logo.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 8.0 KiB |
@@ -0,0 +1,99 @@
|
||||
import React from "react";
|
||||
import { Group } from "@visx/group";
|
||||
import { AreaClosed } from "@visx/shape";
|
||||
import { AxisLeft, AxisBottom, AxisScale } from "@visx/axis";
|
||||
import { LinearGradient } from "@visx/gradient";
|
||||
import { curveMonotoneX } from "@visx/curve";
|
||||
import { AppleStock } from "@visx/mock-data/lib/mocks/appleStock";
|
||||
|
||||
// Initialize some variables
|
||||
const axisColor = "#fff";
|
||||
const axisBottomTickLabelProps = {
|
||||
textAnchor: "middle" as const,
|
||||
fontFamily: "Arial",
|
||||
fontSize: 10,
|
||||
fill: axisColor,
|
||||
};
|
||||
const axisLeftTickLabelProps = {
|
||||
dx: "-0.25em",
|
||||
dy: "0.25em",
|
||||
fontFamily: "Arial",
|
||||
fontSize: 10,
|
||||
textAnchor: "end" as const,
|
||||
fill: axisColor,
|
||||
};
|
||||
|
||||
// accessors
|
||||
const getDate = (d: AppleStock) => new Date(d.date);
|
||||
const getStockValue = (d: AppleStock) => d.close;
|
||||
|
||||
export default function AreaChart({
|
||||
data,
|
||||
gradientColor,
|
||||
width,
|
||||
yMax,
|
||||
margin,
|
||||
xScale,
|
||||
yScale,
|
||||
hideBottomAxis = false,
|
||||
hideLeftAxis = false,
|
||||
top,
|
||||
left,
|
||||
children,
|
||||
}: {
|
||||
data: AppleStock[];
|
||||
gradientColor: string;
|
||||
xScale: AxisScale<number>;
|
||||
yScale: AxisScale<number>;
|
||||
width: number;
|
||||
yMax: number;
|
||||
margin: { top: number; right: number; bottom: number; left: number };
|
||||
hideBottomAxis?: boolean;
|
||||
hideLeftAxis?: boolean;
|
||||
top?: number;
|
||||
left?: number;
|
||||
children?: React.ReactNode;
|
||||
}) {
|
||||
if (width < 10) return null;
|
||||
return (
|
||||
<Group left={left || margin.left} top={top || margin.top}>
|
||||
<LinearGradient
|
||||
id="gradient"
|
||||
from={gradientColor}
|
||||
fromOpacity={1}
|
||||
to={gradientColor}
|
||||
toOpacity={0.2}
|
||||
/>
|
||||
<AreaClosed<AppleStock>
|
||||
data={data}
|
||||
x={(d) => xScale(getDate(d)) || 0}
|
||||
y={(d) => yScale(getStockValue(d)) || 0}
|
||||
yScale={yScale}
|
||||
strokeWidth={1}
|
||||
stroke="url(#gradient)"
|
||||
fill="url(#gradient)"
|
||||
curve={curveMonotoneX}
|
||||
/>
|
||||
{!hideBottomAxis && (
|
||||
<AxisBottom
|
||||
top={yMax}
|
||||
scale={xScale}
|
||||
numTicks={width > 520 ? 10 : 5}
|
||||
stroke={axisColor}
|
||||
tickStroke={axisColor}
|
||||
tickLabelProps={axisBottomTickLabelProps}
|
||||
/>
|
||||
)}
|
||||
{!hideLeftAxis && (
|
||||
<AxisLeft
|
||||
scale={yScale}
|
||||
numTicks={5}
|
||||
stroke={axisColor}
|
||||
tickStroke={axisColor}
|
||||
tickLabelProps={axisLeftTickLabelProps}
|
||||
/>
|
||||
)}
|
||||
{children}
|
||||
</Group>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,253 @@
|
||||
/* eslint-disable @typescript-eslint/no-use-before-define */
|
||||
import React, { useRef, useState, useMemo } from "react";
|
||||
import { scaleTime, scaleLinear } from "@visx/scale";
|
||||
import appleStock, { AppleStock } from "@visx/mock-data/lib/mocks/appleStock";
|
||||
import { Brush } from "@visx/brush";
|
||||
import { Bounds } from "@visx/brush/lib/types";
|
||||
import BaseBrush, {
|
||||
BaseBrushState,
|
||||
UpdateBrush,
|
||||
} from "@visx/brush/lib/BaseBrush";
|
||||
import { PatternLines } from "@visx/pattern";
|
||||
import { Group } from "@visx/group";
|
||||
import { LinearGradient } from "@visx/gradient";
|
||||
import { max, extent } from "@visx/vendor/d3-array";
|
||||
import { BrushHandleRenderProps } from "@visx/brush/lib/BrushHandle";
|
||||
import AreaChart from "./area-chart";
|
||||
import { Button } from "@/components/ui/button";
|
||||
|
||||
// Initialize some variables
|
||||
const stock = appleStock.slice(1000);
|
||||
const brushMargin = { top: 10, bottom: 15, left: 50, right: 20 };
|
||||
const chartSeparation = 30;
|
||||
const PATTERN_ID = "brush_pattern";
|
||||
const GRADIENT_ID = "brush_gradient";
|
||||
export const accentColor = "#ffffff44";
|
||||
export const background = "#1E293B";
|
||||
export const background2 = "#8c77e0";
|
||||
const selectedBrushStyle = {
|
||||
fill: `url(#${PATTERN_ID})`,
|
||||
stroke: "white",
|
||||
};
|
||||
|
||||
// accessors
|
||||
const getDate = (d: AppleStock) => new Date(d.date);
|
||||
const getStockValue = (d: AppleStock) => d.close;
|
||||
|
||||
export type BrushProps = {
|
||||
width: number;
|
||||
height: number;
|
||||
margin?: { top: number; right: number; bottom: number; left: number };
|
||||
compact?: boolean;
|
||||
};
|
||||
|
||||
function BrushChart({
|
||||
compact = false,
|
||||
width,
|
||||
height,
|
||||
margin = {
|
||||
top: 20,
|
||||
left: 50,
|
||||
bottom: 20,
|
||||
right: 20,
|
||||
},
|
||||
}: BrushProps) {
|
||||
const brushRef = useRef<BaseBrush | null>(null);
|
||||
const [filteredStock, setFilteredStock] = useState(stock);
|
||||
|
||||
const onBrushChange = (domain: Bounds | null) => {
|
||||
if (!domain) return;
|
||||
const { x0, x1, y0, y1 } = domain;
|
||||
const stockCopy = stock.filter((s) => {
|
||||
const x = getDate(s).getTime();
|
||||
const y = getStockValue(s);
|
||||
return x > x0 && x < x1 && y > y0 && y < y1;
|
||||
});
|
||||
setFilteredStock(stockCopy);
|
||||
};
|
||||
|
||||
const innerHeight = height - margin.top - margin.bottom;
|
||||
const topChartBottomMargin = compact
|
||||
? chartSeparation / 2
|
||||
: chartSeparation + 10;
|
||||
const topChartHeight = 0.8 * innerHeight - topChartBottomMargin;
|
||||
const bottomChartHeight = innerHeight - topChartHeight - chartSeparation;
|
||||
|
||||
// bounds
|
||||
const xMax = Math.max(width - margin.left - margin.right, 0);
|
||||
const yMax = Math.max(topChartHeight, 0);
|
||||
const xBrushMax = Math.max(width - brushMargin.left - brushMargin.right, 0);
|
||||
const yBrushMax = Math.max(
|
||||
bottomChartHeight - brushMargin.top - brushMargin.bottom,
|
||||
0
|
||||
);
|
||||
|
||||
// scales
|
||||
const dateScale = useMemo(
|
||||
() =>
|
||||
scaleTime<number>({
|
||||
range: [0, xMax],
|
||||
domain: extent(filteredStock, getDate) as [Date, Date],
|
||||
}),
|
||||
[xMax, filteredStock]
|
||||
);
|
||||
const stockScale = useMemo(
|
||||
() =>
|
||||
scaleLinear<number>({
|
||||
range: [yMax, 0],
|
||||
domain: [0, max(filteredStock, getStockValue) || 0],
|
||||
nice: true,
|
||||
}),
|
||||
[yMax, filteredStock]
|
||||
);
|
||||
const brushDateScale = useMemo(
|
||||
() =>
|
||||
scaleTime<number>({
|
||||
range: [0, xBrushMax],
|
||||
domain: extent(stock, getDate) as [Date, Date],
|
||||
}),
|
||||
[xBrushMax]
|
||||
);
|
||||
const brushStockScale = useMemo(
|
||||
() =>
|
||||
scaleLinear({
|
||||
range: [yBrushMax, 0],
|
||||
domain: [0, max(stock, getStockValue) || 0],
|
||||
nice: true,
|
||||
}),
|
||||
[yBrushMax]
|
||||
);
|
||||
|
||||
const initialBrushPosition = useMemo(
|
||||
() => ({
|
||||
start: { x: brushDateScale(getDate(stock[50])) },
|
||||
end: { x: brushDateScale(getDate(stock[100])) },
|
||||
}),
|
||||
[brushDateScale]
|
||||
);
|
||||
|
||||
// event handlers
|
||||
const handleClearClick = () => {
|
||||
if (brushRef?.current) {
|
||||
setFilteredStock(stock);
|
||||
brushRef.current.reset();
|
||||
}
|
||||
};
|
||||
|
||||
const handleResetClick = () => {
|
||||
if (brushRef?.current) {
|
||||
const updater: UpdateBrush = (prevBrush) => {
|
||||
const newExtent = brushRef.current!.getExtent(
|
||||
initialBrushPosition.start,
|
||||
initialBrushPosition.end
|
||||
);
|
||||
|
||||
const newState: BaseBrushState = {
|
||||
...prevBrush,
|
||||
start: { y: newExtent.y0, x: newExtent.x0 },
|
||||
end: { y: newExtent.y1, x: newExtent.x1 },
|
||||
extent: newExtent,
|
||||
};
|
||||
|
||||
return newState;
|
||||
};
|
||||
brushRef.current.updateBrush(updater);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div>
|
||||
<svg width={width} height={height}>
|
||||
{/* <LinearGradient
|
||||
id={GRADIENT_ID}
|
||||
from={background}
|
||||
to={background2}
|
||||
rotate={45}
|
||||
/> */}
|
||||
{/* <rect
|
||||
x={0}
|
||||
y={0}
|
||||
width={width}
|
||||
height={height}
|
||||
fill={background}
|
||||
rx={14}
|
||||
/> */}
|
||||
<AreaChart
|
||||
hideBottomAxis={compact}
|
||||
data={filteredStock}
|
||||
width={width}
|
||||
margin={{ ...margin, bottom: topChartBottomMargin }}
|
||||
yMax={yMax}
|
||||
xScale={dateScale}
|
||||
yScale={stockScale}
|
||||
gradientColor={background2}
|
||||
/>
|
||||
<AreaChart
|
||||
hideBottomAxis
|
||||
hideLeftAxis
|
||||
data={stock}
|
||||
width={width}
|
||||
yMax={yBrushMax}
|
||||
xScale={brushDateScale}
|
||||
yScale={brushStockScale}
|
||||
margin={brushMargin}
|
||||
top={topChartHeight + topChartBottomMargin + margin.top}
|
||||
gradientColor={background2}
|
||||
>
|
||||
<PatternLines
|
||||
id={PATTERN_ID}
|
||||
height={8}
|
||||
width={8}
|
||||
stroke={accentColor}
|
||||
strokeWidth={1}
|
||||
orientation={["diagonal"]}
|
||||
/>
|
||||
<Brush
|
||||
xScale={brushDateScale}
|
||||
yScale={brushStockScale}
|
||||
width={xBrushMax}
|
||||
height={yBrushMax}
|
||||
margin={brushMargin}
|
||||
handleSize={8}
|
||||
innerRef={brushRef}
|
||||
resizeTriggerAreas={["left", "right"]}
|
||||
brushDirection="horizontal"
|
||||
initialBrushPosition={initialBrushPosition}
|
||||
onChange={onBrushChange}
|
||||
onClick={() => setFilteredStock(stock)}
|
||||
selectedBoxStyle={selectedBrushStyle}
|
||||
useWindowMoveEvents
|
||||
renderBrushHandle={(props) => <BrushHandle {...props} />}
|
||||
/>
|
||||
</AreaChart>
|
||||
</svg>
|
||||
<Button variant="secondary" size="sm" onClick={handleClearClick}>
|
||||
Clear
|
||||
</Button>
|
||||
<Button variant="secondary" size="sm" onClick={handleResetClick}>
|
||||
Reset
|
||||
</Button>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
// We need to manually offset the handles for them to be rendered at the right position
|
||||
function BrushHandle({ x, height, isBrushActive }: BrushHandleRenderProps) {
|
||||
const pathWidth = 8;
|
||||
const pathHeight = 15;
|
||||
if (!isBrushActive) {
|
||||
return null;
|
||||
}
|
||||
return (
|
||||
<Group left={x + pathWidth / 2} top={(height - pathHeight) / 2}>
|
||||
<path
|
||||
fill="#f2f2f2"
|
||||
d="M -4.5 0.5 L 3.5 0.5 L 3.5 15.5 L -4.5 15.5 L -4.5 0.5 M -1.5 4 L -1.5 12 M 0.5 4 L 0.5 12"
|
||||
stroke="#999999"
|
||||
strokeWidth="1"
|
||||
style={{ cursor: "ew-resize" }}
|
||||
/>
|
||||
</Group>
|
||||
);
|
||||
}
|
||||
|
||||
export default BrushChart;
|
||||
@@ -0,0 +1,71 @@
|
||||
import {
|
||||
ArrowDownIcon,
|
||||
ArrowUpIcon,
|
||||
CaretSortIcon,
|
||||
EyeNoneIcon,
|
||||
} from "@radix-ui/react-icons";
|
||||
import { Column } from "@tanstack/react-table";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuSeparator,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
|
||||
interface DataTableColumnHeaderProps<TData, TValue>
|
||||
extends React.HTMLAttributes<HTMLDivElement> {
|
||||
column: Column<TData, TValue>;
|
||||
title: string;
|
||||
}
|
||||
|
||||
export function DataTableColumnHeader<TData, TValue>({
|
||||
column,
|
||||
title,
|
||||
className,
|
||||
}: DataTableColumnHeaderProps<TData, TValue>) {
|
||||
if (!column.getCanSort()) {
|
||||
return <div className={cn(className, "text-xs")}>{title}</div>;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className={cn("flex items-center space-x-2", className)}>
|
||||
<DropdownMenu>
|
||||
<DropdownMenuTrigger asChild>
|
||||
<Button
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
className="-ml-3 h-8 data-[state=open]:bg-accent"
|
||||
>
|
||||
<span>{title}</span>
|
||||
{column.getIsSorted() === "desc" ? (
|
||||
<ArrowDownIcon className="ml-2 h-4 w-4" />
|
||||
) : column.getIsSorted() === "asc" ? (
|
||||
<ArrowUpIcon className="ml-2 h-4 w-4" />
|
||||
) : (
|
||||
<CaretSortIcon className="ml-2 h-4 w-4" />
|
||||
)}
|
||||
</Button>
|
||||
</DropdownMenuTrigger>
|
||||
<DropdownMenuContent align="start">
|
||||
<DropdownMenuItem onClick={() => column.toggleSorting(false)}>
|
||||
<ArrowUpIcon className="mr-2 h-3.5 w-3.5 text-muted-foreground/70" />
|
||||
Asc
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuItem onClick={() => column.toggleSorting(true)}>
|
||||
<ArrowDownIcon className="mr-2 h-3.5 w-3.5 text-muted-foreground/70" />
|
||||
Desc
|
||||
</DropdownMenuItem>
|
||||
<DropdownMenuSeparator />
|
||||
<DropdownMenuItem onClick={() => column.toggleVisibility(false)}>
|
||||
<EyeNoneIcon className="mr-2 h-3.5 w-3.5 text-muted-foreground/70" />
|
||||
Hide
|
||||
</DropdownMenuItem>
|
||||
</DropdownMenuContent>
|
||||
</DropdownMenu>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,147 @@
|
||||
import * as React from "react";
|
||||
import { CheckIcon, PlusCircledIcon } from "@radix-ui/react-icons";
|
||||
import { Column } from "@tanstack/react-table";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
import { Badge } from "@/components/ui/badge";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Command,
|
||||
CommandEmpty,
|
||||
CommandGroup,
|
||||
CommandInput,
|
||||
CommandItem,
|
||||
CommandList,
|
||||
CommandSeparator,
|
||||
} from "@/components/ui/command";
|
||||
import {
|
||||
Popover,
|
||||
PopoverContent,
|
||||
PopoverTrigger,
|
||||
} from "@/components/ui/popover";
|
||||
import { Separator } from "@/components/ui/separator";
|
||||
|
||||
interface DataTableFacetedFilterProps<TData, TValue> {
|
||||
column?: Column<TData, TValue>;
|
||||
title?: string;
|
||||
options: {
|
||||
label: string;
|
||||
value: string;
|
||||
icon?: React.ComponentType<{ className?: string }>;
|
||||
}[];
|
||||
}
|
||||
|
||||
export function DataTableFacetedFilter<TData, TValue>({
|
||||
column,
|
||||
title,
|
||||
options,
|
||||
}: DataTableFacetedFilterProps<TData, TValue>) {
|
||||
const facets = column?.getFacetedUniqueValues();
|
||||
const selectedValues = new Set(column?.getFilterValue() as string[]);
|
||||
|
||||
return (
|
||||
<Popover>
|
||||
<PopoverTrigger asChild>
|
||||
<Button variant="outline" size="sm" className="h-8 border-dashed">
|
||||
<PlusCircledIcon className="mr-2 h-4 w-4" />
|
||||
{title}
|
||||
{selectedValues?.size > 0 && (
|
||||
<>
|
||||
<Separator orientation="vertical" className="mx-2 h-4" />
|
||||
<Badge
|
||||
variant="secondary"
|
||||
className="rounded-sm px-1 font-normal lg:hidden"
|
||||
>
|
||||
{selectedValues.size}
|
||||
</Badge>
|
||||
<div className="hidden space-x-1 lg:flex">
|
||||
{selectedValues.size > 2 ? (
|
||||
<Badge
|
||||
variant="secondary"
|
||||
className="rounded-sm px-1 font-normal"
|
||||
>
|
||||
{selectedValues.size} selected
|
||||
</Badge>
|
||||
) : (
|
||||
options
|
||||
.filter((option) => selectedValues.has(option.value))
|
||||
.map((option) => (
|
||||
<Badge
|
||||
variant="secondary"
|
||||
key={option.value}
|
||||
className="rounded-sm px-1 font-normal"
|
||||
>
|
||||
{option.label}
|
||||
</Badge>
|
||||
))
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</Button>
|
||||
</PopoverTrigger>
|
||||
<PopoverContent className="w-[200px] p-0" align="start">
|
||||
<Command>
|
||||
<CommandInput placeholder={title} />
|
||||
<CommandList>
|
||||
<CommandEmpty>No results found.</CommandEmpty>
|
||||
<CommandGroup>
|
||||
{options.map((option) => {
|
||||
const isSelected = selectedValues.has(option.value);
|
||||
return (
|
||||
<CommandItem
|
||||
key={option.value}
|
||||
onSelect={() => {
|
||||
if (isSelected) {
|
||||
selectedValues.delete(option.value);
|
||||
} else {
|
||||
selectedValues.add(option.value);
|
||||
}
|
||||
const filterValues = Array.from(selectedValues);
|
||||
column?.setFilterValue(
|
||||
filterValues.length ? filterValues : undefined
|
||||
);
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className={cn(
|
||||
"mr-2 flex h-4 w-4 items-center justify-center rounded-sm border border-primary",
|
||||
isSelected
|
||||
? "bg-primary text-primary-foreground"
|
||||
: "opacity-50 [&_svg]:invisible"
|
||||
)}
|
||||
>
|
||||
<CheckIcon className={cn("h-4 w-4")} />
|
||||
</div>
|
||||
{option.icon && (
|
||||
<option.icon className="mr-2 h-4 w-4 text-muted-foreground" />
|
||||
)}
|
||||
<span>{option.label}</span>
|
||||
{facets?.get(option.value) && (
|
||||
<span className="ml-auto flex h-4 w-4 items-center justify-center font-mono text-xs">
|
||||
{facets.get(option.value)}
|
||||
</span>
|
||||
)}
|
||||
</CommandItem>
|
||||
);
|
||||
})}
|
||||
</CommandGroup>
|
||||
{selectedValues.size > 0 && (
|
||||
<>
|
||||
<CommandSeparator />
|
||||
<CommandGroup>
|
||||
<CommandItem
|
||||
onSelect={() => column?.setFilterValue(undefined)}
|
||||
className="justify-center text-center"
|
||||
>
|
||||
Clear filters
|
||||
</CommandItem>
|
||||
</CommandGroup>
|
||||
</>
|
||||
)}
|
||||
</CommandList>
|
||||
</Command>
|
||||
</PopoverContent>
|
||||
</Popover>
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,101 @@
|
||||
import {
|
||||
ChevronLeftIcon,
|
||||
ChevronRightIcon,
|
||||
DoubleArrowLeftIcon,
|
||||
DoubleArrowRightIcon,
|
||||
} from "@radix-ui/react-icons";
|
||||
import { Table } from "@tanstack/react-table";
|
||||
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
|
||||
interface DataTablePaginationProps<TData> {
|
||||
table: Table<TData>;
|
||||
onSetPageSize?: (pageSize: number) => void;
|
||||
}
|
||||
|
||||
/**
 * Pagination footer for a TanStack data table.
 *
 * Renders: the selected-row count, a "rows per page" selector, a
 * "Page X of Y" indicator, and first/previous/next/last navigation buttons.
 * Navigation buttons disable themselves at the boundaries via
 * `getCanPreviousPage()` / `getCanNextPage()`.
 */
export function DataTablePagination<TData>({
  table,
  onSetPageSize,
}: DataTablePaginationProps<TData>) {
  const pagination = table.getState().pagination;

  return (
    <div className="flex items-center justify-between px-2">
      <div className="flex-1 text-sm text-muted-foreground">
        {table.getFilteredSelectedRowModel().rows.length} of{" "}
        {table.getFilteredRowModel().rows.length} row(s) selected.
      </div>
      <div className="flex items-center space-x-6 lg:space-x-8">
        <div className="flex items-center space-x-2">
          <p className="text-sm font-medium">Rows per page</p>
          <Select
            value={`${pagination.pageSize}`}
            onValueChange={(value) => {
              // Convert once, then keep the table state and the optional
              // external listener in sync with the same value.
              const pageSize = Number(value);
              table.setPageSize(pageSize);
              onSetPageSize?.(pageSize);
            }}
          >
            <SelectTrigger className="h-8 w-[70px]">
              <SelectValue placeholder={pagination.pageSize} />
            </SelectTrigger>
            <SelectContent side="top">
              {[50, 100, 200, 500].map((pageSize) => (
                <SelectItem key={pageSize} value={`${pageSize}`}>
                  {pageSize}
                </SelectItem>
              ))}
            </SelectContent>
          </Select>
        </div>
        <div className="flex w-[100px] items-center justify-center text-sm font-medium">
          Page {pagination.pageIndex + 1} of {table.getPageCount()}
        </div>
        <div className="flex items-center space-x-2">
          {/* First / last buttons are hidden on small screens (lg:flex). */}
          <Button
            variant="outline"
            className="hidden h-8 w-8 p-0 lg:flex"
            onClick={() => table.setPageIndex(0)}
            disabled={!table.getCanPreviousPage()}
          >
            <span className="sr-only">Go to first page</span>
            <DoubleArrowLeftIcon className="h-4 w-4" />
          </Button>
          <Button
            variant="outline"
            className="h-8 w-8 p-0"
            onClick={() => table.previousPage()}
            disabled={!table.getCanPreviousPage()}
          >
            <span className="sr-only">Go to previous page</span>
            <ChevronLeftIcon className="h-4 w-4" />
          </Button>
          <Button
            variant="outline"
            className="h-8 w-8 p-0"
            onClick={() => table.nextPage()}
            disabled={!table.getCanNextPage()}
          >
            <span className="sr-only">Go to next page</span>
            <ChevronRightIcon className="h-4 w-4" />
          </Button>
          <Button
            variant="outline"
            className="hidden h-8 w-8 p-0 lg:flex"
            onClick={() => table.setPageIndex(table.getPageCount() - 1)}
            disabled={!table.getCanNextPage()}
          >
            <span className="sr-only">Go to last page</span>
            <DoubleArrowRightIcon className="h-4 w-4" />
          </Button>
        </div>
      </div>
    </div>
  );
}
|
||||
@@ -0,0 +1,71 @@
|
||||
import { DotsHorizontalIcon } from "@radix-ui/react-icons";
|
||||
import { Row } from "@tanstack/react-table";
|
||||
|
||||
import { Button } from "@/components/ui/button";
|
||||
import {
|
||||
DropdownMenu,
|
||||
DropdownMenuContent,
|
||||
DropdownMenuItem,
|
||||
DropdownMenuRadioGroup,
|
||||
DropdownMenuRadioItem,
|
||||
DropdownMenuSeparator,
|
||||
DropdownMenuShortcut,
|
||||
DropdownMenuSub,
|
||||
DropdownMenuSubContent,
|
||||
DropdownMenuSubTrigger,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
|
||||
import { IDGetter } from "./data-table";
|
||||
|
||||
/**
 * A selectable label option shown in the row-actions "Labels" submenu.
 */
interface Label {
  /** Human-readable text displayed in the menu. */
  label: string;
  /** Value used as the radio-item key and selection value. */
  value: string;
}
|
||||
|
||||
/**
 * Props for {@link DataTableRowActions}.
 */
interface DataTableRowActionsProps<TData extends IDGetter> {
  /** The table row this actions menu operates on. */
  row: Row<TData>;
  /** Label choices rendered in the "Labels" radio submenu. */
  labels: Label[];
}
|
||||
|
||||
/**
 * Per-row actions dropdown (the "…" button at the end of a table row).
 *
 * Offers Edit / Make a copy / Favorite items, a "Labels" submenu rendered
 * as a radio group over the supplied labels, and a Delete item with a
 * keyboard-shortcut hint.
 */
export function DataTableRowActions<TData extends IDGetter>({
  row,
  labels,
}: DataTableRowActionsProps<TData>) {
  // One radio item per available label.
  // NOTE(review): the radio group's selected value is the row's id
  // (row.original.metadata.id) — confirm label values are row ids upstream.
  const labelItems = labels.map((item) => (
    <DropdownMenuRadioItem key={item.value} value={item.value}>
      {item.label}
    </DropdownMenuRadioItem>
  ));

  return (
    <DropdownMenu>
      <DropdownMenuTrigger asChild>
        <Button
          variant="ghost"
          className="flex h-8 w-8 p-0 data-[state=open]:bg-muted"
        >
          <DotsHorizontalIcon className="h-4 w-4" />
          <span className="sr-only">Open menu</span>
        </Button>
      </DropdownMenuTrigger>
      <DropdownMenuContent align="end" className="w-[160px]">
        <DropdownMenuItem>Edit</DropdownMenuItem>
        <DropdownMenuItem>Make a copy</DropdownMenuItem>
        <DropdownMenuItem>Favorite</DropdownMenuItem>
        <DropdownMenuSeparator />
        <DropdownMenuSub>
          <DropdownMenuSubTrigger>Labels</DropdownMenuSubTrigger>
          <DropdownMenuSubContent>
            <DropdownMenuRadioGroup value={row.original.metadata.id}>
              {labelItems}
            </DropdownMenuRadioGroup>
          </DropdownMenuSubContent>
        </DropdownMenuSub>
        <DropdownMenuSeparator />
        <DropdownMenuItem>
          Delete
          <DropdownMenuShortcut>⌘⌫</DropdownMenuShortcut>
        </DropdownMenuItem>
      </DropdownMenuContent>
    </DropdownMenu>
  );
}
|
||||
@@ -0,0 +1,73 @@
|
||||
import { Cross2Icon } from "@radix-ui/react-icons";
|
||||
import { Table } from "@tanstack/react-table";
|
||||
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { DataTableViewOptions } from "./data-table-view-options";
|
||||
|
||||
import { DataTableFacetedFilter } from "./data-table-faceted-filter";
|
||||
|
||||
/**
 * One selectable option inside a faceted toolbar filter.
 */
export interface FilterOption {
  /** Human-readable text shown in the filter popover. */
  label: string;
  /** Value written into the column's filter state when selected. */
  value: string;
  /** Optional icon component rendered next to the label. */
  icon?: React.ComponentType<{ className?: string }>;
}
|
||||
|
||||
/**
 * Declarative description of the faceted filters to render in the toolbar:
 * one entry per filterable column.
 */
export type ToolbarFilters = {
  /** Id of the table column this filter targets (must exist on the table). */
  columnId: string;
  /** Title shown on the filter's trigger button. */
  title: string;
  /** The selectable options for this filter. */
  options: FilterOption[];
}[];
|
||||
|
||||
/**
 * Props for {@link DataTableToolbar}.
 */
interface DataTableToolbarProps<TData> {
  /** TanStack Table instance whose column filters the toolbar controls. */
  table: Table<TData>;
  /** Faceted filters to render on the left side of the toolbar. */
  filters: ToolbarFilters;
  /** Extra action elements rendered next to the view-options button. */
  actions: JSX.Element[];
}
|
||||
|
||||
export function DataTableToolbar<TData>({
|
||||
table,
|
||||
filters,
|
||||
actions,
|
||||
}: DataTableToolbarProps<TData>) {
|
||||
const isFiltered = table.getState().columnFilters.length > 0;
|
||||
|
||||
return (
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex flex-1 items-center space-x-2">
|
||||
{/* <Input
|
||||
placeholder="Filter tasks..."
|
||||
value={(table.getColumn("title")?.getFilterValue() as string) ?? ""}
|
||||
onChange={(event) =>
|
||||
table.getColumn("title")?.setFilterValue(event.target.value)
|
||||
}
|
||||
className="h-8 w-[150px] lg:w-[250px]"
|
||||
/> */}
|
||||
{filters.map(
|
||||
(filter) =>
|
||||
table.getColumn(filter.columnId) && (
|
||||
<DataTableFacetedFilter
|
||||
key={filter.columnId}
|
||||
column={table.getColumn(filter.columnId)}
|
||||
title={filter.title}
|
||||
options={filter.options}
|
||||
/>
|
||||
)
|
||||
)}
|
||||
{isFiltered && (
|
||||
<Button
|
||||
variant="ghost"
|
||||
onClick={() => table.resetColumnFilters()}
|
||||
className="h-8 px-2 lg:px-3"
|
||||
>
|
||||
Reset
|
||||
<Cross2Icon className="ml-2 h-4 w-4" />
|
||||
</Button>
|
||||
)}
|
||||
</div>
|
||||
<div className="flex flex-row gap-4">
|
||||
{actions && actions.length > 0 && actions}
|
||||
<DataTableViewOptions table={table} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user