diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index e6787d059..849598830 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -33,6 +33,37 @@ jobs:
working-directory: frontend/docs
run: npm run lint:check
+ search-quality:
+ runs-on: ubicloud-standard-2
+ steps:
+ - name: Clone repository
+ uses: actions/checkout@v6
+ - name: Setup pnpm
+ uses: pnpm/action-setup@v4
+ with:
+ version: 10.16.1
+ run_install: false
+ - name: Setup Node.js
+ uses: actions/setup-node@v6
+ with:
+ cache: pnpm
+ cache-dependency-path: frontend/docs/pnpm-lock.yaml
+ - name: Install dependencies
+ working-directory: frontend/docs
+ run: pnpm install --frozen-lockfile
+ - uses: actions/setup-python@v6
+ with:
+ python-version: '3.14'
+ - name: Generate snippets
+ working-directory: frontend/snippets
+ run: python3 generate.py
+ - name: Generate search index
+ working-directory: frontend/docs
+ run: pnpm run generate-llms
+ - name: Run search quality tests
+ working-directory: frontend/docs
+ run: pnpm run test-search
+
build:
runs-on: ubicloud-standard-2
steps:
diff --git a/.github/workflows/gen-examples.yml b/.github/workflows/gen-examples.yml
index 71737aae5..c59ed5e82 100644
--- a/.github/workflows/gen-examples.yml
+++ b/.github/workflows/gen-examples.yml
@@ -11,12 +11,27 @@ jobs:
- name: Checkout repository
uses: actions/checkout@v6
+ - name: Install Task
+ uses: arduino/setup-task@v2
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Setup Go
+ uses: actions/setup-go@v6
+ with:
+ go-version: "1.25"
+
+ - name: Setup pnpm
+ uses: pnpm/action-setup@v4
+ with:
+ version: 10.16.1
+ run_install: false
+
- uses: actions/setup-python@v6
with:
python-version: '3.14'
- name: Generate snippets
- working-directory: frontend/snippets
- run: python3 generate.py
+ run: task install-dependencies pre-commit-install generate-docs -v
- name: Check for changes in examples directory
id: verify-changed-files
diff --git a/.github/workflows/osv-scanner.yml b/.github/workflows/osv-scanner.yml
index 4e0d970b1..fcfdaee5d 100644
--- a/.github/workflows/osv-scanner.yml
+++ b/.github/workflows/osv-scanner.yml
@@ -22,7 +22,7 @@ jobs:
./
scan-pr:
if: ${{ github.event_name == 'pull_request' }}
- uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@v2.3.2"
+ uses: "google/osv-scanner-action/.github/workflows/osv-scanner-reusable-pr.yml@v2.3.3"
with:
scan-args: |-
-r
diff --git a/.github/workflows/sdk-python.yml b/.github/workflows/sdk-python.yml
index 9d099a633..b3edc36c5 100644
--- a/.github/workflows/sdk-python.yml
+++ b/.github/workflows/sdk-python.yml
@@ -90,7 +90,6 @@ jobs:
run: |
export DATABASE_URL="postgresql://hatchet:hatchet@127.0.0.1:5431/hatchet"
go run ./cmd/hatchet-migrate
- task generate-go
- name: Setup
working-directory: .
diff --git a/.github/workflows/sdk-ruby.yml b/.github/workflows/sdk-ruby.yml
new file mode 100644
index 000000000..85a47540a
--- /dev/null
+++ b/.github/workflows/sdk-ruby.yml
@@ -0,0 +1,223 @@
+name: ruby
+on:
+ workflow_dispatch:
+ pull_request:
+ paths:
+ - ".github/**"
+ - "api/**"
+ - "api-contracts/**"
+ - "internal/**"
+ - "pkg/**"
+ - "sdks/ruby/**"
+ push:
+ branches:
+ - main
+ paths:
+ - "sdks/ruby/**"
+
+defaults:
+ run:
+ working-directory: ./sdks/ruby/src
+
+jobs:
+ lint:
+ runs-on: ubicloud-standard-4
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v6
+
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: "3.2"
+ bundler-cache: true
+ working-directory: ./sdks/ruby/src
+
+ - name: Run RuboCop
+ run: bundle exec rubocop
+
+ - name: Run RBS validate
+ run: rbs -I sig validate
+
+ - name: Test gem build
+ run: gem build hatchet-sdk.gemspec
+
+ test:
+ runs-on: ubicloud-standard-4
+ strategy:
+ matrix:
+ ruby-version: ${{ github.event_name == 'pull_request' && fromJSON('["3.2"]') || fromJSON('["3.2", "3.3"]') }}
+ optimistic-scheduling: ["true", "false"]
+ timeout-minutes: 20
+ steps:
+ - uses: actions/checkout@v6
+
+ - name: Install Protoc
+ uses: arduino/setup-protoc@v3
+ with:
+ version: "25.1"
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Install Task
+ uses: arduino/setup-task@v2
+ with:
+ repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Setup Go
+ uses: actions/setup-go@v6
+ with:
+ go-version: "1.25"
+
+ - name: Start Docker dependencies
+ working-directory: .
+ run: docker compose up -d
+
+ - name: Generate
+ working-directory: .
+ run: |
+ export DATABASE_URL="postgresql://hatchet:hatchet@127.0.0.1:5431/hatchet"
+ go run ./cmd/hatchet-migrate
+
+ - name: Setup
+ working-directory: .
+ run: |
+ export SEED_DEVELOPMENT=true
+ export SERVER_PORT=8080
+ export SERVER_URL=http://localhost:8080
+ export SERVER_AUTH_COOKIE_DOMAIN=localhost
+ export SERVER_AUTH_COOKIE_INSECURE=true
+ export SERVER_DEFAULT_ENGINE_VERSION=V1
+ export SERVER_MSGQUEUE_RABBITMQ_URL="amqp://user:password@localhost:5672/"
+ export SERVER_OPTIMISTIC_SCHEDULING_ENABLED=${{ matrix.optimistic-scheduling }}
+
+ go run ./cmd/hatchet-admin quickstart
+
+ go run ./cmd/hatchet-engine --config ./generated/ > engine.log 2>&1 &
+ go run ./cmd/hatchet-api --config ./generated/ > api.log 2>&1 &
+
+ sleep 30
+
+ - name: Set up Ruby ${{ matrix.ruby-version }}
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true
+ working-directory: ./sdks/ruby/src
+
+ - name: Display Ruby version
+ run: ruby -v
+
+ - name: Generate Env File
+ working-directory: .
+ run: |
+ echo "HATCHET_CLIENT_TOKEN=$(go run ./cmd/hatchet-admin token create --config ./generated/ --tenant-id 707d0855-80ab-4e1f-a156-f1c4546cbf52)" >> $GITHUB_ENV
+ echo "HATCHET_CLIENT_TLS_ROOT_CA_FILE=../../../certs/ca.cert" >> $GITHUB_ENV
+ echo "HATCHET_CLIENT_WORKER_HEALTHCHECK_ENABLED=true" >> $GITHUB_ENV
+
+ - name: Set HATCHET_CLIENT_NAMESPACE
+ run: |
+ RUBY_VER=$(ruby -e "puts \"rb#{RUBY_VERSION.gsub('.','')[0..1]}\"")
+ SHORT_SHA=$(git rev-parse --short HEAD)
+ echo "HATCHET_CLIENT_NAMESPACE=${RUBY_VER}-${SHORT_SHA}" >> $GITHUB_ENV
+
+ - name: Run unit tests
+ run: |
+ echo "Using HATCHET_CLIENT_NAMESPACE: $HATCHET_CLIENT_NAMESPACE"
+ bundle exec rspec --format documentation --tag ~integration
+
+ - name: Run integration tests
+ run: bundle exec rspec spec/integration/ --format documentation --tag integration
+
+ - name: Set up Ruby for examples
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ${{ matrix.ruby-version }}
+ bundler-cache: true
+ working-directory: ./sdks/ruby/examples
+
+ - name: Start example worker
+ working-directory: ./sdks/ruby/examples
+ run: bundle exec ruby worker.rb > worker.log 2>&1 &
+
+ - name: Wait for worker health
+ run: |
+ for i in $(seq 1 30); do
+ if curl -s http://localhost:8001/health > /dev/null 2>&1; then
+ echo "Worker is healthy after ${i}s"
+ exit 0
+ fi
+ sleep 1
+ done
+ echo "Worker failed to start within 30s"
+ cat ../examples/worker.log || true
+ exit 1
+
+ - name: Run e2e tests
+ working-directory: ./sdks/ruby/examples
+ run: bundle exec rspec -f d --fail-fast
+
+ - name: Upload worker logs
+ if: always()
+ uses: actions/upload-artifact@v6
+ with:
+ name: ${{ env.HATCHET_CLIENT_NAMESPACE }}-opt-${{ matrix.optimistic-scheduling }}-worker-logs
+ path: ./sdks/ruby/examples/worker.log
+
+ - name: Upload engine logs
+ if: always()
+ uses: actions/upload-artifact@v6
+ with:
+ name: ${{ env.HATCHET_CLIENT_NAMESPACE }}-opt-${{ matrix.optimistic-scheduling }}-engine-logs
+ path: engine.log
+
+ - name: Upload API logs
+ if: always()
+ uses: actions/upload-artifact@v6
+ with:
+ name: ${{ env.HATCHET_CLIENT_NAMESPACE }}-opt-${{ matrix.optimistic-scheduling }}-api-logs
+ path: api.log
+
+ publish:
+ runs-on: ubicloud-standard-4
+ needs: [lint, test]
+ if: github.ref == 'refs/heads/main'
+ permissions:
+ contents: write
+ id-token: write
+ steps:
+ - name: Checkout Repository
+ uses: actions/checkout@v6
+ with:
+ submodules: recursive
+
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: "3.2"
+ bundler-cache: true
+ working-directory: ./sdks/ruby/src
+
+ - name: Check if version changed
+ id: version_check
+ run: |
+ NEW_VERSION=$(ruby -e "require_relative 'lib/hatchet/version'; puts Hatchet::VERSION")
+ CURRENT_VERSION=$(gem info hatchet-sdk --remote --exact 2>/dev/null | grep -oP 'hatchet-sdk \(\K[^)]+' || echo "0.0.0")
+
+ if [ "$CURRENT_VERSION" == "$NEW_VERSION" ]; then
+ echo "Version has not changed ($NEW_VERSION). Skipping publish."
+ echo "should_publish=false" >> "$GITHUB_OUTPUT"
+ else
+ echo "Publishing version $NEW_VERSION (current: $CURRENT_VERSION)"
+ echo "should_publish=true" >> "$GITHUB_OUTPUT"
+ fi
+
+ - name: Configure RubyGems credentials
+ if: steps.version_check.outputs.should_publish == 'true'
+ uses: rubygems/configure-rubygems-credentials@main
+
+ - name: Publish to RubyGems
+ if: steps.version_check.outputs.should_publish == 'true'
+ run: |
+ gem build hatchet-sdk.gemspec
+ NEW_VERSION=$(ruby -e "require_relative 'lib/hatchet/version'; puts Hatchet::VERSION")
+ gem push hatchet-sdk-${NEW_VERSION}.gem
diff --git a/.github/workflows/sdk-typescript.yml b/.github/workflows/sdk-typescript.yml
index fcd663b43..87c73f357 100644
--- a/.github/workflows/sdk-typescript.yml
+++ b/.github/workflows/sdk-typescript.yml
@@ -140,7 +140,6 @@ jobs:
run: |
export DATABASE_URL="postgresql://hatchet:hatchet@127.0.0.1:5431/hatchet"
go run ./cmd/hatchet-migrate
- task generate-go
- name: Setup
working-directory: .
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0d5d76ada..cdd791278 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -107,7 +107,6 @@ jobs:
- name: Generate
run: |
go run ./cmd/hatchet-migrate
- task generate-go
task generate-certs
task generate-local-encryption-keys
@@ -184,7 +183,6 @@ jobs:
- name: Generate
run: |
go run ./cmd/hatchet-migrate
- task generate-go
task generate-certs
task generate-local-encryption-keys
@@ -276,7 +274,6 @@ jobs:
- name: Generate
run: |
go run ./cmd/hatchet-migrate
- task generate-go
task generate-certs
task generate-local-encryption-keys
diff --git a/.nvmrc b/.nvmrc
new file mode 100644
index 000000000..1efe0ac63
--- /dev/null
+++ b/.nvmrc
@@ -0,0 +1 @@
+v20.15.1
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 0ac84e25d..6c1444680 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,128 +1,120 @@
# Contributing
-### Setup
+This guide will help you understand how to contribute effectively to the Hatchet project.
-1. Make sure all prerequisite dependencies are installed:
+## Getting Started
- - [Go 1.25+](https://go.dev/doc/install)
- - [Node.js v18+](https://nodejs.org/en/download) - we recommend using [nvm](https://github.com/nvm-sh/nvm) for managing node versions.
- - [pnpm](https://pnpm.io/installation) installed globally (`npm i -g pnpm`)
- - [Docker Desktop](https://docs.docker.com/desktop/install/mac-install/)
- - [protoc](https://grpc.io/docs/protoc-installation/)
- - [pip](https://pip.pypa.io/en/stable/installation/)
- - [Caddy](https://caddyserver.com/docs/install)
- - [atlas](https://atlasgo.io/)
- - [pre-commit](https://pre-commit.com/)
- - You can install this in a virtual environment with `python3 -m venv venv && source venv/bin/activate && pip3 install pre-commit`
+New to Hatchet? Start with our [Architecture](https://docs.hatchet.run/home/architecture) docs to familiarize yourself with Hatchet's core system design.
-2. You can then populate a local `.env` file with the following:
+Then, before contributing, check out the following sections:
-```
-DATABASE_URL='postgresql://hatchet:hatchet@127.0.0.1:5431/hatchet'
+- [Development Environment Setup](#development-environment-setup)
+- [Pull Requests](#pull-requests)
+- [Testing](#testing)
+- [Running Locally](#running-locally)
+ - [Example Workflow](#example-workflow)
-SERVER_ENCRYPTION_MASTER_KEYSET_FILE=./hack/dev/encryption-keys/master.key
-SERVER_ENCRYPTION_JWT_PRIVATE_KEYSET_FILE=./hack/dev/encryption-keys/private_ec256.key
-SERVER_ENCRYPTION_JWT_PUBLIC_KEYSET_FILE=./hack/dev/encryption-keys/public_ec256.key
+## Development Environment Setup
-SERVER_PORT=8080
-SERVER_URL=http://localhost:8080
+Ensure all prerequisite dependencies are installed:
-SERVER_AUTH_COOKIE_SECRETS="1234"
-SERVER_AUTH_COOKIE_DOMAIN=app.dev.hatchet-tools.com
-SERVER_AUTH_COOKIE_INSECURE=false
-SERVER_AUTH_SET_EMAIL_VERIFIED=true
+- [Go 1.25+](https://go.dev/doc/install)
+- [Node.js v18+](https://nodejs.org/en/download)
+ - We recommend using [nvm](https://github.com/nvm-sh/nvm) for managing node versions to match the version defined in [`.nvmrc`](.nvmrc)
+- [pnpm](https://pnpm.io/installation) installed globally (`npm i -g pnpm`)
+- [Docker](https://docs.docker.com/engine/install/)
+- [task](https://taskfile.dev/docs/installation)
+- [protoc](https://grpc.io/docs/protoc-installation/)
+- [Caddy](https://caddyserver.com/docs/install)
+- [goose](https://pressly.github.io/goose/installation/)
+- [pre-commit](https://pre-commit.com/)
+ - You can install this in a virtual environment with `task pre-commit-install`
-SERVER_MSGQUEUE_KIND=rabbitmq
-SERVER_MSGQUEUE_RABBITMQ_URL=amqp://user:password@127.0.0.1:5672/
+We recommend installing these tools individually using your preferred package manager (e.g., Homebrew).
-SERVER_GRPC_BROADCAST_ADDRESS=grpc.dev.hatchet-tools.com:443
-SERVER_GRPC_INSECURE=true
+## Pull Requests
+
+Before opening a PR, check if there's a related issue in our [backlog](https://github.com/hatchet-dev/hatchet/issues).
+
+For non-trivial changes (anything beyond typos or patch version bumps), please create an issue first so we can discuss the proposal and ensure it aligns with the project.
+
+Next, ensure all changes are:
+
+- Unit tested with `task test`
+- Linted with `task lint`
+- Formatted with `task fmt`
+- Integration tested with `task test-integration` (when applicable)
+
+If your changes require documentation updates, modify the relevant files in [`frontend/docs/pages/`](frontend/docs/pages/). You can spin up the documentation site locally by running `task docs`. By default, this will be available at [`http://localhost:3000`](http://localhost:3000).
+
+For configuration changes, see [Updating Configuration](docs/development/updating-configuration.md).
+
+## Testing
+
+Hatchet uses Go build tags to categorize tests into different test suites. For example, these build tags mark a test as unit-only:
+```go
+//go:build !e2e && !load && !rampup && !integration
+
+func TestMyUnitOfCode() { ... }
```
-3. Start the Database and RabbitMQ services:
+Most contributors should familiarize themselves with **unit testing** and **integration testing**.
+**Unit tests** verify individual functions without external dependencies:
+```sh
+task test
+```
+
+**Integration tests** verify components working together with real dependencies (normally spun up via `docker compose`):
+```sh
+task test-integration
+```
+
+Note: **manual testing** is acceptable for cases where automated testing is impractical, but testing steps should be clearly outlined in your PR description.
+
+## Running Locally
+
+1. Start the Postgres database and RabbitMQ services:
```sh
task start-db
```
-4. Install dependencies, run migrations, generate encryption keys, and seed the database:
-
+2. Install Go & Node.js dependencies, run migrations, generate encryption keys, and seed the database:
```sh
task setup
```
-**_Note: You might need to run this as `sudo` so it can install certificates._**
-
-### Starting the dev server
-
-Start the Hatchet engine, API server, dashboard, and Prisma studio:
-
+3. Start the Hatchet engine, API server, and frontend:
```sh
task start-dev # or task start-dev-tmux if you want to use tmux panes
```
-### Creating and testing workflows
+Once started, you should be able to access the Hatchet UI at [https://app.dev.hatchet-tools.com](https://app.dev.hatchet-tools.com).
-To create and test workflows, run the examples in the `./examples` directory.
-
-You will need to add the tenant (output from the `task seed-dev` command) to the `.env` file in each example directory. An example `.env` file for the `./examples/simple` directory. You can be generated and add it to the .env file via:
+### Example Workflow
+1. Generate client credentials:
```sh
-cat >> ./examples/simple/.env <
-
-# optional
-OTEL_EXPORTER_OTLP_HEADERS=
-
-# optional
-OTEL_EXPORTER_OTLP_ENDPOINT=
+You should see the following logs if the workflow was successfully started against your local instance:
+```log
+{"level":"debug","service":"client","message":"connecting to 127.0.0.1:7070 without TLS"}
+{"level":"info","service":"client","message":"gzip compression enabled for gRPC client"}
+{"level":"debug","service":"worker","message":"worker simple-worker is listening for actions: [process-message:process-message]"}
+{"level":"debug","service":"client","message":"No compute configs found, skipping cloud registration and running all actions locally."}
+{"level":"debug","service":"client","message":"Registered worker with id: c47cc839-8c3b-4b0f-a904-00e37f164b7d"}
+{"level":"debug","service":"client","message":"Starting to listen for actions"}
+{"level":"debug","service":"client","message":"updating worker c47cc839-8c3b-4b0f-a904-00e37f164b7d heartbeat"}
```
-### CloudKMS
+## Questions
-CloudKMS can be used to generate master encryption keys:
-
-```
-gcloud kms keyrings create "development" --location "global"
-gcloud kms keys create "development" --location "global" --keyring "development" --purpose "encryption"
-gcloud kms keys list --location "global" --keyring "development"
-```
-
-From the last step, copy the Key URI and set the following environment variable:
-
-```
-SERVER_ENCRYPTION_CLOUDKMS_KEY_URI=gcp-kms://projects//locations/global/keyRings/development/cryptoKeys/development
-```
-
-Generate a service account in GCP which can encrypt/decrypt on CloudKMS, then download a service account JSON file and set it via:
-
-```
-SERVER_ENCRYPTION_CLOUDKMS_CREDENTIALS_JSON='{...}'
-```
+If you have any further questions or queries, feel free to raise an issue on GitHub. Otherwise, come join our [Discord](https://hatchet.run/discord)!
diff --git a/Taskfile.yaml b/Taskfile.yaml
index b8aa147de..e5df3a879 100644
--- a/Taskfile.yaml
+++ b/Taskfile.yaml
@@ -4,15 +4,12 @@ tasks:
setup:
cmds:
- task: install-dependencies
- - task: generate-certs
- task: set-env-db
- task: migrate
- - task: generate-all
- task: generate-local-encryption-keys
+ - task: generate-docs
- task: set-env-all
- task: seed-dev
- - task: copy-ca-to-sdks
- - task: docs
set-env-db:
cmds:
- |
@@ -36,7 +33,7 @@ tasks:
SERVER_AUTH_COOKIE_SECRETS="$(randstring 16) $(randstring 16)"
SERVER_AUTH_COOKIE_DOMAIN=app.dev.hatchet-tools.com
- SERVER_AUTH_COOKIE_INSECURE=false
+ SERVER_AUTH_COOKIE_INSECURE=true
SERVER_AUTH_SET_EMAIL_VERIFIED=true
SERVER_MSGQUEUE_KIND=rabbitmq
@@ -46,9 +43,9 @@ tasks:
SERVER_ADDITIONAL_LOGGERS_QUEUE_FORMAT=console
SERVER_ADDITIONAL_LOGGERS_PGXSTATS_LEVEL=error
SERVER_ADDITIONAL_LOGGERS_PGXSTATS_FORMAT=console
- SERVER_LOGGER_LEVEL=error
+ SERVER_LOGGER_LEVEL=warn
SERVER_LOGGER_FORMAT=console
- DATABASE_LOGGER_LEVEL=error
+ DATABASE_LOGGER_LEVEL=warn
DATABASE_LOGGER_FORMAT=console
SERVER_GRPC_BROADCAST_ADDRESS=127.0.0.1:7070
@@ -56,16 +53,27 @@ tasks:
SERVER_INTERNAL_CLIENT_BASE_STRATEGY=none
SERVER_INTERNAL_CLIENT_BASE_INHERIT_BASE=false
EOF
+ fmt-go:
+ cmd: gofmt -s -w .
+ fmt-app:
+ dir: frontend/app
+ cmd: pnpm run prettier:fix && pnpm run prettier:check
+ fmt-docs:
+ dir: frontend/docs
+ cmd: pnpm run prettier:fix && pnpm run prettier:check
pre:
+ aliases: [fmt]
cmds:
- - cd frontend/app/ && pnpm run prettier:fix
- - pre-commit run --all-files
+ - task: fmt-go
+ - task: fmt-app
+ - task: fmt-docs
+ - task: pre-commit-run
start-db:
cmds:
- docker compose up -d
stop-db:
cmds:
- - docker compose down
+ - docker compose down {{.CLI_ARGS}}
recreate-db-from-scratch:
cmds:
- docker compose down -v
@@ -82,20 +90,20 @@ tasks:
cmds:
- task: generate-sqlc
- task: goose-migrate
- atlas-migrate:
- cmds:
- - bash ./hack/dev/atlas-migrate.sh {{.CLI_ARGS}}
goose-migrate:
cmds:
- bash ./hack/dev/migrate.sh
seed-dev:
+ dotenv: [.env]
cmds:
- - SEED_DEVELOPMENT=true bash ./hack/dev/run-go-with-env.sh run ./cmd/hatchet-admin seed
+ - SEED_DEVELOPMENT=true go run ./cmd/hatchet-admin seed
seed-cypress:
+ dotenv: [.env]
cmds:
- - SEED_DEVELOPMENT=true bash ./hack/dev/run-go-with-env.sh run ./cmd/hatchet-admin seed-cypress
+ - SEED_DEVELOPMENT=true go run ./cmd/hatchet-admin seed-cypress
start-dev:
deps:
+ - task: goose-migrate
- task: start-db
- task: start-api
- task: start-engine
@@ -125,30 +133,38 @@ tasks:
desc: Run Cypress E2E for frontend/app against a locally started API stack (CI-friendly).
cmds:
- bash ./hack/ci/e2e-frontend.sh
- start-ngrok:
- cmds:
- - ngrok http 8080
start-lite:
cmds:
- bash ./hack/dev/start-lite.sh
+ start-ngrok:
+ cmds:
+ - ngrok http 8080
generate-all:
+ aliases: [generate]
cmds:
- task: install-dependencies
- task: generate-api
- - task: generate-go
- task: generate-proto
- task: generate-sqlc
+ - task: generate-docs
- task: pre-commit-run
install-dependencies:
+ deps: [venv]
cmds:
- go mod download
- cd frontend/app/ && pnpm install
- cd frontend/docs/ && pnpm install
- - npm install -g vite
+ - source .venv/bin/activate && pip install pre-commit
generate-api:
cmds:
- task: generate-api-server
- task: generate-api-client
+ generate-docs:
+ deps: [venv]
+ dir: frontend/snippets
+ cmds:
+ - 'source {{.ROOT_DIR}}/.venv/bin/activate && python generate.py'
+ - task: fmt
generate-certs:
cmds:
- bash ./hack/dev/generate-x509-certs.sh ./hack/dev/certs
@@ -156,7 +172,9 @@ tasks:
cmds:
- bash ./hack/dev/generate-local-encryption-keys.sh ./hack/dev/encryption-keys
init-dev-env:
- - bash ./hack/dev/init-dev-token-and-env.sh
+ dotenv: [.env]
+ cmds:
+ - bash ./hack/dev/init-dev-token-and-env.sh
generate-dev-api-token:
cmds:
- bash ./hack/dev/generate-dev-api-token.sh
@@ -168,9 +186,6 @@ tasks:
cmds:
- bash ./hack/oas/generate-clients.sh
silent: true
- generate-go:
- cmds:
- - go generate ./...
generate-proto:
cmds:
- bash ./hack/proto/proto.sh
@@ -184,13 +199,16 @@ tasks:
lint:
cmds:
- task: lint-go
- - task: lint-frontend
+ - task: lint-app
+ - task: lint-docs
lint-go:
- cmds:
- - golangci-lint run ./... --config .golangci.yml
- lint-frontend:
- cmds:
- - cd frontend/app/ && pnpm run lint:check
+ cmd: golangci-lint run ./... --config .golangci.yml
+ lint-app:
+ dir: frontend/app
+ cmd: pnpm run lint:check
+ lint-docs:
+ dir: frontend/docs
+ cmd: npm run lint:check
kill-apis:
cmds:
- ps -A | grep 'cmd/hatchet-api' | grep -v grep | awk '{print $1}' | xargs kill -9 $1
@@ -202,15 +220,15 @@ tasks:
copy-ca-to-sdks:
cmds:
- mkdir -p ./python-sdk/certs/ && cp ./hack/dev/certs/ca.cert ./python-sdk/certs/
+ venv:
+ status: [test -d .venv]
+ cmd: python3 -m venv .venv
pre-commit-install:
- cmds:
- - pip install pre-commit # can use brew install pre-commit if you are on macOS
- - pre-commit install
+ deps: [venv]
+ cmd: source .venv/bin/activate && pip install pre-commit && pre-commit install
pre-commit-run:
- cmds:
- - cd frontend/app/ && pnpm run lint:fix && pnpm run prettier:check
- - cd frontend/docs/ && pnpm run lint:fix && pnpm run prettier:check
- - pre-commit run --all-files || pre-commit run --all-files
+ deps: [venv]
+ cmd: 'source .venv/bin/activate && pre-commit run --all-files || pre-commit run --all-files'
docs:
cmds:
- |
@@ -231,3 +249,15 @@ tasks:
start-telemetry:
cmds:
- docker compose -f docker-compose.infra.yml up -d
+ test:
+ cmds:
+ - go test -count=1 $(go list ./... | grep -v "quickstart") -v -failfast
+ test-integration:
+ deps:
+ - start-db
+ - goose-migrate
+ - generate-local-encryption-keys
+ cmds:
+ - defer:
+ task: stop-db
+ - go test -count=1 -tags integration $(go list ./... | grep -v "quickstart") -v -failfast
diff --git a/api-contracts/dispatcher/dispatcher.proto b/api-contracts/dispatcher/dispatcher.proto
index 1a56d231d..70f5c1abe 100644
--- a/api-contracts/dispatcher/dispatcher.proto
+++ b/api-contracts/dispatcher/dispatcher.proto
@@ -46,6 +46,7 @@ enum SDKS {
GO = 1;
PYTHON = 2;
TYPESCRIPT = 3;
+ RUBY = 4;
}
message RuntimeInfo {
diff --git a/api-contracts/openapi/components/schemas/v1/webhook.yaml b/api-contracts/openapi/components/schemas/v1/webhook.yaml
index 3ba5537a7..bb03c22e1 100644
--- a/api-contracts/openapi/components/schemas/v1/webhook.yaml
+++ b/api-contracts/openapi/components/schemas/v1/webhook.yaml
@@ -212,5 +212,3 @@ V1UpdateWebhookRequest:
staticPayload:
type: object
description: The static payload to use for the webhook. This is used to send a static payload with the webhook.
- required:
- - eventKeyExpression
diff --git a/api-contracts/openapi/components/schemas/worker.yaml b/api-contracts/openapi/components/schemas/worker.yaml
index a07d21921..5c50a0315 100644
--- a/api-contracts/openapi/components/schemas/worker.yaml
+++ b/api-contracts/openapi/components/schemas/worker.yaml
@@ -191,6 +191,7 @@ WorkerRuntimeSDKs:
- GOLANG
- PYTHON
- TYPESCRIPT
+ - RUBY
WorkerRuntimeInfo:
properties:
diff --git a/api/v1/server/handlers/tenants/create.go b/api/v1/server/handlers/tenants/create.go
index 19a46962b..6494b94aa 100644
--- a/api/v1/server/handlers/tenants/create.go
+++ b/api/v1/server/handlers/tenants/create.go
@@ -1,7 +1,6 @@
package tenants
import (
- "context"
"errors"
"github.com/jackc/pgx/v5"
@@ -80,8 +79,7 @@ func (t *TenantService) TenantCreate(ctx echo.Context, request gen.TenantCreateR
tenantId := tenant.ID
- err = t.config.V1.TenantLimit().SelectOrInsertTenantLimits(context.Background(), tenantId, nil)
-
+ err = t.config.V1.TenantLimit().UpdateLimits(ctx.Request().Context(), tenantId, t.config.V1.TenantLimit().DefaultLimits())
if err != nil {
return nil, err
}
diff --git a/api/v1/server/handlers/v1/webhooks/update.go b/api/v1/server/handlers/v1/webhooks/update.go
index da6e22358..d15c5f8cc 100644
--- a/api/v1/server/handlers/v1/webhooks/update.go
+++ b/api/v1/server/handlers/v1/webhooks/update.go
@@ -16,7 +16,7 @@ func (w *V1WebhooksService) V1WebhookUpdate(ctx echo.Context, request gen.V1Webh
webhook := ctx.Get("v1-webhook").(*sqlcv1.V1IncomingWebhook)
opts := repository.UpdateWebhookOpts{
- EventKeyExpression: &request.Body.EventKeyExpression,
+ EventKeyExpression: request.Body.EventKeyExpression,
ScopeExpression: request.Body.ScopeExpression,
}
diff --git a/api/v1/server/oas/gen/openapi.gen.go b/api/v1/server/oas/gen/openapi.gen.go
index 61f5142e2..f14eff8c2 100644
--- a/api/v1/server/oas/gen/openapi.gen.go
+++ b/api/v1/server/oas/gen/openapi.gen.go
@@ -319,6 +319,7 @@ const (
const (
GOLANG WorkerRuntimeSDKs = "GOLANG"
PYTHON WorkerRuntimeSDKs = "PYTHON"
+ RUBY WorkerRuntimeSDKs = "RUBY"
TYPESCRIPT WorkerRuntimeSDKs = "TYPESCRIPT"
)
@@ -1896,7 +1897,7 @@ type V1UpdateFilterRequest struct {
// V1UpdateWebhookRequest defines model for V1UpdateWebhookRequest.
type V1UpdateWebhookRequest struct {
// EventKeyExpression The CEL expression to use for the event key. This is used to create the event key from the webhook payload.
- EventKeyExpression string `json:"eventKeyExpression"`
+ EventKeyExpression *string `json:"eventKeyExpression,omitempty"`
// ScopeExpression The CEL expression to use for the scope. This is used to filter the correct workflow to trigger.
ScopeExpression *string `json:"scopeExpression,omitempty"`
@@ -16264,197 +16265,197 @@ var swaggerSpec = []string{
"Wn/yrJAvasUNQblHJ/0WfC0q9Nu1ClUbYDelFZcCilPgBSTmC9kELcTT/RYtsB6MyNyg99JPOWUCcS+w",
"J0BgfA98Xz/kzhTRtcuNbUeTaCg4uU9DQ2TRU4R3tEfXz6bQaKzrG7grtkrLD6S0rOYMp+oAa9V+5MK3",
"cMRe5A7qVQ7dr4Uj5CXPUUpNLBF6o+NUHH0bO013lgSv24liFMpaKRq/cfHVREr6um2qPlvj9ita10f8",
- "58btKun6Pp/y5EltWOjK/mb6ZwCRk6oUenlwYXT7GQW390Fs1gFrxirUajiTXcip7PDcPQTC2noRmTa0",
- "8+W5YjPOuOaK31Y+sJYBpUoE4VeVNZVQ7jKTRuijKWitfzNkXKEgOR98qCOCOQQejO3Of962uIli2lpc",
- "KTN15ToqRVRfEUj5UNNuGoreNcVLKuPkwnGLaqpVziu61CkdhSFUh8YEUyXahET5tXagAsrSUWuyX+XD",
- "U/0Z1QLnCxVv4w/9006X/ufs9e/8j9enZ51u59PF62rspRGvmjyzykT20bNpL5bi1A09i4p2uREGshNz",
- "uJkFgCQx/LA2HdOhnXQ8rcBEs4CVX3JjaLjeYvaNsWEqy9AssJqgGOKbIkrBk37FRdBqaWSg4D0NPB78",
- "H1bQcDxgYTP8j9vRZTV57IVzndRpLF1myoeZgob3g6vBiMmY98PJh9u3zGluNLwZMH+3/vnHTrdzObwa",
- "9Jkr2+fh/zHhJ7sPbj6kudLno7mnhDT6td4SrbfEj+Ut0To0lJ8h1jRr7rdZ/mCswg1fnGueeDX2Y/Hq",
- "u5YNmbXODMjZFSf/4Jt7f03fdlWrn3IaXkAiixIUXGaTwP6NXyQ0wHNQb7FQI7tp+3dhrIFHPr+wfBY2",
- "QTSsYXaXzb/drx8WwMHBm0vOUusOUY6M7uRwItEtIStvbV4dyG+vVxOLsoXqkOqUVcC+1BuGqh01eMQw",
- "YHxTDxpfdA4UEkXmxewolK7g5qPGXPbfiwJpWpVcaP28bMNGCxY2Mg+Kkg9a5TeJDUmdZd8k9hsZpYTx",
- "gI6r2+scSnjKLHMq/00tEttdn6lcFfnC6SnmDO+dICROFIePyINe1wFODAIvXMhOT8j3nSl0ZjCAsbzG",
- "qNR1tjWMN0ezt58EuNre7JqUUzhrkU2lljl/9E6tFHnxY2WpyHUxMqa4tN8Bw76xB0YQeFm9xJgPtdqV",
- "fwHJPPQarVaA/on3THX789AzUO2HyeRGZqJ2Qy+l4Fgg3z56/w7w8H02c27ir5YIryYhgcqac17SvGxt",
- "ncZLSwEr086ndOsyY9ek0+3cXI/Zf24nTEsynZA8pAlXxTth8X7Cqxq5IHAiGFO6OrKvL/fc7YBHgNhl",
- "1pxDLZdeqDwt/AbdhEDHDQNR+9JfGnwUEY7YzVqbL4tSHUoz8QGM0SyAnpN1Ypan29vhhSPYZ/c3Sh9M",
- "oY+rC3+yNoylct4c/BiwI0UuUOk4ui3zASYfIIjJFAJSZRvIbRWr48oqMABnLnvnb+VnJ2dnvdOz3umr",
- "yenrNye/v/ntj6M//vjj1es/eiev35yc2Cc4AZyZqXowwARMfWZs20NIF+CbmfAX4BtaJIvNMcD29Q6z",
- "vhFDF6bVS7Epiwttw6NEePW6MF6FgEf5uTQ0HIuqP1nVT1ybcQk7WS8nDNR9aABZcV4tdElACWYY3Id2",
- "vDpSOtBD1w9N5xSGCxDNwxg6tJEQEyuieSzHGrP5dEHy1uUnsqnTvDLnk+FnXuo6/fOmfzs2hPDaxI1w",
- "ZKUxI/zcNObaEic5l/cFIOuNebz3bZ1ufDu61AzfVFVm7bVqjiLKS6d8ZU5cmUWJdt20k01F+Wpetrpm",
- "8uoUoBV4ePkXTOOlIAVylGf+Qu1qEMwS8aRlLRbGFx8xPxZ5Z6X0czlxjV5tExJp8I3EQNsAew/mYUuL",
- "YxCpyun1ZZ/F7t/8c/KBPZBM/nkzGJ+PhjcTvYUn42TV9WFw+e7D9ZiH/n/qX/V5SpGqEvmpGC4bMVXa",
- "1Me1pL9YuM12G5QJ5QeOLBSqLy/5dzg1CFb6RQeQFX3+GU51gnwnmoMRc7KonEZ5A7PV15paF4H2alL9",
- "wCScyrIbQ+UKxAtNMzmhPAZJZFZafTXnQurKb5CJ4pGA6426YvkzSJTvrBS5xn8hkEkweE62GSRY+Cmm",
- "XZ0Z7ZuedYrlWYswVi15TGJA4Kw2W7QC4WWuX3MNO1Oi84WUi/lmX53VGybk1MXVdLVYrdqi4YUuLV4K",
- "4PBCi0PZ+yMKcqaAd7dX55MhE7MXt6P+20uqWl3031cKSDqIPD8bUTCbXcNe8rv+UF4rJm/H57lejX+u",
- "2E9jLiHGJB9hVXgdCQnwdRSb8tgDXBq8YuTwlCztIvjkLQw4OIIuukduNonzSwQwhp7ziIDwTv5VzxVG",
- "RDRwmcp+vVFakziBmvHrXiBV36P0Wn96cnJi9CXSDpP3/mnoyNNoQX+HUynGbM9xQyGAtaNd+Ym4a9MX",
- "n1vc6V8GhJw7zCZdW1SvBa1/i7n0xNtlg8EnSq+yw0lDlcTosrJOLulsINUZRQH7a7Uw2ZMbnuK2Yn8o",
- "jJJgjTy75VHeIejnzn01jUNGyzkppkjGmknG0h2nld2t7G5l90vJbsMcP6Bor/DnW0E0s9GGBC7MHoKG",
- "+0p9Z2P1tTFLjVWdgHVNn6ks+9bGk2ptYECDTC+maC3mKhCL6pYQqYxaRz2lzKE3g6sLnjA0Sx2qyQqb",
- "zyGapht92z//eP3uXe0pyaZd6d6cFyhmYpzkxUnRYyQMbhTJX4KVNhi7c+glfkXmdEPntY+jL8W8GZYC",
- "pmazMa+qbfSjyaXr2CI7VtWnwrWLMBoJWAbeJnQkhzrnHeu00ELz0vwZQ2iTDVfldZZMp/0omEv7TfJo",
- "82zRVYudgJkOvT5XGdc3+QcbTrYhzLocwir6EULhPKYXmXu9XNCyNOfLO2TgxroJmfu4dkYmR+7Ek+Om",
- "p8X6FTbXDAp400hemAYNrDJwip/NKvdc3dKjL9PA7sQrRHM085QjRnm6yZetKjAUbbbIsrknDJsNUV89",
- "WP66e5D45KYy645oZMy+Y/VIIG6Rf2J+8C4MJZH+HF9fORzocuAJG0EbESqfBV/osS+MPe5PaIEGLNSO",
- "CVrA0FAsBRPkPixNDiT0m4PFs4rdS6IiLxqwLdPBHk8LL2VWOFb6jHkyIB3KHzPKNif1tFngk/Kebftu",
- "0Th5qvU1UC5LEkZuoK/1nM7IapNvQ03ocy/2ZFcI5w4V2aNQocpsDJmT17m5xMQCfKtp8dRM2TfVmeCx",
- "CwmVv0x+cginEMQwltkrGEbZscJ+zjZlTkjErj1h+ICgbI7orvKf5Nv5m44Iws36ikQmtHeCSbiwnOyZ",
- "SXzuzKPxf+ezOP2bISt/RJhNLP9rSoid06OToxNGxzwMufOm8+ro9OhERBQzTLCoYV+UDZ3pQjzey+d5",
- "2iqAGDupPYZuOpDFLjqX4vt7hgbpks9mOTs5KQ/8AQKfzBmKXvPvbhgQUf5L1BemTY//xpyvcHoA1vDx",
- "II5DKoWf2VGtznkVknQdOeLovPnra7eDZU0PuuqsofQp+UvA7M6h+9D5Svsz/MUQeMt6BNJmqAqDI9lg",
- "31HIFuyQ0AGuCyPikBjc3yO3FqMpBmpR+nh6DHwqUoJZDy4A8nvsIRkff2c/q789c7z4kGhuTxfsd+yA",
- "NK8T7e6w7vxturQLfdpiQBswVws+AuOZGCwgYfrAXxVOPqUZHJH3uvOGR/KnQqO0lI4q1Pj7QLZj69Vo",
- "/Vqip9/K2Bonrgsxvk98f+lwlHq5pFgl5D13O7/tivL6zgL4FAvQc1i+JE8GznAwXm0cDB0U78J4ijwP",
- "8ttHRt+cTqrITFL8hDWhh9W3XixUDvaB9+10NYTxlV17iavJms2vW+uQOB/hxyBxRg9vQy6PN0IMHDt8",
- "0wqISyOvymRSiS0SOonEeR4bz3qxv5GFaJeggz0nBjigrRiwFAOcWrYnBtQDMkI9Ej7AgJ6K8m92Gkah",
- "Lih/BB/DB+iAgKXmY62Ft1Y6Y0FMRGhCW0mDDu1uIyXS4Q0yQcK6V8ddzJYn6JxB92MTNW5C1YJ06MZO",
- "xM5JMs5+q6LkdMtzFOz6YeIdqzd0swZdynUmrz1sEAcFmIDAhSUiPqefpXuJWbHePm4ZIE4SpEGse0Ng",
- "NVo7R7D6Xi+2/pPywvatJ4fohRF3dhEnmrLf3Bx+/J3997lqv6mUYq2OShvKrOJ8I2slEU8JbFJO2Ned",
- "CqHNbbZIDlRzePOaGo9CrHFssB1rZVuOxBXMZOTNUVwh1Tj9fDVT+HGdWGPbkkq1Gpq/SAXYz073F4yE",
- "W9rfL9pfwJXPcOPpvbuDW+QMa0JT6ZF4IAf5Jo5wOsYxs9PzXcLGHb9EmF6AfCfX2rTBtPUw33Bru03n",
- "EjuuTNlw82UOl9zq9okQ0q1nG1HYhPL+5zY5DBAJqTQ//s45/vk4isMpNF8u5dunA3J1Bphdl9cpyEXw",
- "mxk+nfomxGSUBDdsXnvblOnQSyXXjk+9CoISuTg4PTH8Hu30VLgKCcs3H8boPzwnucjKw7NV8CjNkpmT",
- "AORDz+F2e4dtj/NOyPNhtq36gyNHZtgH7sPxd/YfCyu+M6YNlXIaecphX0V6I3ujfW5MI/EwEPfSOp/H",
- "yT6pNqe7AeM2yEiYT/x6NxPzrFks+SDw/fCJTq97EShSrRS97PcqFYsTXZ5jAnz8HQfYiluuxqrUL/NL",
- "gBuwSX4wM6OIk3vv2KSAjJZR9pBRSgSbssrVuJJRAqxhE6m4KNYmvepC55VX4hKLNH4bezH9o2s2BPAy",
- "PCtZAhQYzl6/zgFxugkdKIpD+g/otWfYHrGm6RLJKhA4IIoktZePNd6mwI8ETH147IEZPk6TlxsvjZjd",
- "Glk7h8wBcabQD4OZmlUgTZQNZuUr5efTC8DKj05ESe16c5lMUZ0laOFJoxnL/DuB8TLjGQ/M7pBXfcxt",
- "K0LESu4U4H2pi4819W6sJvoFmKW15LU5syrkEJ1Svv6xWX9uK2G383pXwo/eQtEi8uECBqSkGzDjhaSD",
- "9Okc4AethGENj7/T/9Q8L/FaDdMl55uiAKETWJraeY1606FPAd3xkZ8vxm8QCrKcvwpLKRZqm3b8QlWK",
- "RqY3htWfnT9/43ef7c86UeuxU03hPkx4kqY9EREZP5dEhPnOQGxEyLEfzup0FT+cOT4KoMx8JOAoSpTL",
- "cHaJAl5R5BClisjyREKHpSdzpkuDZGGfO1poUEBYCcFy0KUh5WtMRMLl0JlBQlHNsGyYGSNuedTMXJG6",
- "wXBvSvPiW02dBAT5G5i671B51yPwG3EwBLE7d9hMSkXfivWzDjqRXr1WRsHwEfq/4F/pRChw/cSDpv2l",
- "LXFHq+1WC3zJAnQAW+XWk8ltKGAsSsVMeezz3XR5l3bKQWkFXCmnjtUha7U9e3DkqkKogUIsoljbd/O8",
- "VppKfuXYuQxn65869P97Weiw+XVVKTVmPHjSSmI/wNGDH1BkYv77eww3cu5s9aTbvkqd7fUKDjLttbdV",
- "q3MyTidh1lexWQvFRO9C/9iD02RmNtIPHoGfsIpFzvng0oFpuXwHzAAKcFYBTFS49QABRxp5eA79CzbV",
- "obgUbD6i5fPp+eCSIaEmgIVhElNRyCreUjGhR/5O41hU8GXaxRpRBwX1eJo1tHqN+hI3TWYlFlN4/nxw",
- "aWZ5K1630Gv4A0Be9KR1eYv83Ey32cc3uh9Jv9HcaKUx/wEusXJRMk5L2zW/XjIyEBH3dRfL8zDAiF4l",
- "BYmxR6bQZZk3PAfcE5aDBmFHXNu3aWyohmUK78MY1gKzKfPDO741JMxBA2JW4Sx0EZOgT4jM1be4YoFj",
- "DXxZWgnDzm75mcx+Xbk0/s4CEHeO2NOjC2MCUJCF7letM83GB1cylBRKl1svLt0Sscrpkh53KHb4c6UO",
- "YpGw70W3Zbp0sgy5mY84K5qV3ksMNpVyAmHtQjTFHOQ0D3DZ43WEIoBi7PziQSb4KPctHeD8682/fi2K",
- "rUonCDvDFnbDCFrJQ97Sdl2s9XrwbveOan8/bS1QdRaolDcswzYaKGjH7Bi21NL42W6lqX2Ey0NR1rYe",
- "xiRx0ZQRGLpbZtAxgyO0xy0wxPfH016DwFXmW0Cw3r+gSQzrHvsVmmCSmDpQ5hT70x5QGwktxE3CClPK",
- "seJMruPYHFOiZe0ZxVXS1pywr+aEUpVdCwW69vZZOUXpisgu43zOo/VLTTS7K+BkiiFxXBB4iOWZkXS9",
- "0dtD1YqdWww9xkYcFkKvx2V4AJE2V/Z2byiasdOLh8LaDQS7FDGtZM9rWxIvmWzn+K3StbqGt51zVmrI",
- "AU4An8TARtHM2/7cjzcMBRwdNg847P0mJWWHFXbiVv1dvtkI8qhjPVF2SgG4fZLe1ZP0VfYKnWP4lD9T",
- "3rTneXstjl2w+N824Y2gTlI0Tty5X2qc4FbE4rI9uRb9ZSvFxGHetixFg4zlbMXCS4oFW9bvKoRJj/6K",
- "UIxUgTcbTPhsh2wxSfn5J+fiWUjaw91oMVnhjC0yWmWa4Ppj88ADnnPHZppk9yUZbhtXAL5JK18BXiD5",
- "sLV8kPmGW/lweKe8hbLPfNsXWbG6CrVASEYZCOzESeCIntV5i7kHxSXChHtRyNp4hyrTypFQChpq/JMs",
- "AF07OKoemk05KBVts8z6G3jcW8c8fVrTDL2QpwuFm1etY6T8v7CacsAAtKhyR9vfydZ3rPVWiS1LgcDf",
- "+JirVFp5N4u/NSQb4A1RMLvjNfx2BHlf40D00HsUPj0WjwSZJ9HdotKV6GWN2FSwjZJASrTmcdOqFG1z",
- "HOxPADPbm0V6UNnFWNifuFGIAmJ57i5QkBBIr+PyrxiCBy98CtKjuMEx/B6SGzr5oR/C7MCTvsFK6I4w",
- "WHe6SpX6s5Oz094J/d/k5OQN+9//Ncgd0b1/z28imzggGaSp57AKakjhWwPYexQgPIfeWzZ4c3C3Lxtz",
- "pLaCdGR80srHPZWP+d3ZuJTExy4rBW6OQuOlwtN8NDp5x5v83A+UDAVMVakpkMRzfIWOK5G20ygyNqkP",
- "PZ4nrPZlUjZvk0S10bIlGVWQDBuXTDGMfLCsKu5Ev1dKJt7kp5ZMHAVNJFMskbZLycTBtBVMsWjdyqVW",
- "LpXkUkEubFAuidSfNt63Mr16nfetyN7eut/us/stJxeHDmsXv8baX9HmqwRDCpoYp6PY2lsl0VkDKjpU",
- "QFo9yYt7uKrs08DFNWXk9i0+7+OaIiaTmwLFa3u5mopYpJvY+rkKP1eBjyav3JIpX8jTVdJIE1fXfUx+",
- "/nP7upYzm1vwfgO1ibm7in/Y+bvWyowD93ilk8u3R8nC9b6vGVbMwO7WDm3L/9KfteX9vXB1qWXvrkpu",
- "NS6tkn6FT6tQDw18e8hurQUF+EfjUemt2vKowV215piEAT0FezEgsMduoHRzxd5bclmdP2vtsXjgHq3b",
- "5bDteaf+uIq7dFFtBcMeKe4aebD6ya6/wd+EmOX3QIEbLlAwS+l1ATEGs4oTfgRdiB5bGdREBgWJ75co",
- "P1g6EVj6IfAcFDggWDpitd0Ogd/IceQDVKC04pTrypDMU/AmpttNEB2HL1TMFU7/hm6VDS6Ho3vgY9gq",
- "FoaaY5zpNKy2Knfb3NGFv3AvToK69418xsDaF44sQ2D7yrH/OUuxyOJo9c6xs4yPzAcfxD6CmOW5hlbg",
- "bTEgwAekCSgbq5ayN07flnlqDiRSgQKRxtDZZNeB8Zbd+7/MIZlzASCq0zgX/feYnl5h4C/V39OagTqB",
- "FPjLO9mgVkmZhqEPQWARz5ErIGmBsxcK7dCUuTTGeFhk9X2xWA/n3gczdtQ+CboIY+Z8oZJBercEgeeE",
- "CaF/CtURU92RNpB64JFzAe9B4vNc9/+i9PAvB907SYAhO8Z1yxcz3clBO5UktLNaek1ff1uHoX2ruZHT",
- "KFVFV/4+or+v+QqlarjHHsKRD5Y95ipRo++KtnRY4VoR3lcowdU68AUfjLlcHLQ+rIhWnL5h5ZAiYiUF",
- "+gTqzIqAIktfpNTwls3vWhJoRVcrupqKLsknPcon1ZIrx6NMe9An+89S21VIroEYbOgdruBq77ntPfcn",
- "uefu7DjL5EJ7mv1Ip1nu9NjJySau1+aQnwlvID1K8xf2iqOrdS09FahTkFLzTJ0jBRIK381dv08rWjMk",
- "APm4mY+pSiHte1PR5bPAQBtg8Dw/M39P5ZeaMhJ5kgOBxxzJ0vOfhOlVUhRK+p+Ox4jifzpOZHiMzujH",
- "0uUsBwO3bc5YT8MLsLK8g81juAKXtaf4Hp/ixdA3S4bulgh6BRY/FuXiqjid8AxfJGGGozzfH9Vy8VjW",
- "o1uRl9XpFXX9x2Rt9frZsvSeOnidh4nv8VhaepHUaS57lJckx1VpccgXkTUs0ZNFeV0WkssD3Lml3v7q",
- "kBaYtzZ6/TzVaDKxqjWA/LgSdaWKjq1QbfWkouwiaIGCWb22JNo1ll7vIZmIKQ727qOVQR6MyJxnK+EZ",
- "zRx3jnwvhibXDdahofTbviDhm9NKkoOXJFX8uWnxAiMhU+Sfz8cgdufoEdZpQaKVAJN214qQMYGRcNft",
- "y4EtxIccz2g9lfC2rrura2TblEli38WeW0mlfELJtibo7nMxpVxXyMdUFlI59leYX8onuv1UNlWJppSF",
- "62WSzb1MlO23l0cDWV+1lUY/iTSyv2u1suhwZJHC+NuXRH44q/OU8sOZ46OgpBuVzdGX4ewSBdDWGtSK",
- "oZeNZ/LhI/StXIZ4y9zMVcwg6YD2eoeg7xmzx0F68DpsNgWOikImrENTQMa8lzaUBLBAgTD2qtbPPr9d",
- "8rU0nPxa7WvAA5/eQzF0RaR7BRQXSrNVIMn6b/eQUqVBWzx/3fRzqRRWzoLLcNb8GBCORhVpzZkHBBae",
- "RAbH/Qn7+Vx1fNm0Yw4fnE9Ul6CXuya9jCsOh7CR841A6o9N4yt43aTElmamFf40RSLXUXTqOldrMuau",
- "MeKFvZLAmyZjSgM7xAzGJ5/deMu9LMXLdEktte/2tsGJ0Qshv2jAb/wELhXRsGW2XDbT6vxLAZ8NBbNq",
- "vjqcLExb8jrlCGhyuEVpfpFcMdP2nDukc07wyQqsV3HeHQOfEkYw68EFQH5vFodJVPlwSpU7eQsU5MXG",
- "cNgAjhigyLp92mRAW7ynDQ4l0mn7J6EOMQ3LTRk3oeWd/GtiBbU2Osesrz7lueoY46cPqVBvbgXc2J11",
- "JZQ3utqdbpe9VzgBNTTU8rX27qflts2ekscYElLnWoTZ7skujuxSnc1AIRcUzMaiz4Ek9N3RMakgZo0z",
- "Ut2TlpU01zoNmjbGRxHqkfAB1iTDc/o3Q4e3q+aafoQmtFmrT+Jj5ld0M2T4wCMxS0M+kf5RrQ29qDxS",
- "iuSoVZgh/XGdMi5BRu12xN7qiAwBktYVtXCbJozipC1/bThsNmOmhgxWdeBYeEvxynI5lylT2tXMaaZN",
- "t7rX7gkPcGnlnEDbNU8/w8jgI1za5DXJYErdl4cX2DYfJpcVjQGULtHDixVBzGLQ1kjlYwPhKAl4HKUw",
- "fL2Iqwfbz5dx9GBT74GbhwqH6uRRQSxZBiG4dB6Bn0B9HiH4DSwiH1KR/QCXp29Y09NOl/7rjP/rjIr3",
- "6nxDnzabbihbBk9cmmYcqqZz1nh4+JmGVoq0a71rArPPpaK0MOSub0Jm4xp0kPYKwBDAcFFjFhaJiV/E",
- "vYdTQhObL+Q9fnbv6rP/3s2sI8GfQj2F31wIPWgo5cj3pgGf119MjqeJ/2B2p3ub+KKGEcSZTMCVQoH2",
- "+YkFA11+Q+GAX1I64ObioY2+2DP5wNhUFRJ4w1LCBYEL/Qq3W/adGzKUxNk5FdckNbhbCR/hZ1YoGALs",
- "FQpxYYhh5IPlxsVGpBSE+p5aAkZJMOTJibdVxMO67pQQTQxpMMtR0gqpvRVSI0ap25FPzIxmaWPltjkL",
- "O+tHuGyf9TJj40q3dYbs9sauu7E7wva7ST4Qp4HxnOY8iJsdzSN5xPysRzNHwL4czZsxq3HgWq3+Jz0w",
- "v7P/9p4QmffkJ2bdrg0/AgTwwzOoNBBeAALeQ/IFkflEsn2t/JDsoxcfJZB3/Xb5w5/ydNNWScfAqKI9",
- "5fO+bApmrHm3qyHyan5GwSMisGnAhOyldwIdsq+t7it9PxV8rOT1KbHd+nrqwiEyWtxSDASfoJLW2+cs",
- "JeqBo8Qu2IHj9kUjHDi4qwQ2CML42WN7z852pPUCYvfOVeRbnVyAAZj6sBcDAntsTMoegtdW0YuFFJI/",
- "9Pi/n7mI8SGBZWFzwX7HqRnJRtDwPgfrvZfn+mrYeik6Dv3kr5UtnEL2Wbbk2IwTYUauJl00v4+1EfTN",
- "OOFwougPhRO2G+i/mlbwYqH+lpzL4TsYzhUh+I05t+rkW8DFlDFfoxuk7KVn8U/sa3uDlNSo4GOlG6TE",
- "dnuD1N0gM1rcTJCgGO/4O//DQgl0gADCuY/DRV2QLaeGH0MVFMs2wcY/75R3f9sK766iA/4cXLtHuWqv",
- "DKlpUybNbUwDedGVhGyRRqo0iVkE/Bg68F6IgO0qv3y77JRfgY49SXllKb00erDYt1Z4vbDwMsqVFYRX",
- "ldYTxeECkjlMcG9BdVC3vnxR1sURXVIfvLrMlDdp109ish/iokDgN3Ic+QAVqKI4UpM7QBnLLVO+NFNS",
- "DtDsy6ZuIP9OYAKt2ZC1bsyB/6C9Doj5Djuy+ZCCVbdvD8nR3moZLJxHGGMUBq1M3CeZmO5OWSJKzllV",
- "JmZPfTau3nH62Fjn6z0CBF7Shm1ejX2uTruJHAy1mNxmpoWUzvYg20IRll2V1cjzWoNgAoWdWz/DghVc",
- "xU0mbpm3xSX/dVWJK3r0otBH7rI+5aTs4PAONgknpSv0DevRpps81qFltUejwm60j0c7z9qKfeA+VCea",
- "HNMmzhOczsPwofycyj5/4V/b51SeY1LFSZPbQwHV+8QOO6p4fBuAhMzDGP0Henzi17uZ+BMk89BjFT2A",
- "74dP+mrLfIOYHshZQD3P2Me1GPEYExATIzuO6Vd+jl33EzJ32GWlyJC3WD7bMICuKUJZz0PkzFcnZxo8",
- "qNzDUCaOlRxW5hB4wmvEDznB1Fg82YZDN4kRWTL8uGH4gCAdlBVF+qrSA0NpfkZJCHQHVqaDury/46tx",
- "kQALAjnArRwWcvhqPFRR1UASF7HcyuK9k8VlRkgl8dV4jXTDhYF1DNZGYzAE5PmrMsvw5mg2P6l1VEVx",
- "V1uG3iOGNnKeJUdXnqiiTmdvF09WonT4ob1cbd9coENMM5tBWs86tzPto8o+PKqke7PpZ2ZdVfVK1s0K",
- "qDvTJWeowunNCfFA7Hjdfa3svk2JIbZoRfnQSoSdlUJVafEJ8HqodSJCPdTpT3SjV62yXS0nanMC9gmB",
- "i0gkt2RtFfFhEhyHlgywlSBVLvEIM19pIUI4Efj7d0F44Ue8OkbZFUPHkHasyB3Gkiza8jBr3rLwPmYz",
- "i5NAbFWNRzsKooT5Q/DHXd1yn/dCU2lzmVXIF7bhLyFQsjVV2gJ4M+EsUCdc3kMy5sO2ouXltINmWXoN",
- "lgYxXHuh2OcLhdylrUgNAvBDDxNAagyGAD+walDCUlhjJZwA/DBmg9qLiOHFj2gbTBHRgEO1uG55dA/M",
- "gCY22EV6JOE103sK44eqZBGZA7bRpan1ZsqCSTgqvjCkUoRUVfWkyEgDXnhHR25H+9y2b+/nCvmvnsRQ",
- "DGJioZ/+nTzHPxwbOyrGq5nZa5SCUG5ty7n791CuMt5KhyWjiuqHNHpCcuFd7SWfnQ0//WGZYaKteb2R",
- "DNVSe8jH6K3uXSkRzQ1BzWtRqNV/NSUplJK9bWEKpTCFghdcY9DN1Vd+uTIVOrity9krtt4cwbSX1L0s",
- "X5Hfo3I4cLUpqYnA+a7+s86PJccJtSewINNDdmspsL4eNBWDB6wmiO1aNbNA6+ZijuvPvyDVx/R38zS1",
- "Oj8fs8fI2sck/mTJGVoF+qiGr4ds9Ja5X565sywmN0oRSg7jOu9OeRyx7W7N2jsya39RcR/Y5A/JNqmp",
- "yrA5iYPnIIJb0iPGbOxW3hyMMsE3rNUofiCNIo1dET5DlZGholI7Y3HfT9/HsUbXqGJ9FjjJXVkGsrBf",
- "KwM2DuAlwMQZXrCE9XPo+EDuoClNEcBk6BnzFL060+Up2oGPbZOCnqWyfK1JZP98a1aQJfaON3ayEFu9",
- "TLCWdhrNT5k4zYP3IPFJ581JNycqdpFCLZ379SqTj3kmtenSYRPoJxWfzPkcdqF2tY89m9e3NpmSMR2z",
- "NhjoXMY1TAFx56XHniqN6XCCgbbl5aC8k3Bk2Lrti2iS8lPJph97IsVS8z1V+kZJMPRwLvXsWggu59tt",
- "aBASEUjt61FNejRONrt4ucHHbhwG9RoJbeX8HU4zoEiMZrNa94nzOAx+ajXlYPK7phuLPDrtDJJUJT6q",
- "SeNturht4a5LZ24K3lWdKqWdklF8k+loh+ZTHWaG8oqcudOlcy/y8m4sda8qRbB9+t7pcnsZfBWlYMc5",
- "fHPIWENDb49djZZeOue2pK7TQ/f4O/1PT/5qV+aufBBbP3xQwjnwonfp6k1g5TC6+7J3lvXptJvY5gcu",
- "1ovTo6nZW0WeIL4+d6seE9dkrkN2T9pjztrS0dkem4dg2G90WG9EPtSVl2SzpjNaC4cDrzW5X/JhW9Um",
- "VQEx4QYOK1sfpQJewtHGtlenKqjFIFtVoVoOCLbchiiwU+XZcWD7oKe+Mta7KbUGs302mLFH5AbWMtZ+",
- "h6ayfbTjRSCmSDO4rhTA4o2/qI8ZO4JPkyJGC5twEtkuXH1tfBZLRJBgaFVvUbZdxbo1Zn2FnckGuAcU",
- "eFZQsYaNQfqIAq8emoM3phK0gA64p4CWnKefAJaxzOoSOmcnZ6e9E/q/ycnJG/a//2s0VrPufTqBnnjp",
- "sdqjUHRsq5FTiKfwPozhNkF+y2bYJMwVWL5HAcLz1WGW/XeK500BvVFMb+9xoGyJ/2mfBoq6Y2vh2Iq7",
- "9HbeBJiHtE3+fuAI0OhBl2d/NaG/ZSDEIVegbtXwVg3fvRre6patbvkiIVB4zYrtTAC1lUXqz/ctVE/P",
- "znkKqpf49HissRqmLVexH45l59aKuM9WxO3di1ICOCjPqVaZapWpg1GmsmVkonojttkUJCsGT620Gpi3",
- "GiNZkjCt1WGzWolBA9iuXnI8TfyHXuaJqI8oepv4D8KpbUOKCh3xcPwTt+SHUOapDC22YUfT+q3ZbR2R",
- "yjWZE8+pJBan7VoJISXEW6t93rqk4O4qNZKCN3J+iaHs/esGxcbhOFftVGzINJ0NxIbYp/0VG3JNNWJD",
- "rKMVGwaxUbvP2xQb39M/e6WckbUREHqQGwqNA4+D0ODAWM1Ii+q9DY3Q727r8FiMjTDgqZnHo4E2aqIk",
- "NsKAB12h+KC4b5sHcnvXP/QYim3Lkepoitx1YEOS5cADLfZeuGwr9qIkXRrUR83IqJz38WWvLLUSUg32",
- "+CmVnwOo/nZbdVnalKy0u0SlKTSfs8wtVWWsHOAE8Mmcv8U+fYuIhzqcolf1mUSqc2ZWgrYj0cixvWpY",
- "mqgcbdz8ncrGZsG3aq0uM/ytZNy9ZNy7QidC0FVR+XZSZymyOOfUo5fHUjcQEtlew9UpRq0U3qUUljuw",
- "gmZaodbtuWKqSuBWMW3Fr0n8CoWkTifeuMjl1fN6bpgEpCZegrWRuchl2UfwCJAPpj5k0lcRN3r7wntI",
- "eHU+fM5mPHjRW5cy/sBLRuQ2a0UzJScVTj7tC6LBYTqHpNUKSeTZP8EwxsduEsewmrMxvx3whg7tVuLe",
- "Wwzj95Cci8G2SHd0poZ0xiBuCxC/fAFi6CYxIksmxt0wfECwn1DZ9ddXKqoKSYfy5CbJnW2/hoxniMyT",
- "6bELfH8K3AcjOZ+Hi8iHBHKavqbzO9rziE7E7VHv2dDXFJfncvgCgb86Oat5e3XFvF553jkEHjvcvnf8",
- "kG9Gfh+KYv25gMwc7uQC83NYog8TEJtFwZh+XQ1xrGtzrDF4to8zBl1DhIXhzIfboTc29A9Obxx9G6a3",
- "DHE/HL2h4BERWF27CbNoJqkN8w5M6bY6vukIE9Z3KOba4imuTmTlzO4jLDcmv8BWX7Q+VllNngL2Msqb",
- "aG6IOdo7Bq4LI2K2vPXZd5xa2MQkJWpTN5/36WzHnsQH5xMphiSDAaiC+vjKdfTXekyl5MWxXdp7e/qK",
- "IatuUVFJn35vRl+8T2dbdenp4BugL77ylr4q6YtjewX68sMZCsxkdRnOsIMCB7Cz8ahCwbhkA23JOYMe",
- "wXT8ekLa3T3aD2cz6DkoaK/PL3x97nZ+Ozvb1bqjOKQ0wIy2g4AgsnR6ziPwkccmo5simqBg5kA5klnh",
- "ZYStv8p3O996MKBT9WJAYI/ZwKkOzd9qdMwcJqSGm8OE2LFzmLy8sUowWbhnhbpbI1WNNs2ox9Y+tYCL",
- "KYzxHEUN7nBKJ7t7HD8DP2XdRFKKrRK4ftLmFzoVRe2lbpVLnYrBepKMAMZPYVzhSpHmYqcdHNm+SqTe",
- "yDG3pySdz0EwSyfaJ23JZZB5KaJacd4qTc2UpmpW55SfZ8a19akYzqgkjquu3bwFrlSpUk+pbfG9BGOf",
- "OF4ir31obJl+MzclSeWbuSxhH7gPW3mkGtOR9/iNqkaSNny0eoQxFiAY3Z/oGkQ76QKFYfyo0dKHwX34",
- "HpLPYtCN1iRWIM0yNJ4enRyd6HJAKp5Hf6Vdv1qUG55ULLbgbVlB7F+gE0OSxEEOeYWbDhWzSRBQ/kmn",
- "+NaTQ/bCiKecKrPAE5zOw/ChJxzRjr+LHyzC3+lRJ1qXHdX47/aR7WIgsyNYOtGO/cAsQ8UlfO3B9vLG",
- "iWJ4ukqmRu8v0eKrFXMcCzzbmClkU+FXX8MxQnHDtoky95ZvNuM/yaHn7pMCNRQzVRlXKFbSOiACO+l2",
- "tey5R+zJrDKlLWrKoylvsj+ea7yveSutYzVzzrTiOe5kWuWzrDnjD8djubHvqFhxa48sOSWXAr7kBcXs",
- "g8zU6vrKj5WEbJ92YC9oeVtR/Llzw3RWCAwkEmW7i4Oy5DU1KL/lNEPNxXWYrXCaFIN7rBKBNavB2uBe",
- "tJcRMk2SaKUAtgF6L5w5QhCrQjErxsd06zQse05ooHL9DIFiKwaHtbz10rylRqGtw1g2ap89dzXTA/eC",
- "wTavC+aRYRsrL3KS5rhs18qhlUQoqoetPDAqiOsxZ42aaFUuj25Svi5eyniP6UuH8aRsUB5vH/hZU6KC",
- "F5jYQP3g1asH6wGbxWESsbofGQhyo4ygsE4f4bJTmwZky0JizVpc8lGpLce1h9rESvW/GgkumZrI6Nwi",
- "s2o0TRa0Uo6gvZRcEw27HDnDe2bdxgmlDuh1GVf5gEBMUp5C2LmHxJ1Dz1QdKhP8e65ICTJYMfHQi6Ub",
- "UuBtlGeozS7UZhfaQnahRqJZyAZs8aqVO8mtxLLwrTkgE8yPIJe3LOWkw9R6qmAr7/ZKBcxIcVUVsOj4",
- "N4UghnHq+NfVugIyTzIuD5LY77zpdJ6/Pv+/AAAA//9+b1jkXTUDAA==",
+ "58btKun6Pp/y5EltWOjK/mb6ZwCRk6oUenlwYXT7GQW3l0FsGjIw1phWg5XsAkplh+fuIZDN1kvEtIGb",
+ "L0/zm3G1NdfztvJwtQwXVeIDv6qsqQRql5k0Qh9NIWn9myHjCgXJ+dBCHRHMIfBgbHe687bFTRTT1uJK",
+ "makr1/G1SkT1FYGUDyTtpoHmXVM0pDJOLti2qIRaZbSiS53SURhCdWhMMFWRTUiUX2sHKqAsHbUmt1U+",
+ "+NSfUR1vvlDxNv7QP+106X/OXv/O/3h9etbpdj5dvK7GXhrPqskiq0xkHxub9mIJTN3Qs6hXlxthIDsx",
+ "d5pZAEgSww9r0zEd2knH0wpMNAtYcSU3hobLK2bfGBumsgzNAqsJigG8KaIUPOlXXAStlkYGCt7TsOLB",
+ "/2HlCscDFhTD/7gdXVaTx164zkmdxtIhpnyYKWh4P7gajJiMeT+cfLh9y1ziRsObAfNm659/7HQ7l8Or",
+ "QZ85qn0e/h8TfrLb3uYDlis9Opr7QUiTXusL0fpC/Fi+EK27QvmRYU2j5X4b3Q/G5tvwPbnmAVdjHRZv",
+ "umtZiFnrzDycXXHyz7m519X05Va16Smn4QUksuRAwSE2Cexf8EW6AjwH9RYLNW6btn8Xxhp45OMKy1Zh",
+ "EyLDGmZ32fzL/PpO/xwcvLnUK7XODuW4504OJxLdErLy1ubVgfz2ejWRJluo/ahOWQXsS71QqNpRgycK",
+ "A8Y39VzxReceIVFkXsyOAuUKTjxqRGX/vSh/plXJhdbPizJstBxhI/OgKOigVX6T2JCyWfZNYr+RUUoY",
+ "D+i4ur3OoYQnxDIn6t/UIrHd9ZnKVZENnJ5izvDeCULiRHH4iDzodR3gxCDwwoXs9IR835lCZwYDGMtr",
+ "jEpdZ1vDeHM0e/tJgKvtza5JOYWzFtlUapmzQ+/USpEXP1aWilwXI2OKS/sdMOwbez4EgZdVQ4z5UKtd",
+ "+ReQzEOv0WoF6J94z1S3Pw89A9V+mExuZJ5pN/RSCo4F8u1j8+8AD85nM+cm/mqJ8GoSEqisOeclzcvW",
+ "1km6tBSwMu18SrcuM3ZNOt3OzfWY/ed2wrQk0wnJA5ZwVTQTFu8nvGaRCwIngjGlqyP76nHP3Q54BIhd",
+ "Zs0Z0nLJg8rTwm/QTQh03DAQlS39pcEDEeGI3ay12bAo1aE0zx7AGM0C6DlZJ2Z5ur0dXjiCfXZ/o/TB",
+ "FPq4uqwna8NYKuerwY8BO1LkApWOo9syH2DyAYKYTCEgVbaB3FaxKq2svgJw5rJ3/lZ+dnJ21js9652+",
+ "mpy+fnPy+5vf/jj6448/Xr3+o3fy+s3JiX36EsCZmaoHA0zA1GfGtj2EdAG+mQl/Ab6hRbLYHANsX+8w",
+ "6xsxdGFamxSbcrTQNjwGhNemC+NVCHiUn0tDw7Go6ZPV9MS1+ZSwk/VywkDdhwaQFefVQpcElGCGwX1o",
+ "x6sjpQM9dP3QdE5huADRPIyhQxsJMbEimsdyrDGbTxcCb11cIps6zRpzPhl+5oWs0z9v+rdjQ4CuTVQI",
+ "R1YaEcLPTWMmLXGSc3lfALLemMd739bpxrejS83wTVVl1l6r5iiivHTKV2a8lTmSaNdNO9lUFKfmRalr",
+ "Jq9O8FmBh5d/wTReClIgR3nmL1SmBsEsEU9a1mJhfPER82ORd1YKO5fT0ujVNiGRBt9IDLQNsPdgHra0",
+ "OAaRqpxeX/ZZZP7NPycf2APJ5J83g/H5aHjD8ozcvv2n3tCTMbTqATG4fPfheszj+z/1r/o8b0hVHfwv",
+ "SpX/gi1TJVF98Er6i4VvbLdBLVB+7shqoPoakn+HU4N8pV90AFmR6Z/hVCfPd6JAGDEnK8dpdDgwW32t",
+ "qZERaG8o1e9MwrcsuzhUrkA81DQTF8qbkERmpfFXczyk/voG0SjeCrj6qKuIP4NE+c7qjWvcGAKZ6YIn",
+ "XptBgoW7YtrVmdG+6ZGnGKC1CGMlkcckBgTOalNCKxBe5vo1V7QzXTpfLbmYVPbVWb19Qk5dXE1Xi9Wq",
+ "LRpe6HLfpQAOL7Q4lL0/oiBnEXh3e3U+GTJpe3E76r+9pBrWRf99pYCkg8hjtBEFs9k17CW/68/mtQLv",
+ "dnys67X554r9NCYMYkzyEVbF0JGQAF9HsSmPPcClwTlGDk/J0i5MT17GgIMj6KJ75GaTOL9EAGPoOY8I",
+ "CCflX/VcYUREA8+p7NcbpTWJE6gZv+4hUnVBSm/3pycnJ0aXIu0weSeghv48jRb0dziVYsz2HDdk+187",
+ "pJWfiLu2gPG5xdX+ZUDIecVs0sNFdV7QurmY60u8XTYYfKL0KvudNFRJjJ4r6ySMzgZSfVIUsL9WC5M9",
+ "uegp3iv2h8IoCdZIplse5R2Cfu7cV3M1ZLSck2KKZKyZZCy9clrZ3cruVna/lOw2zPEDivYKt74VRDMb",
+ "bUjgwuwoaLiv1Hc2llgbs/xX1VlW13SdylJsbTxz1gYGNMj0Yh7WYkICsahuCZHKqHXUU0oPejO4uuBZ",
+ "QbP8oJrUr/lEoWlO0bf984/X797VnpJs2pXuzXmBYibGSV6cFB1HwuBGkfwlWGmDsTuHXuJXpEc3dF77",
+ "OPpSTI5hKWBqNhvz0tlGd5pcTo4tsmNVESpcuwijkYCl2W1CR3Koc96xTgstNC/NnzGENqNwVfJmyXTa",
+ "j4K5tN8kjzZPCV212AmY6dDrc5VxfZN/sOGMGsKsyyGsoh8hFM5jepG518sFLUtzvrxDBm6sm5B5kWtn",
+ "ZHLkTrw8bnparF9hc82ggDeN5IVp7MAqA6f42axyz9UtPfoyDexOvEI0RzPPK2KUp5t82aoCQ9Fmiyyb",
+ "e8Kw2RD11YMlqbsHiU9uKlPriEbGFDtWjwTiFvkn5gfvwlD36M/x9ZXDgS7Hn7ARtIGh8lnwhR77wtjj",
+ "boUWaMBC7ZigBQwNFVEwQe7D0uRHQr85WDyr2L0kKvKiAdsyHezxtPBSZoVjpc+YZ/zRofwxo2xz5k6b",
+ "BT4p79m27xaNM6RaXwPlsiRh5Ab6Ws/pjKw2+TbUhD73Yk92hXDuUJE9ChVKycaQ+Xqdm+tILMC3mhZP",
+ "zZR9UzEJHsKQUPnL5CeHcApBDGOZxIJhlB0r7OdsU+aEROzaE4YPCMrmiO4q/0m+nb/piFjcrK/IZ0J7",
+ "J5iEC8vJnpnE5z49Gjd4PovTvxmyGkeE2cTyv6aE2Dk9Ojk6YXTMo5E7bzqvjk6PTkRgMcMECx72RW3Q",
+ "mS7S4718nqetAoixk9pj6KYDWdGicym+v2dokJ75bJazk5PywB8g8Mmcoeg1/+6GARE1vkQRYdr0+G/M",
+ "+QqnB2ANHw/iOKRS+Jkd1eqcVyFJ15Ejjs6bv752O1gW7qCrzhpKn5K/BMzuHLoPna+0P8NfDIG3rEcg",
+ "bYaqMDiSDfYdhWzBDgkd4LowIg6Jwf09cmsxmmKgFqWPp8fApyIlmPXgAiC/xx6S8fF39rP62zPHiw+J",
+ "5vZ0wX7HDkjTO9HuDuvO36ZLu9CnLQa0AXO14CMwnonBAhKmD/xV4eRTmsERya07b3hAfyo0SkvpqEKN",
+ "vw9kO7ZeIdavJXr6rYytceK6EOP7xPeXDkepl8uNVULec7fz264or+8sgE+xAD2HpU3yZPwMB+PVxsHQ",
+ "QfEujKfI8yC/fWT0zemkiswkxU9YE3pYfevFQuVgH3jfTldDGF/ZtZe4mtTY/Lq1DonzEX4MEmf08Dbk",
+ "8ngjxMCxwzetgLg0AKtMJpXYIqGTSJznsfGsF/sbWYh2CTrYc2KAA9qKAUsxwKlle2JAPSAj1CPhAwzo",
+ "qSj/ZqdhFOpi80fwMXyADghYhj7WWnhrpTMWxESEJrSVNOjQ7jZSIh3eIBMkrHt13MVseYLOGXQ/NlHj",
+ "JlQtSIdu7ETsnCTj7LcqSk63PEfBrh8m3rF6Qzdr0KWUZ/LawwZxUIAJCFxYIuJz+lm6l5gV6+3jlgHi",
+ "JEEay7o3BFajtXMEq+/1Yus/KS9s33pyiF4YcWcXcaIp+83N4cff2X+fq/abSinW6qi0ocwqzjeyVhLx",
+ "zMAm5YR93akQ2txmixxBNYc3L5zxKMQaxwbbsVa25UhcwUxG3hzFFVKN089XM4Uf14k1ti2pVKuh+YtU",
+ "gP3sdH/BSLil/f2i/QVc+Qw3nt67O7hF6rAmNJUeiQdykG/iCKdjHDM7Pd8lbNzxS4TpBch3cq1NG0xb",
+ "D/MNt7bbdC6x48qUDTdfpnLJrW6fCCHderYRhU0o739uk8MAkZBK8+PvnOOfj6M4nELz5VK+fTogV26A",
+ "2XV5uYJcIL+Z4dOpb0JMRklww+a1t02ZDr1Ucu341KsgKJGSg9MTw+/RTk+Fq5CwtPNhjP7DU5OL5Dw8",
+ "aQWP0iyZOQlAPvQcbrd32PY474Q8H2bbqj84cmSGfeA+HH9n/7Gw4jtj2lCpqpGnHPZVZDmyN9rnxjQS",
+ "DwNxL63zeZzsk2pzuhswboOMhPnEr3czMU+exXIQAt8Pn+j0uheBItVK0ct+r1KxONHlOSbAx99xgK24",
+ "5WqsSv0yvwS4AZvkBzMziji5945NCshoGWUPGaVEsCmrXI0rGSXAGjaRiotibdKrLnReeSUusUjjt7EX",
+ "0z+6ZkMAr8azkiVAgeHs9escEKeb0IGiOKT/gF57hu0Ra5oukawQgQOiSFJ7+VjjbQr8SMDUh8cemOHj",
+ "NIe58dKI2a2RtXPIHBBnCv0wmKlZBdJ82WBWvlJ+Pr0ArMboRNTNrjeXyUzVWYIWnjuascy/ExgvM57x",
+ "wOwOedXH3LYiRKzkTgHel7r4WFPvxgqfX4BZWjBemzqrQg7RKeXrH5v157YSdjuvdyX86C0ULSIfLmBA",
+ "SroBM15IOkifzgF+0EoY1vD4O/1PzfMSL9kwXXK+KQoQOoGlqZ0Xojcd+hTQHR/5+Yr7BqEga/arsJRi",
+ "obZpxy8Up2hkemNY/dn58zd+99n+rBO16DrVFO7DhCdp2hMRkfFzSUSY7wzERoQc++GsTlfxw5njowDK",
+ "zEcCjqJEuQxnlyjghUUOUaqILE8kdFh6Mme6NEgW9rmjhQYFhFUSLAddGjK/xkTkXQ6dGSQU1QzLhpkx",
+ "4pZHzcwVqRsM96Y0Pb7V1ElAkL+BqfsOlXc9Ar8RB0MQu3OHzaQU9q1YP+ugE+nVa2UUDB+h/wv+lU6E",
+ "AtdPPGjaX9oSd7TabrXAlyxAB7BVbj2Z3IYCxqJUzJTHPt9Nl3dppxyUVsCVcupYHbJW27MHR64qhBoo",
+ "xCKKtX03z2ulqeRXjp3LcLb+qUP/v5eFDptfV5WKY8aDJy0o9gMcPfgBRSbmv7/HcCPnzlZPuu2r1Nle",
+ "r+Ag0157W7U6J+N0EmZ9FZu1UEz0LvSPPThNZmYj/eAR+AkrXOScDy4dmFbNd8AMoABnhcBEoVsPEHCk",
+ "kYfn0L9gUx2KS8HmI1o+n54PLhkSagJYGCYxFYWs8C0VE3rk7zSORQVfpl2sEXVQUI+nWUOr16gvcdNk",
+ "VmIxhefPB5dmlrfidQu9hj8A5EVPWp63yM/NdJt9fKP7kfQbzY1WGvMf4BIrFyXjtLRd8+slIwMRcV93",
+ "sTwPA4zoVVKQGHtkCl2WecNzwD1hOWgQdsS1fZvGhmpYpvA+jGEtMJsyP7zjW0PCHDQgZoXOQhcxCfqE",
+ "yFx9iyvWOdbAl6WVMOzslp/J7NeVS+PvLABx54g9PbowJgAFWeh+1TrTbHxwJUNJoYK59eLSLRGrnC7p",
+ "cYdihz9X6iAWCftedFumSyfLkJv5iLPaWem9xGBTKScQ1i5EU8xBTvMAlz1eTigCKMbOLx5kgo9y39IB",
+ "zr/e/OvXotiqdIKwM2xhN4yglTzkLW3XxVqvB+9276j299PWAlVngUp5wzJso4GCdsyOYUstjZ/tVpra",
+ "R7g8FGVt62FMEhdNGYGhu2UGHTM4QnvcAkN8fzztNQhcZb4FBOv9C5rEsO6xX6EJJompA2VOsT/tAbWR",
+ "0ELcJKwwpRwrzuQ6js0xJVrWnlFcJW3NCftqTigV27VQoGtvn5VTlK6I7DLO5zxav9REs7sCTqYYEscF",
+ "gYdYnhlJ1xu9PVSt2LnF0GNsxGEh9HpchgcQaXNlb/eGohk7vXgorN1AsEsR00r2vLYl8ZLJdo7fKl2r",
+ "a3jbOWelhhzgBPBJDGwUzbztz/14w1DA0WHzgMPeb1JSdlhhJ27V3+WbjSCPOtYTZacUgNsn6V09SV9l",
+ "r9A5hk/5M+VNe5631+LYBYv/bRPeCOokRePEnfulxgluRSwu25Nr0V+2Ukwc5m3LUjTIWM5WLLykWLBl",
+ "/a5CmPTorwjFSBV4s8GEz3bIFpOUn39yLp6FpD3cjRaTFc7YIqNVpgmuPzYPPOA5d2ymSXZfkuG2cQXg",
+ "m7TyFeAFkg9byweZb7iVD4d3ylso+8y3fZEVq6tQC4RklIHATpwEjuhZnbeYe1BcIky4F4WsjXeoMq0c",
+ "CaWgocY/yQLQtYOj6qHZlINS0TbLrL+Bx711zNOnNc3QC3m6ULh51TpGyv8LqykHDECLKne0/Z1sfcda",
+ "b5XYshQI/I2PuUqllXez+FtDsgHeEAWzO17Db0eQ9zUORA+9R+HTY/FIkHkS3S0qXYle1ohNBdsoCaRE",
+ "ax43rUrRNsfB/gQws71ZpAeVXYyF/YkbhSgglufuAgUJgfQ6Lv+KIXjwwqcgPYobHMPvIbmhkx/6IcwO",
+ "POkbrITuCIN1p6tUqT87OTvtndD/TU5O3rD//V+D3BHd+/f8JrKJA5JBmnoOq6CGFL41gL1HAcJz6L1l",
+ "gzcHd/uyMUdqK0hHxietfNxT+ZjfnY1LSXzsslLg5ig0Xio8zUejk3e8yc/9QMlQwFSVmgJJPMdX6LgS",
+ "aTuNImOT+tDjecJqXyZl8zZJVBstW5JRBcmwcckUw8gHy6riTvR7pWTiTX5qycRR0EQyxRJpu5RMHExb",
+ "wRSL1q1cauVSSS4V5MIG5ZJI/WnjfSvTq9d534rs7a377T6733JyceiwdvFrrP0Vbb5KMKSgiXE6iq29",
+ "VRKdNaCiQwWk1ZO8uIeryj4NXFxTRm7f4vM+riliMrkpULy2l6upiEW6ia2fq/BzFfho8sotmfKFPF0l",
+ "jTRxdd3H5Oc/t69rObO5Be83UJuYu6v4h52/a63MOHCPVzq5fHuULFzv+5phxQzsbu3Qtvwv/Vlb3t8L",
+ "V5da9u6q5Fbj0irpV/i0CvXQwLeH7NZaUIB/NB6V3qotjxrcVWuOSRjQU7AXAwJ77AZKN1fsvSWX1fmz",
+ "1h6LB+7Rul0O25536o+ruEsX1VYw7JHirpEHq5/s+hv8TYhZfg8UuOECBbOUXhcQYzCrOOFH0IXosZVB",
+ "TWRQkPh+ifKDpROBpR8Cz0GBA4KlI1bb7RD4jRxHPkAFSitOua4MyTwFb2K63QTRcfhCxVzh9G/oVtng",
+ "cji6Bz6GrWJhqDnGmU7Daqtyt80dXfgL9+IkqHvfyGcMrH3hyDIEtq8c+5+zFIssjlbvHDvL+Mh88EHs",
+ "I4hZnmtoBd4WAwJ8QJqAsrFqKXvj9G2Zp+ZAIhUoEGkMnU12HRhv2b3/yxySORcAojqNc9F/j+npFQb+",
+ "Uv09rRmoE0iBv7yTDWqVlGkY+hAEFvEcuQKSFjh7odAOTZlLY4yHRVbfF4v1cO59MGNH7ZOgizBmzhcq",
+ "GaR3SxB4TpgQ+qdQHTHVHWkDqQceORfwHiQ+z3X/L0oP/3LQvZMEGLJjXLd8MdOdHLRTSUI7q6XX9PW3",
+ "dRjat5obOY1SVXTl7yP6+5qvUKqGe+whHPlg2WOuEjX6rmhLhxWuFeF9hRJcrQNf8MGYy8VB68OKaMXp",
+ "G1YOKSJWUqBPoM6sCCiy9EVKDW/Z/K4lgVZ0taKrqeiSfNKjfFItuXI8yrQHfbL/LLVdheQaiMGG3uEK",
+ "rvae295zf5J77s6Os0wutKfZj3Sa5U6PnZxs4nptDvmZ8AbSozR/Ya84ulrX0lOBOgUpNc/UOVIgofDd",
+ "3PX7tKI1QwKQj5v5mKoU0r43FV0+Cwy0AQbP8zPz91R+qSkjkSc5EHjMkSw9/0mYXiVFoaT/6XiMKP6n",
+ "40SGx+iMfixdznIwcNvmjPU0vAAryzvYPIYrcFl7iu/xKV4MfbNk6G6JoFdg8WNRLq6K0wnP8EUSZjjK",
+ "8/1RLRePZT26FXlZnV5R139M1lavny1L76mD13mY+B6PpaUXSZ3mskd5SXJclRaHfBFZwxI9WZTXZSG5",
+ "PMCdW+rtrw5pgXlro9fPU40mE6taA8iPK1FXqujYCtVWTyrKLoIWKJjVa0uiXWPp9R6SiZjiYO8+Whnk",
+ "wYjMebYSntHMcefI92Joct1gHRpKv+0LEr45rSQ5eElSxZ+bFi8wEjJF/vl8DGJ3jh5hnRYkWgkwaXet",
+ "CBkTGAl33b4c2EJ8yPGM1lMJb+u6u7pGtk2ZJPZd7LmVVMonlGxrgu4+F1PKdYV8TGUhlWN/hfmlfKLb",
+ "T2VTlWhKWbheJtncy0TZfnt5NJD1VVtp9JNII/u7ViuLDkcWKYy/fUnkh7M6Tyk/nDk+Ckq6UdkcfRnO",
+ "LlEAba1BrRh62XgmHz5C38pliLfMzVzFDJIOaK93CPqeMXscpAevw2ZT4KgoZMI6NAVkzHtpQ0kACxQI",
+ "Y69q/ezz2yVfS8PJr9W+Bjzw6T0UQ1dEuldAcaE0WwWSrP92DylVGrTF89dNP5dKYeUsuAxnzY8B4WhU",
+ "kdaceUBg4UlkcNyfsJ/PVceXTTvm8MH5RHUJerlr0su44nAIGznfCKT+2DS+gtdNSmxpZlrhT1Mkch1F",
+ "p65ztSZj7hojXtgrCbxpMqY0sEPMYHzy2Y233MtSvEyX1FL7bm8bnBi9EPKLBvzGT+BSEQ1bZstlM63O",
+ "vxTw2VAwq+arw8nCtCWvU46AJodblOYXyRUzbc+5QzrnBJ+swHoV590x8ClhBLMeXADk92ZxmESVD6dU",
+ "uZO3QEFebAyHDeCIAYqs26dNBrTFe9rgUCKdtn8S6hDTsNyUcRNa3sm/JlZQa6NzzPrqU56rjjF++pAK",
+ "9eZWwI3dWVdCeaOr3el22XuFE1BDQy1fa+9+Wm7b7Cl5jCEhda5FmO2e7OLILtXZDBRyQcFsLPocSELf",
+ "HR2TCmLWOCPVPWlZSXOt06BpY3wUoR4JH2BNMjynfzN0eLtqrulHaEKbtfokPmZ+RTdDhg88ErM05BPp",
+ "H9Xa0IvKI6VIjlqFGdIf1ynjEmTUbkfsrY7IECBpXVELt2nCKE7a8teGw2YzZmrIYFUHjoW3FK8sl3OZ",
+ "MqVdzZxm2nSre+2e8ACXVs4JtF3z9DOMDD7CpU1ekwym1H15eIFt82FyWdEYQOkSPbxYEcQsBm2NVD42",
+ "EI6SgMdRCsPXi7h6sP18GUcPNvUeuHmocKhOHhXEkmUQgkvnEfgJ1OcRgt/AIvIhFdkPcHn6hjU97XTp",
+ "v874v86oeK/ON/Rps+mGsmXwxKVpxqFqOmeNh4efaWilSLvWuyYw+1wqSgtD7vomZDauQQdprwAMAQwX",
+ "NWZhkZj4Rdx7OCU0sflC3uNn964+++/dzDoS/CnUU/jNhdCDhlKOfG8a8Hn9xeR4mvgPZne6t4kvahhB",
+ "nMkEXCkUaJ+fWDDQ5TcUDvglpQNuLh7a6Is9kw+MTVUhgTcsJVwQuNCvcLtl37khQ0mcnVNxTVKDu5Xw",
+ "EX5mhYIhwF6hEBeGGEY+WG5cbERKQajvqSVglARDnpx4W0U8rOtOCdHEkAazHCWtkNpbITVilLod+cTM",
+ "aJY2Vm6bs7CzfoTL9lkvMzaudFtnyG5v7LobuyNsv5vkA3EaGM9pzoO42dE8kkfMz3o0cwTsy9G8GbMa",
+ "B67V6n/SA/M7+2/vCZF5T35i1u3a8CNAAD88g0oD4QUg4D0kXxCZTyTb18oPyT568VECeddvlz/8KU83",
+ "bZV0DIwq2lM+78umYMaad7saIq/mZxQ8IgKbBkzIXnon0CH72uq+0vdTwcdKXp8S262vpy4cIqPFLcVA",
+ "8Akqab19zlKiHjhK7IIdOG5fNMKBg7tKYIMgjJ89tvfsbEdaLyB271xFvtXJBRiAqQ97MSCwx8ak7CF4",
+ "bRW9WEgh+UOP//uZixgfElgWNhfsd5yakWwEDe9zsN57ea6vhq2XouPQT/5a2cIpZJ9lS47NOBFm5GrS",
+ "RfP7WBtB34wTDieK/lA4YbuB/qtpBS8W6m/JuRy+g+FcEYLfmHOrTr4FXEwZ8zW6Qcpeehb/xL62N0hJ",
+ "jQo+VrpBSmy3N0jdDTKjxc0ECYrxjr/zPyyUQAcIIJz7OFzUBdlyavgxVEGxbBNs/PNOefe3rfDuKjrg",
+ "z8G1e5Sr9sqQmjZl0tzGNJAXXUnIFmmkSpOYRcCPoQPvhQjYrvLLt8tO+RXo2JOUV5bSS6MHi31rhdcL",
+ "Cy+jXFlBeFVpPVEcLiCZwwT3FlQHdevLF2VdHNEl9cGry0x5k3b9JCb7IS4KBH4jx5EPUIEqiiM1uQOU",
+ "sdwy5UszJeUAzb5s6gby7wQm0JoNWevGHPgP2uuAmO+wI5sPKVh1+/aQHO2tlsHCeYQxRmHQysR9konp",
+ "7pQlouScVWVi9tRn4+odp4+Ndb7eI0DgJW3Y5tXY5+q0m8jBUIvJbWZaSOlsD7ItFGHZVVmNPK81CCZQ",
+ "2Ln1MyxYwVXcZOKWeVtc8l9XlbiiRy8KfeQu61NOyg4O72CTcFK6Qt+wHm26yWMdWlZ7NCrsRvt4tPOs",
+ "rdgH7kN1oskxbeI8wek8DB/Kz6ns8xf+tX1O5TkmVZw0uT0UUL1P7LCjise3AUjIPIzRf6DHJ369m4k/",
+ "QTIPPVbRA/h++KSvtsw3iOmBnAXU84x9XIsRjzEBMTGy45h+5efYdT8hc4ddVooMeYvlsw0D6JoilPU8",
+ "RM58dXKmwYPKPQxl4ljJYWUOgSe8RvyQE0yNxZNtOHSTGJElw48bhg8I0kFZUaSvKj0wlOZnlIRAd2Bl",
+ "OqjL+zu+GhcJsCCQA9zKYSGHr8ZDFVUNJHERy60s3jtZXGaEVBJfjddIN1wYWMdgbTQGQ0CevyqzDG+O",
+ "ZvOTWkdVFHe1Zeg9Ymgj51lydOWJKup09nbxZCVKhx/ay9X2zQU6xDSzGaT1rHM70z6q7MOjSro3m35m",
+ "1lVVr2TdrIC6M11yhiqc3pwQD8SO193Xyu7blBhii1aUD61E2FkpVJUWnwCvh1onItRDnf5EN3rVKtvV",
+ "cqI2J2CfELiIRHJL1lYRHybBcWjJAFsJUuUSjzDzlRYihBOBv38XhBd+xKtjlF0xdAxpx4rcYSzJoi0P",
+ "s+YtC+9jNrM4CcRW1Xi0oyBKmD8Ef9zVLfd5LzSVNpdZhXxhG/4SAiVbU6UtgDcTzgJ1wuU9JGM+bCta",
+ "Xk47aJal12BpEMO1F4p9vlDIXdqK1CAAP/QwAaTGYAjwA6sGJSyFNVbCCcAPYzaovYgYXvyItsEUEQ04",
+ "VIvrlkf3wAxoYoNdpEcSXjO9pzB+qEoWkTlgG12aWm+mLJiEo+ILQypFSFVVT4qMNOCFd3TkdrTPbfv2",
+ "fq6Q/+pJDMUgJhb66d/Jc/zDsbGjYryamb1GKQjl1racu38P5SrjrXRYMqqofkijJyQX3tVe8tnZ8NMf",
+ "lhkm2prXG8lQLbWHfIze6t6VEtHcENS8FoVa/VdTkkIp2dsWplAKUyh4wTUG3Vx95ZcrU6GD27qcvWLr",
+ "zRFMe0ndy/IV+T0qhwNXm5KaCJzv6j/r/FhynFB7AgsyPWS3lgLr60FTMXjAaoLYrlUzC7RuLua4/vwL",
+ "Un1MfzdPU6vz8zF7jKx9TOJPlpyhVaCPavh6yEZvmfvlmTvLYnKjFKHkMK7z7pTHEdvu1qy9I7P2FxX3",
+ "gU3+kGyTmqoMm5M4eA4iuCU9YszGbuXNwSgTfMNajeIH0ijS2BXhM1QZGSoqtTMW9/30fRxrdI0q1meB",
+ "k9yVZSAL+7UyYOMAXgJMnOEFS1g/h44P5A6a0hQBTIaeMU/RqzNdnqId+Ng2KehZKsvXmkT2z7dmBVli",
+ "73hjJwux1csEa2mn0fyUidM8eA8Sn3TenHRzomIXKdTSuV+vMvmYZ1KbLh02gX5S8cmcz2EXalf72LN5",
+ "fWuTKRnTMWuDgc5lXMMUEHdeeuyp0pgOJxhoW14OyjsJR4at276IJik/lWz6sSdSLDXfU6VvlARDD+dS",
+ "z66F4HK+3YYGIRGB1L4e1aRH42Szi5cbfOzGYVCvkdBWzt/hNAOKxGg2q3WfOI/D4KdWUw4mv2u6scij",
+ "084gSVXio5o03qaL2xbuunTmpuBd1alS2ikZxTeZjnZoPtVhZiivyJk7XTr3Ii/vxlL3qlIE26fvnS63",
+ "l8FXUQp2nMM3h4w1NPT22NVo6aVzbkvqOj10j7/T//Tkr3Zl7soHsfXDByWcAy96l67eBFYOo7sve2dZ",
+ "n067iW1+4GK9OD2amr1V5Ani63O36jFxTeY6ZPekPeasLR2d7bF5CIb9Rof1RuRDXXlJNms6o7VwOPBa",
+ "k/slH7ZVbVIVEBNu4LCy9VEq4CUcbWx7daqCWgyyVRWq5YBgy22IAjtVnh0Htg966itjvZtSazDbZ4MZ",
+ "e0RuYC1j7XdoKttHO14EYoo0g+tKASze+Iv6mLEj+DQpYrSwCSeR7cLV18ZnsUQECYZW9RZl21WsW2PW",
+ "V9iZbIB7QIFnBRVr2Bikjyjw6qE5eGMqQQvogHsKaMl5+glgGcusLqFzdnJ22juh/5ucnLxh//u/RmM1",
+ "696nE+iJlx6rPQpFx7YaOYV4Cu/DGG4T5Ldshk3CXIHlexQgPF8dZtl/p3jeFNAbxfT2HgfKlvif9mmg",
+ "qDu2Fo6tuEtv502AeUjb5O8HjgCNHnR59lcT+lsGQhxyBepWDW/V8N2r4a1u2eqWLxIChdes2M4EUFtZ",
+ "pP5830L19Oycp6B6iU+PxxqrYdpyFfvhWHZurYj7bEXc3r0oJYCD8pxqlalWmToYZSpbRiaqN2KbTUGy",
+ "YvDUSquBeasxkiUJ01odNquVGDSA7eolx9PEf+hlnoj6iKK3if8gnNo2pKjQEQ/HP3FLfghlnsrQYht2",
+ "NK3fmt3WEalckznxnEpicdqulRBSQry12uetSwrurlIjKXgj55cYyt6/blBsHI5z1U7FhkzT2UBsiH3a",
+ "X7Eh11QjNsQ6WrFhEBu1+7xNsfE9/bNXyhlZGwGhB7mh0DjwOAgNDozVjLSo3tvQCP3utg6PxdgIA56a",
+ "eTwaaKMmSmIjDHjQFYoPivu2eSC3d/1Dj6HYthypjqbIXQc2JFkOPNBi74XLtmIvStKlQX3UjIzKeR9f",
+ "9spSKyHVYI+fUvk5gOpvt1WXpU3JSrtLVJpC8znL3FJVxsoBTgCfzPlb7NO3iHiowyl6VZ9JpDpnZiVo",
+ "OxKNHNurhqWJytHGzd+pbGwWfKvW6jLD30rG3UvGvSt0IgRdFZVvJ3WWIotzTj16eSx1AyGR7TVcnWLU",
+ "SuFdSmG5AytophVq3Z4rpqoEbhXTVvyaxK9QSOp04o2LXF49r+eGSUBq4iVYG5mLXJZ9BI8A+WDqQyZ9",
+ "FXGjty+8h4RX58PnbMaDF711KeMPvGREbrNWNFNyUuHk074gGhymc0harZBEnv0TDGN87CZxDKs5G/Pb",
+ "AW/o0G4l7r3FMH4PybkYbIt0R2dqSGcM4rYA8csXIIZuEiOyZGLcDcMHBPsJlV1/faWiqpB0KE9uktzZ",
+ "9mvIeIbIPJkeu8D3p8B9MJLzebiIfEggp+lrOr+jPY/oRNwe9Z4NfU1xeS6HLxD4q5OzmrdXV8zrleed",
+ "Q+Cxw+17xw/5ZuT3oSjWnwvIzOFOLjA/hyX6MAGxWRSM6dfVEMe6Nscag2f7OGPQNURYGM58uB16Y0P/",
+ "4PTG0bdhessQ98PRGwoeEYHVtZswi2aS2jDvwJRuq+ObjjBhfYdiri2e4upEVs7sPsJyY/ILbPVF62OV",
+ "1eQpYC+jvInmhpijvWPgujAiZstbn33HqYVNTFKiNnXzeZ/OduxJfHA+kWJIMhiAKqiPr1xHf63HVEpe",
+ "HNulvbenrxiy6hYVlfTp92b0xft0tlWXng6+AfriK2/pq5K+OLZXoC8/nKHATFaX4Qw7KHAAOxuPKhSM",
+ "SzbQlpwz6BFMx68npN3do/1wNoOeg4L2+vzC1+du57ezs12tO4pDSgPMaDsICCJLp+c8Ah95bDK6KaIJ",
+ "CmYOlCOZFV5G2PqrfLfzrQcDOlUvBgT2mA2c6tD8rUbHzGFCarg5TIgdO4fJyxurBJOFe1aouzVS1WjT",
+ "jHps7VMLuJjCGM9R1OAOp3Syu8fxM/BT1k0kpdgqgesnbX6hU1HUXupWudSpGKwnyQhg/BTGFa4UaS52",
+ "2sGR7atE6o0cc3tK0vkcBLN0on3SllwGmZciqhXnrdLUTGmqZnVO+XlmXFufiuGMSuK46trNW+BKlSr1",
+ "lNoW30sw9onjJfLah8aW6TdzU5JUvpnLEvaB+7CVR6oxHXmP36hqJGnDR6tHGGMBgtH9ia5BtJMuUBjG",
+ "jxotfRjch+8h+SwG3WhNYgXSLEPj6dHJ0YkuB6TiefRX2vWrRbnhScViC96WFcT+BToxJEkc5JBXuOlQ",
+ "MZsEAeWfdIpvPTlkL4x4yqkyCzzB6TwMH3rCEe34u/jBIvydHnWiddlRjf9uH9kuBjI7gqUT7dgPzDJU",
+ "XMLXHmwvb5wohqerZGr0/hItvloxx7HAs42ZQjYVfvU1HCMUN2ybKHNv+WYz/pMceu4+KVBDMVOVcYVi",
+ "Ja0DIrCTblfLnnvEnswqU9qipjya8ib747nG+5q30jpWM+dMK57jTqZVPsuaM/5wPJYb+46KFbf2yJJT",
+ "cingS15QzD7ITK2ur/xYScj2aQf2gpa3FcWfOzdMZ4XAQCJRtrs4KEteU4PyW04z1Fxch9kKp0kxuMcq",
+ "EVizGqwN7kV7GSHTJIlWCmAboPfCmSMEsSoUs2J8TLdOw7LnhAYq188QKLZicFjLWy/NW2oU2jqMZaP2",
+ "2XNXMz1wLxhs87pgHhm2sfIiJ2mOy3atHFpJhKJ62MoDo4K4HnPWqIlW5fLoJuXr4qWM95i+dBhPygbl",
+ "8faBnzUlKniBiQ3UD169erAesFkcJhGr+5GBIDfKCArr9BEuO7VpQLYsJNasxSUfldpyXHuoTaxU/6uR",
+ "4JKpiYzOLTKrRtNkQSvlCNpLyTXRsMuRM7xn1m2cUOqAXpdxlQ8IxCTlKYSde0jcOfRM1aEywb/nipQg",
+ "gxUTD71YuiEF3kZ5htrsQm12oS1kF2okmoVswBavWrmT3EosC9+aAzLB/AhyectSTjpMracKtvJur1TA",
+ "jBRXVQGLjn9TCGIYp45/Xa0rIPMk4/Igif3Om07n+evz/wsAAP//ral9pkI1AwA=",
}
// GetSwagger returns the content of the embedded swagger specification file
diff --git a/cmd/hatchet-cli/cli/docs.go b/cmd/hatchet-cli/cli/docs.go
new file mode 100644
index 000000000..740230d3e
--- /dev/null
+++ b/cmd/hatchet-cli/cli/docs.go
@@ -0,0 +1,300 @@
+package cli
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "fmt"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "runtime"
+
+ "github.com/charmbracelet/huh"
+ "github.com/spf13/cobra"
+
+ "github.com/hatchet-dev/hatchet/cmd/hatchet-cli/cli/internal/config/cli"
+ "github.com/hatchet-dev/hatchet/cmd/hatchet-cli/cli/internal/styles"
+)
+
+const defaultMCPURL = "https://docs.hatchet.run/api/mcp"
+const docsBaseURL = "https://docs.hatchet.run"
+
+var mcpURL string
+
+// docsCmd represents the docs command
+var docsCmd = &cobra.Command{
+ Use: "docs",
+ Aliases: []string{"doc"},
+ Short: "Hatchet documentation for AI editors and coding agents",
+ Long: `Hatchet documentation is optimized for LLMs and available as:
+ • MCP server: ` + defaultMCPURL + `
+ • llms.txt: ` + docsBaseURL + `/llms.txt
+ • Full docs: ` + docsBaseURL + `/llms-full.txt
+
+Use "hatchet docs install" to configure your AI editor.`,
+ Example: ` # Interactive — pick your editor
+ hatchet docs install
+
+ # Configure for Cursor
+ hatchet docs install cursor
+
+ # Configure for Claude Code
+ hatchet docs install claude-code
+
+ # Use a custom MCP URL (self-hosted)
+ hatchet docs install cursor --url https://my-hatchet.example.com/api/mcp`,
+ Run: func(cmd *cobra.Command, args []string) {
+ printAllOptions()
+ },
+}
+
+// docsInstallCmd represents the docs install command
+var docsInstallCmd = &cobra.Command{
+ Use: "install",
+ Short: "Install Hatchet docs into an AI editor",
+ Long: `Configure Hatchet documentation as an MCP (Model Context Protocol) server
+for AI editors like Cursor and Claude Code.`,
+ Example: ` # Interactive — pick your editor
+ hatchet docs install
+
+ # Configure for Cursor
+ hatchet docs install cursor
+
+ # Configure for Claude Code
+ hatchet docs install claude-code`,
+ Run: func(cmd *cobra.Command, args []string) {
+ // Interactive mode: let user pick their editor
+ var editor string
+ form := huh.NewForm(
+ huh.NewGroup(
+ huh.NewSelect[string]().
+ Title("Which AI editor do you want to configure?").
+ Options(
+ huh.NewOption("Cursor", "cursor"),
+ huh.NewOption("Claude Code", "claude-code"),
+ ).
+ Value(&editor),
+ ),
+ ).WithTheme(styles.HatchetTheme())
+
+ err := form.Run()
+ if err != nil {
+ cli.Logger.Fatalf("could not run AI editor selection form: %v", err)
+ }
+
+ switch editor {
+ case "cursor":
+ runDocsCursor()
+ case "claude-code":
+ runDocsClaudeCode()
+ }
+ },
+}
+
+// ---------------------------------------------------------------------------
+// Subcommands of `docs install`
+// ---------------------------------------------------------------------------
+
+var docsInstallCursorCmd = &cobra.Command{
+ Use: "cursor",
+ Short: "Configure Hatchet docs for Cursor IDE",
+ Long: `Set up Hatchet documentation as an MCP server in Cursor.
+
+This creates a .cursor/rules/hatchet-docs.mdc file in your project that
+configures the Hatchet MCP docs server, and prints the one-click deeplink.`,
+ Run: func(cmd *cobra.Command, args []string) {
+ runDocsCursor()
+ },
+}
+
+var docsInstallClaudeCodeCmd = &cobra.Command{
+ Use: "claude-code",
+ Short: "Configure Hatchet docs for Claude Code",
+ Long: `Set up Hatchet documentation as an MCP server in Claude Code.`,
+ Run: func(cmd *cobra.Command, args []string) {
+ runDocsClaudeCode()
+ },
+}
+
+func init() {
+ rootCmd.AddCommand(docsCmd)
+ docsCmd.AddCommand(docsInstallCmd)
+ docsInstallCmd.AddCommand(docsInstallCursorCmd)
+ docsInstallCmd.AddCommand(docsInstallClaudeCodeCmd)
+
+ // Add --url flag to install and its subcommands
+ for _, cmd := range []*cobra.Command{docsInstallCmd, docsInstallCursorCmd, docsInstallClaudeCodeCmd} {
+ cmd.Flags().StringVar(&mcpURL, "url", "", "Custom MCP server URL (default: "+defaultMCPURL+")")
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Implementation
+// ---------------------------------------------------------------------------
+
+func runDocsCursor() {
+ url := getMCPURL()
+
+ fmt.Println(styles.Title("Hatchet Docs → Cursor"))
+ fmt.Println()
+
+ // 1. Write .cursor/rules/hatchet-docs.mdc
+ rulesDir := filepath.Join(".", ".cursor", "rules")
+ rulesFile := filepath.Join(rulesDir, "hatchet-docs.mdc")
+
+ ruleContent := fmt.Sprintf(`---
+description: Hatchet documentation MCP server
+alwaysApply: true
+---
+
+When working with Hatchet (task queues, workflows, durable execution), use the
+Hatchet MCP docs server for accurate, up-to-date API reference and examples.
+
+MCP server URL: %s
+
+Use the search_docs tool to find relevant documentation pages, or get_full_docs
+for comprehensive context. Documentation covers Python, TypeScript, and Go SDKs.
+`, url)
+
+ if err := os.MkdirAll(rulesDir, 0o755); err == nil {
+ if err := os.WriteFile(rulesFile, []byte(ruleContent), 0o644); err == nil {
+ fmt.Println(styles.SuccessMessage("Created " + rulesFile))
+ } else {
+ fmt.Printf(" ⚠ Could not write %s: %v\n", rulesFile, err)
+ }
+ } else {
+ fmt.Printf(" ⚠ Could not create %s: %v\n", rulesDir, err)
+ }
+
+ // 2. Print the MCP deeplink
+ fmt.Println()
+ deeplink := cursorMCPDeeplink(url)
+ fmt.Println(styles.Section("One-click install"))
+ fmt.Println(styles.InfoMessage("Open this link in your browser to install the MCP server in Cursor:"))
+ fmt.Println()
+ fmt.Println(" " + styles.URL(deeplink))
+ fmt.Println()
+
+ // 3. Offer to open in browser
+ if promptOpenBrowser() {
+ openBrowser(deeplink)
+ }
+}
+
+func runDocsClaudeCode() {
+ url := getMCPURL()
+
+ fmt.Println(styles.Title("Hatchet Docs → Claude Code"))
+ fmt.Println()
+
+ claudeCmd := fmt.Sprintf("claude mcp add --transport http hatchet-docs %s", url)
+
+ // Try to run claude mcp add directly
+ if _, err := exec.LookPath("claude"); err == nil {
+ fmt.Println(styles.InfoMessage("Found claude CLI. Adding MCP server..."))
+ fmt.Println()
+
+ cmd := exec.Command("claude", "mcp", "add", "--transport", "http", "hatchet-docs", url)
+ cmd.Stdout = os.Stdout
+ cmd.Stderr = os.Stderr
+
+ if err := cmd.Run(); err == nil {
+ fmt.Println()
+ fmt.Println(styles.SuccessMessage("Hatchet docs MCP server added to Claude Code"))
+ return
+ }
+
+ fmt.Printf(" ⚠ Command failed. You can run it manually:\n\n")
+ } else {
+ fmt.Println(styles.InfoMessage("Claude CLI not found on PATH. Run this command manually:"))
+ fmt.Println()
+ }
+
+ fmt.Println(styles.Code.Render(claudeCmd))
+ fmt.Println()
+}
+
+func printAllOptions() {
+ url := getMCPURL()
+
+ fmt.Println(styles.Title("Hatchet Docs for AI Editors"))
+ fmt.Println()
+
+ // MCP Server
+ fmt.Println(styles.Section("MCP Server"))
+ fmt.Println(styles.KeyValue("URL", url))
+ fmt.Println()
+
+ // Cursor
+ fmt.Println(styles.Section("Cursor"))
+ deeplink := cursorMCPDeeplink(url)
+ fmt.Println(styles.KeyValue("Deeplink", deeplink))
+ fmt.Println(styles.KeyValue("Or run", "hatchet docs install cursor"))
+ fmt.Println()
+
+ // Claude Code
+ fmt.Println(styles.Section("Claude Code"))
+ fmt.Println(styles.Code.Render(fmt.Sprintf("claude mcp add --transport http hatchet-docs %s", url)))
+ fmt.Println()
+
+ // llms.txt
+ fmt.Println(styles.Section("LLM-Friendly Docs (llms.txt)"))
+ fmt.Println(styles.KeyValue("Index", docsBaseURL+"/llms.txt"))
+ fmt.Println(styles.KeyValue("Full docs", docsBaseURL+"/llms-full.txt"))
+ fmt.Println()
+}
+
+// ---------------------------------------------------------------------------
+// Helpers
+// ---------------------------------------------------------------------------
+
+func getMCPURL() string {
+ if mcpURL != "" {
+ return mcpURL
+ }
+ return defaultMCPURL
+}
+
+func cursorMCPDeeplink(url string) string {
+ config := map[string]interface{}{
+ "command": "npx",
+ "args": []string{"-y", "mcp-remote", url},
+ }
+ configJSON, _ := json.Marshal(config)
+ encoded := base64.StdEncoding.EncodeToString(configJSON)
+ return fmt.Sprintf("cursor://anysphere.cursor-deeplink/mcp/install?name=hatchet-docs&config=%s", encoded)
+}
+
+func promptOpenBrowser() bool {
+ var open bool
+ form := huh.NewForm(
+ huh.NewGroup(
+ huh.NewConfirm().
+ Title("Open in browser?").
+ Value(&open),
+ ),
+ ).WithTheme(styles.HatchetTheme())
+
+ if err := form.Run(); err != nil {
+ return false
+ }
+ return open
+}
+
+func openBrowser(url string) {
+ var cmd *exec.Cmd
+
+ switch runtime.GOOS {
+ case "darwin":
+ cmd = exec.Command("open", url)
+ case "windows":
+ cmd = exec.Command("rundll32", "url.dll,FileProtocolHandler", url)
+ default:
+ cmd = exec.Command("xdg-open", url)
+ }
+
+ if err := cmd.Start(); err != nil {
+ fmt.Printf(" ⚠ Could not open browser: %v\n", err)
+ fmt.Println(" Copy the link above and paste it in your browser.")
+ }
+}
diff --git a/cmd/hatchet-cli/cli/tui/run_details.go b/cmd/hatchet-cli/cli/tui/run_details.go
index da307b7fe..03c6267ac 100644
--- a/cmd/hatchet-cli/cli/tui/run_details.go
+++ b/cmd/hatchet-cli/cli/tui/run_details.go
@@ -860,6 +860,45 @@ func (v *RunDetailsView) fetchWorkflowRun() tea.Cmd {
}
}
+// calculateAvailableDAGHeight dynamically calculates how much vertical space is available for the DAG
+// by measuring the actual rendered chrome elements
+func (v *RunDetailsView) calculateAvailableDAGHeight() int {
+ if v.Height == 0 {
+ return 10 // Fallback minimum
+ }
+
+ title := "Run Details"
+ if v.details != nil {
+ if v.details.Run.DisplayName != "" {
+ title = fmt.Sprintf("Run Details: %s", v.details.Run.DisplayName)
+ } else if len(v.details.Tasks) > 0 && v.details.Tasks[0].WorkflowName != nil && *v.details.Tasks[0].WorkflowName != "" {
+ title = fmt.Sprintf("Run Details: %s", *v.details.Tasks[0].WorkflowName)
+ }
+ }
+ header := RenderHeader(title, v.Ctx.ProfileName, v.Width)
+ headerHeight := lipgloss.Height(header) + 2 // +2 for spacing after header
+
+ statusSection := v.renderStatusSection()
+ statusHeight := lipgloss.Height(statusSection) + 2 // +2 for spacing after status
+
+ tabs := v.renderTabs()
+ tabsHeight := lipgloss.Height(tabs) + 2 // +2 for spacing after tabs
+
+ footerHeight := 3 // Footer typically has border + padding + content
+
+ // Account for DAG border and padding (from renderDAG style)
+ // Border: 2 lines (top + bottom), Padding(1, 2): 2 lines (top + bottom)
+ dagBorderPadding := 4
+
+ usedHeight := headerHeight + statusHeight + tabsHeight + footerHeight + dagBorderPadding
+ availableHeight := max(v.Height-usedHeight, 10) // Ensure minimum height of 10
+
+ v.debugLogger.Log("DAG height calculation: total=%d, header=%d, status=%d, tabs=%d, footer=%d, border=%d, available=%d",
+ v.Height, headerHeight, statusHeight, tabsHeight, footerHeight, dagBorderPadding, availableHeight)
+
+ return availableHeight
+}
+
// buildDAG builds and renders the DAG visualization
func (v *RunDetailsView) buildDAG() {
if v.details == nil || len(v.details.Shape) == 0 {
@@ -869,8 +908,7 @@ func (v *RunDetailsView) buildDAG() {
return
}
- // Reserve space for DAG (height of border box minus padding/chrome)
- dagHeight := 10 // Approximate height for DAG area
+ dagHeight := v.calculateAvailableDAGHeight()
dagWidth := v.Width - 10 // Account for border and padding
v.debugLogger.Log("Building DAG graph: nodes=%d, dagWidth=%d, dagHeight=%d", len(v.details.Shape), dagWidth, dagHeight)
@@ -960,16 +998,10 @@ func (v *RunDetailsView) navigateDAG(direction string) {
switch direction {
case "left":
// Move to previous node in visual order
- newIndex = currentIndex - 1
- if newIndex < 0 {
- newIndex = 0
- }
+ newIndex = max(currentIndex-1, 0)
case "right":
// Move to next node in visual order
- newIndex = currentIndex + 1
- if newIndex >= len(navigableNodes) {
- newIndex = len(navigableNodes) - 1
- }
+ newIndex = min(currentIndex+1, len(navigableNodes)-1)
default:
return
}
@@ -1085,18 +1117,18 @@ func (v *RunDetailsView) exportDAGData() (string, error) {
b.WriteString(separator)
b.WriteString("\n\n")
- b.WriteString(fmt.Sprintf("Nodes: %d\n", v.dagGraph.NodeCount()))
- b.WriteString(fmt.Sprintf("Edges: %d\n", v.dagGraph.EdgeCount()))
- b.WriteString(fmt.Sprintf("Components: %d\n", v.dagGraph.ComponentCount()))
- b.WriteString(fmt.Sprintf("Actual Width: %d\n", v.dagGraph.ActualWidth))
- b.WriteString(fmt.Sprintf("Actual Height: %d\n", v.dagGraph.ActualHeight))
+ fmt.Fprintf(&b, "Nodes: %d\n", v.dagGraph.NodeCount())
+ fmt.Fprintf(&b, "Edges: %d\n", v.dagGraph.EdgeCount())
+ fmt.Fprintf(&b, "Components: %d\n", v.dagGraph.ComponentCount())
+ fmt.Fprintf(&b, "Actual Width: %d\n", v.dagGraph.ActualWidth)
+ fmt.Fprintf(&b, "Actual Height: %d\n", v.dagGraph.ActualHeight)
b.WriteString("\n")
stats := v.dagGraph.GetComponentStats()
- b.WriteString(fmt.Sprintf("Total Components: %d\n", stats.TotalComponents))
- b.WriteString(fmt.Sprintf("Largest Component: %d nodes\n", stats.LargestComponent))
- b.WriteString(fmt.Sprintf("Smallest Component: %d nodes\n", stats.SmallestComponent))
- b.WriteString(fmt.Sprintf("Isolated Nodes: %d\n", stats.IsolatedNodes))
+ fmt.Fprintf(&b, "Total Components: %d\n", stats.TotalComponents)
+ fmt.Fprintf(&b, "Largest Component: %d nodes\n", stats.LargestComponent)
+ fmt.Fprintf(&b, "Smallest Component: %d nodes\n", stats.SmallestComponent)
+ fmt.Fprintf(&b, "Isolated Nodes: %d\n", stats.IsolatedNodes)
b.WriteString("\n")
}
diff --git a/cmd/hatchet-migrate/migrate/migrations/20260215120000_v1_0_77.sql b/cmd/hatchet-migrate/migrate/migrations/20260215120000_v1_0_77.sql
new file mode 100644
index 000000000..99598dead
--- /dev/null
+++ b/cmd/hatchet-migrate/migrate/migrations/20260215120000_v1_0_77.sql
@@ -0,0 +1,7 @@
+-- +goose Up
+-- Add RUBY to WorkerSDKS enum
+ALTER TYPE "WorkerSDKS" ADD VALUE IF NOT EXISTS 'RUBY';
+
+-- +goose Down
+-- NOTE: Postgres does not support removing enum values.
+-- A full enum recreation would be needed to revert this.
diff --git a/docs/development/updating-configuration.md b/docs/development/updating-configuration.md
new file mode 100644
index 000000000..ce4e71dfa
--- /dev/null
+++ b/docs/development/updating-configuration.md
@@ -0,0 +1,22 @@
+# Updating Configuration
+
+Modifications to Hatchet's configuration should be reflected in the appropriate [`pkg/config`](../../pkg/config) package and wired in [`pkg/config/loader/loader.go`](../../pkg/config/loader/loader.go).
+```go
+type ServerConfig struct {
+ RequestTimeout time.Duration `mapstructure:"request_timeout"`
+}
+```
+
+To ensure configuration is loadable via environment variables, add the corresponding `BindEnv` call in `BindAllEnv()`.
+```go
+func BindAllEnv(v *viper.Viper) {
+ v.BindEnv("request_timeout", "HATCHET_REQUEST_TIMEOUT")
+}
+```
+
+Finally, document the new environment variable in [`frontend/docs/pages/self-hosting/configuration-options.mdx`](../../frontend/docs/pages/self-hosting/configuration-options.mdx) and any other relevant documentation.
+```markdown
+| Variable | Description | Default Value |
+| ------------------------- | ---------------------------- | ------------- |
+| `HATCHET_REQUEST_TIMEOUT` | Duration of request timeouts | `5s` |
+```
diff --git a/examples/go/child-workflows/main.go b/examples/go/child-workflows/main.go
index 4c6f90082..7fc17927f 100644
--- a/examples/go/child-workflows/main.go
+++ b/examples/go/child-workflows/main.go
@@ -75,7 +75,6 @@ func Child(client *hatchet.Client) *hatchet.StandaloneTask {
)
}
-
func main() {
client, err := hatchet.NewClient()
if err != nil {
@@ -127,7 +126,6 @@ func main() {
return err
}
-
_ = childResult
n := 5
diff --git a/examples/go/migration-guides/mergent.go b/examples/go/migration-guides/mergent.go
index 777d988c2..54bef3077 100644
--- a/examples/go/migration-guides/mergent.go
+++ b/examples/go/migration-guides/mergent.go
@@ -45,7 +45,6 @@ func ProcessImageMergent(req MergentRequest) (*MergentResponse, error) {
}, nil
}
-
// > After (Hatchet)
type ImageProcessInput struct {
ImageURL string `json:"image_url"`
diff --git a/examples/go/on-event/main.go b/examples/go/on-event/main.go
index e4605b1d5..73ade3c21 100644
--- a/examples/go/on-event/main.go
+++ b/examples/go/on-event/main.go
@@ -40,7 +40,6 @@ func Lower(client *hatchet.Client) *hatchet.StandaloneTask {
)
}
-
// > Accessing the filter payload
func accessFilterPayload(ctx hatchet.Context, input EventInput) (*LowerTaskOutput, error) {
fmt.Println(ctx.FilterPayload())
@@ -49,7 +48,6 @@ func accessFilterPayload(ctx hatchet.Context, input EventInput) (*LowerTaskOutpu
}, nil
}
-
// > Declare with filter
func LowerWithFilter(client *hatchet.Client) *hatchet.StandaloneTask {
return client.NewStandaloneTask(
@@ -66,7 +64,6 @@ func LowerWithFilter(client *hatchet.Client) *hatchet.StandaloneTask {
)
}
-
func Upper(client *hatchet.Client) *hatchet.StandaloneTask {
return client.NewStandaloneTask(
"upper", func(ctx hatchet.Context, input EventInput) (*UpperTaskOutput, error) {
diff --git a/examples/go/sticky-workers/main.go b/examples/go/sticky-workers/main.go
index def4bfe80..f34306d70 100644
--- a/examples/go/sticky-workers/main.go
+++ b/examples/go/sticky-workers/main.go
@@ -48,7 +48,6 @@ func StickyDag(client *hatchet.Client) *hatchet.Workflow {
return stickyDag
}
-
type ChildInput struct {
N int `json:"n"`
}
@@ -91,4 +90,3 @@ func Sticky(client *hatchet.Client) *hatchet.StandaloneTask {
return sticky
}
-
diff --git a/examples/go/streaming/consumer/main.go b/examples/go/streaming/consumer/main.go
index 59d120e44..358a10c1d 100644
--- a/examples/go/streaming/consumer/main.go
+++ b/examples/go/streaming/consumer/main.go
@@ -34,4 +34,3 @@ func main() {
fmt.Println("\nStreaming completed!")
}
-
diff --git a/examples/go/streaming/server/main.go b/examples/go/streaming/server/main.go
index 4438a5f19..c527918e4 100644
--- a/examples/go/streaming/server/main.go
+++ b/examples/go/streaming/server/main.go
@@ -54,4 +54,3 @@ func main() {
log.Println("Failed to start server:", err)
}
}
-
diff --git a/examples/go/streaming/shared/task.go b/examples/go/streaming/shared/task.go
index 8b6070a47..adce03d0e 100644
--- a/examples/go/streaming/shared/task.go
+++ b/examples/go/streaming/shared/task.go
@@ -46,7 +46,6 @@ func StreamTask(ctx hatchet.Context, input StreamTaskInput) (*StreamTaskOutput,
}, nil
}
-
func StreamingWorkflow(client *hatchet.Client) *hatchet.StandaloneTask {
return client.NewStandaloneTask("stream-example", StreamTask)
}
diff --git a/examples/go/webhooks/main.go b/examples/go/webhooks/main.go
index a7f8d077e..a9996e51a 100644
--- a/examples/go/webhooks/main.go
+++ b/examples/go/webhooks/main.go
@@ -59,8 +59,9 @@ func main() {
// Update the webhook
fmt.Println("\nUpdating webhook...")
+ eventKeyExpr := "body.type"
updated, err := client.Webhooks().Update(ctx, basicWebhook.Name, features.UpdateWebhookOpts{
- EventKeyExpression: "body.type",
+ EventKeyExpression: &eventKeyExpr,
})
if err != nil {
log.Fatalf("failed to update webhook: %v", err)
diff --git a/examples/python/quickstart/poetry.lock b/examples/python/quickstart/poetry.lock
index c5d57ba76..2d8d7c34b 100644
--- a/examples/python/quickstart/poetry.lock
+++ b/examples/python/quickstart/poetry.lock
@@ -473,14 +473,14 @@ setuptools = "*"
[[package]]
name = "hatchet-sdk"
-version = "1.23.2"
+version = "1.24.0"
description = "This is the official Python SDK for Hatchet, a distributed, fault-tolerant task queue. The SDK allows you to easily integrate Hatchet's task scheduling and workflow orchestration capabilities into your Python applications."
optional = false
python-versions = "<4.0,>=3.10"
groups = ["main"]
files = [
- {file = "hatchet_sdk-1.23.2-py3-none-any.whl", hash = "sha256:95aa0f330527fa0a64adb1d9c758ae2161beb159b0ad54665eb0f1018c2d880f"},
- {file = "hatchet_sdk-1.23.2.tar.gz", hash = "sha256:54e9120341ad464c1bb57db76fb29c06ff77df5afd935b22e4bd8c3586ed9f93"},
+ {file = "hatchet_sdk-1.24.0-py3-none-any.whl", hash = "sha256:6719947bcf3ee954966f5c403f3217b05f3a8829a54eddc3a12c982863d53c4c"},
+ {file = "hatchet_sdk-1.24.0.tar.gz", hash = "sha256:e39bdb4e7013e98f5354dba046cfe14f9284bf835a2f0ca67613efadcac3e180"},
]
[package.dependencies]
@@ -1125,4 +1125,4 @@ propcache = ">=0.2.0"
[metadata]
lock-version = "2.1"
python-versions = "^3.10"
-content-hash = "b1e5494e65f47bb499caeffe3e70a906f1b1da98fd8e15182fe863138eada31f"
+content-hash = "665009b2127a5e046ab48cb29fac59dd00bf17ab45f53a8c897bf8bf62d6bc57"
diff --git a/examples/python/quickstart/pyproject.toml b/examples/python/quickstart/pyproject.toml
index 7419af637..acf7292af 100644
--- a/examples/python/quickstart/pyproject.toml
+++ b/examples/python/quickstart/pyproject.toml
@@ -8,7 +8,7 @@ package-mode = false
[tool.poetry.dependencies]
python = "^3.10"
-hatchet-sdk = "1.23.2"
+hatchet-sdk = "1.24.0"
[build-system]
diff --git a/examples/python/return_exceptions/worker.py b/examples/python/return_exceptions/worker.py
index 10f5db8fa..fdae2f085 100644
--- a/examples/python/return_exceptions/worker.py
+++ b/examples/python/return_exceptions/worker.py
@@ -1,5 +1,3 @@
-from pydantic import BaseModel
-
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
@@ -15,3 +13,34 @@ async def return_exceptions_task(input: Input, ctx: Context) -> dict[str, str]:
raise ValueError(f"error in task with index {input.index}")
return {"message": "this is a successful task."}
+
+
+exception_parsing_workflow = hatchet.workflow(name="ExceptionParsingWorkflow")
+
+
+@exception_parsing_workflow.task()
+async def exception_class_no_name_task(input: EmptyModel, ctx: Context) -> None:
+ class CustomNoNamedException(Exception): ...
+
+ CustomNoNamedException.__name__ = ""
+ raise CustomNoNamedException
+
+
+@exception_parsing_workflow.task()
+async def exception_class_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_no_args_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError()
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_falsy_arg_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError("")
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_truthy_arg_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError("Oh no!")
diff --git a/examples/python/worker.py b/examples/python/worker.py
index 1a2cd8e9a..6265e26f1 100644
--- a/examples/python/worker.py
+++ b/examples/python/worker.py
@@ -39,7 +39,10 @@ from examples.lifespans.simple import lifespan, lifespan_task
from examples.logger.workflow import logging_workflow
from examples.non_retryable.worker import non_retryable_workflow
from examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details
-from examples.return_exceptions.worker import return_exceptions_task
+from examples.return_exceptions.worker import (
+ exception_parsing_workflow,
+ return_exceptions_task,
+)
from examples.run_details.worker import run_detail_test_workflow
from examples.serde.worker import serde_workflow
from examples.simple.worker import simple, simple_durable
@@ -98,6 +101,7 @@ def main() -> None:
webhook_with_scope,
webhook_with_static_payload,
return_exceptions_task,
+ exception_parsing_workflow,
wait_for_sleep_twice,
async_task_with_dependencies,
sync_task_with_dependencies,
diff --git a/examples/ruby/affinity_workers/worker.rb b/examples/ruby/affinity_workers/worker.rb
new file mode 100644
index 000000000..abdaa896c
--- /dev/null
+++ b/examples/ruby/affinity_workers/worker.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > AffinityWorkflow
+AFFINITY_WORKER_WORKFLOW = HATCHET.workflow(name: "AffinityWorkflow")
+
+
+# > AffinityTask
+AFFINITY_WORKER_WORKFLOW.task(
+ :step,
+ desired_worker_labels: {
+ "model" => Hatchet::DesiredWorkerLabel.new(value: "fancy-ai-model-v2", weight: 10),
+ "memory" => Hatchet::DesiredWorkerLabel.new(
+ value: 256,
+ required: true,
+ comparator: :less_than
+ )
+ }
+) do |input, ctx|
+ if ctx.worker.labels["model"] != "fancy-ai-model-v2"
+ ctx.worker.upsert_labels("model" => "unset")
+ # DO WORK TO EVICT OLD MODEL / LOAD NEW MODEL
+ ctx.worker.upsert_labels("model" => "fancy-ai-model-v2")
+ end
+
+ { "worker" => ctx.worker.id }
+end
+
+
+# > AffinityWorker
+def main
+ worker = HATCHET.worker(
+ "affinity-worker",
+ slots: 10,
+ labels: {
+ "model" => "fancy-ai-model-v2",
+ "memory" => 512
+ },
+ workflows: [AFFINITY_WORKER_WORKFLOW]
+ )
+ worker.start
+end
+
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/api/worker.rb b/examples/ruby/api/worker.rb
new file mode 100644
index 000000000..5d6336310
--- /dev/null
+++ b/examples/ruby/api/worker.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+def main
+ workflow_list = HATCHET.workflows.list
+ rows = workflow_list.rows || []
+
+ rows.each do |workflow|
+ puts workflow.name
+ puts workflow.metadata.id
+ puts workflow.metadata.created_at
+ puts workflow.metadata.updated_at
+ end
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/bulk_fanout/worker.rb b/examples/ruby/bulk_fanout/worker.rb
new file mode 100644
index 000000000..d7cc5209d
--- /dev/null
+++ b/examples/ruby/bulk_fanout/worker.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > BulkFanoutParent
+BULK_PARENT_WF = HATCHET.workflow(name: "BulkFanoutParent")
+BULK_CHILD_WF = HATCHET.workflow(name: "BulkFanoutChild")
+
+BULK_PARENT_WF.task(:spawn, execution_timeout: 300) do |input, ctx|
+ n = input["n"] || 100
+
+ # Create each workflow run to spawn
+ child_workflow_runs = n.times.map do |i|
+ BULK_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s },
+ key: "child#{i}",
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "hello" => "earth" }
+ )
+ )
+ end
+
+ # Run workflows in bulk to improve performance
+ spawn_results = BULK_CHILD_WF.run_many(child_workflow_runs)
+
+ { "results" => spawn_results }
+end
+
+BULK_CHILD_WF.task(:process) do |input, ctx|
+ puts "child process #{input['a']}"
+ { "status" => "success #{input['a']}" }
+end
+
+BULK_CHILD_WF.task(:process2) do |input, ctx|
+ puts "child process2"
+ { "status2" => "success" }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "fanout-worker", slots: 40, workflows: [BULK_PARENT_WF, BULK_CHILD_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/bulk_operations/cancel.rb b/examples/ruby/bulk_operations/cancel.rb
new file mode 100644
index 000000000..9b719bc97
--- /dev/null
+++ b/examples/ruby/bulk_operations/cancel.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+# > Setup
+hatchet = Hatchet::Client.new
+
+workflows = hatchet.workflows.list
+
+workflow = workflows.rows.first
+
+# > List runs
+workflow_runs = hatchet.runs.list(workflow_ids: [workflow.metadata.id])
+
+# > Cancel by run ids
+workflow_run_ids = workflow_runs.rows.map { |run| run.metadata.id }
+
+hatchet.runs.bulk_cancel(ids: workflow_run_ids)
+
+# > Cancel by filters
+hatchet.runs.bulk_cancel(
+ since: Time.now - 86_400,
+ until_time: Time.now,
+ statuses: ["RUNNING"],
+ workflow_ids: [workflow.metadata.id],
+ additional_metadata: { "key" => "value" }
+)
diff --git a/examples/ruby/bulk_operations/worker.rb b/examples/ruby/bulk_operations/worker.rb
new file mode 100644
index 000000000..1459f41d6
--- /dev/null
+++ b/examples/ruby/bulk_operations/worker.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+BULK_REPLAY_TEST_1 = HATCHET.task(name: "bulk_replay_test_1") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+BULK_REPLAY_TEST_2 = HATCHET.task(name: "bulk_replay_test_2") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+BULK_REPLAY_TEST_3 = HATCHET.task(name: "bulk_replay_test_3") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+def main
+ worker = HATCHET.worker(
+ "bulk-replay-test-worker",
+ workflows: [BULK_REPLAY_TEST_1, BULK_REPLAY_TEST_2, BULK_REPLAY_TEST_3]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/cancellation/worker.rb b/examples/ruby/cancellation/worker.rb
new file mode 100644
index 000000000..4bf57d417
--- /dev/null
+++ b/examples/ruby/cancellation/worker.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CANCELLATION_WORKFLOW = HATCHET.workflow(name: "CancelWorkflow")
+
+# > Self-cancelling task
+CANCELLATION_WORKFLOW.task(:self_cancel) do |input, ctx|
+ sleep 2
+
+ ## Cancel the task
+ ctx.cancel
+
+ sleep 10
+
+ { "error" => "Task should have been cancelled" }
+end
+
+
+# > Checking exit flag
+CANCELLATION_WORKFLOW.task(:check_flag) do |input, ctx|
+ 3.times do
+ sleep 1
+
+ # Note: Checking the status of the exit flag is mostly useful for cancelling
+ # sync tasks without needing to forcibly kill the thread they're running on.
+ if ctx.cancelled?
+ puts "Task has been cancelled"
+ raise "Task has been cancelled"
+ end
+ end
+
+ { "error" => "Task should have been cancelled" }
+end
+
+
+def main
+ worker = HATCHET.worker("cancellation-worker", workflows: [CANCELLATION_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/child/bulk.rb b/examples/ruby/child/bulk.rb
new file mode 100644
index 000000000..491b67bad
--- /dev/null
+++ b/examples/ruby/child/bulk.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Bulk run a task
+greetings = ["Hello, World!", "Hello, Moon!", "Hello, Mars!"]
+
+results = CHILD_TASK_WF.run_many(
+ greetings.map do |greeting|
+ CHILD_TASK_WF.create_bulk_run_item(
+ input: { "message" => greeting }
+ )
+ end
+)
+
+puts results
diff --git a/examples/ruby/child/simple_fanout.rb b/examples/ruby/child/simple_fanout.rb
new file mode 100644
index 000000000..4477bfd41
--- /dev/null
+++ b/examples/ruby/child/simple_fanout.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require_relative "worker"
+
+hatchet = Hatchet::Client.new
+
+# > Running a task from within a task
+SPAWN_TASK = hatchet.task(name: "SpawnTask") do |input, ctx|
+ result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+ { "results" => result }
+end
diff --git a/examples/ruby/child/trigger.rb b/examples/ruby/child/trigger.rb
new file mode 100644
index 000000000..48efd7559
--- /dev/null
+++ b/examples/ruby/child/trigger.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Running a task
+result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+
+# > Running a task aio
+# In Ruby, run is synchronous
+result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+
+# > Running multiple tasks
+results = CHILD_TASK_WF.run_many(
+ [
+ CHILD_TASK_WF.create_bulk_run_item(input: { "message" => "Hello, World!" }),
+ CHILD_TASK_WF.create_bulk_run_item(input: { "message" => "Hello, Moon!" })
+ ]
+)
+puts results
diff --git a/examples/ruby/child/worker.rb b/examples/ruby/child/worker.rb
new file mode 100644
index 000000000..24e829aa2
--- /dev/null
+++ b/examples/ruby/child/worker.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+# > Simple
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CHILD_TASK_WF = HATCHET.workflow(name: "SimpleWorkflow")
+
+CHILD_TASK_WF.task(:step1) do |input, ctx|
+ puts "executed step1: #{input['message']}"
+ { "transformed_message" => input["message"].upcase }
+end
+
+
+def main
+ worker = HATCHET.worker("test-worker", slots: 1, workflows: [CHILD_TASK_WF])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/concurrency_cancel_in_progress/worker.rb b/examples/ruby/concurrency_cancel_in_progress/worker.rb
new file mode 100644
index 000000000..e66f5f30f
--- /dev/null
+++ b/examples/ruby/concurrency_cancel_in_progress/worker.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyCancelInProgress",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :cancel_in_progress
+ )
+)
+
+STEP1_CIP = CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW.task(:step1) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
+
+CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW.task(:step2, parents: [STEP1_CIP]) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
diff --git a/examples/ruby/concurrency_cancel_newest/worker.rb b/examples/ruby/concurrency_cancel_newest/worker.rb
new file mode 100644
index 000000000..daa1749b4
--- /dev/null
+++ b/examples/ruby/concurrency_cancel_newest/worker.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CONCURRENCY_CANCEL_NEWEST_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyCancelNewest",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :cancel_newest
+ )
+)
+
+STEP1_CN = CONCURRENCY_CANCEL_NEWEST_WORKFLOW.task(:step1) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
+
+CONCURRENCY_CANCEL_NEWEST_WORKFLOW.task(:step2, parents: [STEP1_CN]) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
diff --git a/examples/ruby/concurrency_limit/worker.rb b/examples/ruby/concurrency_limit/worker.rb
new file mode 100644
index 000000000..e4fa44504
--- /dev/null
+++ b/examples/ruby/concurrency_limit/worker.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Workflow
+CONCURRENCY_LIMIT_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyDemoWorkflow",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group_key",
+ max_runs: 5,
+ limit_strategy: :cancel_in_progress
+ )
+)
+
+CONCURRENCY_LIMIT_WORKFLOW.task(:step1) do |input, ctx|
+ sleep 3
+ puts "executed step1"
+ { "run" => input["run"] }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-demo-worker", slots: 10, workflows: [CONCURRENCY_LIMIT_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/concurrency_limit_rr/worker.rb b/examples/ruby/concurrency_limit_rr/worker.rb
new file mode 100644
index 000000000..f81d98e7a
--- /dev/null
+++ b/examples/ruby/concurrency_limit_rr/worker.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Concurrency Strategy With Key
+CONCURRENCY_LIMIT_RR_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyDemoWorkflowRR",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :group_round_robin
+ )
+)
+
+CONCURRENCY_LIMIT_RR_WORKFLOW.task(:step1) do |input, ctx|
+ puts "starting step1"
+ sleep 2
+ puts "finished step1"
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-demo-worker-rr",
+ slots: 10,
+ workflows: [CONCURRENCY_LIMIT_RR_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/concurrency_multiple_keys/worker.rb b/examples/ruby/concurrency_multiple_keys/worker.rb
new file mode 100644
index 000000000..a53db3f46
--- /dev/null
+++ b/examples/ruby/concurrency_multiple_keys/worker.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SLEEP_TIME_MK = 2
+DIGIT_MAX_RUNS = 8
+NAME_MAX_RUNS = 3
+
+# > Concurrency Strategy With Key
+CONCURRENCY_MULTIPLE_KEYS_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyWorkflowManyKeys"
+)
+
+CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.task(
+ :concurrency_task,
+ concurrency: [
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.digit",
+ max_runs: DIGIT_MAX_RUNS,
+ limit_strategy: :group_round_robin
+ ),
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.name",
+ max_runs: NAME_MAX_RUNS,
+ limit_strategy: :group_round_robin
+ )
+ ]
+) do |input, ctx|
+ sleep SLEEP_TIME_MK
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-worker-multiple-keys",
+ slots: 10,
+ workflows: [CONCURRENCY_MULTIPLE_KEYS_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/concurrency_workflow_level/worker.rb b/examples/ruby/concurrency_workflow_level/worker.rb
new file mode 100644
index 000000000..24caf5d17
--- /dev/null
+++ b/examples/ruby/concurrency_workflow_level/worker.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SLEEP_TIME_WL = 2
+DIGIT_MAX_RUNS_WL = 8
+NAME_MAX_RUNS_WL = 3
+
+# > Multiple Concurrency Keys
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyWorkflowLevel",
+ concurrency: [
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.digit",
+ max_runs: DIGIT_MAX_RUNS_WL,
+ limit_strategy: :group_round_robin
+ ),
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.name",
+ max_runs: NAME_MAX_RUNS_WL,
+ limit_strategy: :group_round_robin
+ )
+ ]
+)
+
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.task(:task_1) do |input, ctx|
+ sleep SLEEP_TIME_WL
+end
+
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.task(:task_2) do |input, ctx|
+ sleep SLEEP_TIME_WL
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-worker-workflow-level",
+ slots: 10,
+ workflows: [CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/conditions/worker.rb b/examples/ruby/conditions/worker.rb
new file mode 100644
index 000000000..4c33bf5e5
--- /dev/null
+++ b/examples/ruby/conditions/worker.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+# > Create a workflow
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+TASK_CONDITION_WORKFLOW = HATCHET.workflow(name: "TaskConditionWorkflow")
+
+
+# > Add base task
+COND_START = TASK_CONDITION_WORKFLOW.task(:start) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add wait for sleep
+WAIT_FOR_SLEEP = TASK_CONDITION_WORKFLOW.task(
+ :wait_for_sleep,
+ parents: [COND_START],
+ wait_for: [Hatchet::SleepCondition.new(10)]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add skip condition override
+TASK_CONDITION_WORKFLOW.task(
+ :skip_with_multiple_parents,
+ parents: [COND_START, WAIT_FOR_SLEEP],
+ skip_if: [Hatchet::ParentCondition.new(parent: COND_START, expression: "output.random_number > 0")]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add skip on event
+SKIP_ON_EVENT = TASK_CONDITION_WORKFLOW.task(
+ :skip_on_event,
+ parents: [COND_START],
+ wait_for: [Hatchet::SleepCondition.new(30)],
+ skip_if: [Hatchet::UserEventCondition.new(event_key: "skip_on_event:skip")]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add branching
+LEFT_BRANCH = TASK_CONDITION_WORKFLOW.task(
+ :left_branch,
+ parents: [WAIT_FOR_SLEEP],
+ skip_if: [
+ Hatchet::ParentCondition.new(
+ parent: WAIT_FOR_SLEEP,
+ expression: "output.random_number > 50"
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+RIGHT_BRANCH = TASK_CONDITION_WORKFLOW.task(
+ :right_branch,
+ parents: [WAIT_FOR_SLEEP],
+ skip_if: [
+ Hatchet::ParentCondition.new(
+ parent: WAIT_FOR_SLEEP,
+ expression: "output.random_number <= 50"
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add wait for event
+WAIT_FOR_EVENT = TASK_CONDITION_WORKFLOW.task(
+ :wait_for_event,
+ parents: [COND_START],
+ wait_for: [
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(60),
+ Hatchet::UserEventCondition.new(event_key: "wait_for_event:start")
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Add sum
+TASK_CONDITION_WORKFLOW.task(
+ :sum,
+ parents: [COND_START, WAIT_FOR_SLEEP, WAIT_FOR_EVENT, SKIP_ON_EVENT, LEFT_BRANCH, RIGHT_BRANCH]
+) do |input, ctx|
+ one = ctx.task_output(COND_START)["random_number"]
+ two = ctx.task_output(WAIT_FOR_EVENT)["random_number"]
+ three = ctx.task_output(WAIT_FOR_SLEEP)["random_number"]
+ four = ctx.was_skipped?(SKIP_ON_EVENT) ? 0 : ctx.task_output(SKIP_ON_EVENT)["random_number"]
+ five = ctx.was_skipped?(LEFT_BRANCH) ? 0 : ctx.task_output(LEFT_BRANCH)["random_number"]
+ six = ctx.was_skipped?(RIGHT_BRANCH) ? 0 : ctx.task_output(RIGHT_BRANCH)["random_number"]
+
+ { "sum" => one + two + three + four + five + six }
+end
+
+
+def main
+ worker = HATCHET.worker("dag-worker", workflows: [TASK_CONDITION_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/cron/programatic_sync.rb b/examples/ruby/cron/programatic_sync.rb
new file mode 100644
index 000000000..4164bae86
--- /dev/null
+++ b/examples/ruby/cron/programatic_sync.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+dynamic_cron_workflow = hatchet.workflow(name: "DynamicCronWorkflow")
+
+# > Create
+cron_trigger = dynamic_cron_workflow.create_cron(
+ "customer-a-daily-report",
+ "0 12 * * *",
+ input: { "name" => "John Doe" }
+)
+
+id = cron_trigger.metadata.id
+
+# > List
+cron_triggers = hatchet.cron.list
+
+# > Delete
+hatchet.cron.delete(cron_trigger.metadata.id)
diff --git a/examples/ruby/cron/worker.rb b/examples/ruby/cron/worker.rb
new file mode 100644
index 000000000..d1eb05da7
--- /dev/null
+++ b/examples/ruby/cron/worker.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Cron Workflow Definition
+CRON_WORKFLOW = HATCHET.workflow(
+ name: "CronWorkflow",
+ on_crons: ["*/5 * * * *"]
+)
+
+CRON_WORKFLOW.task(:cron_task) do |input, ctx|
+ puts "Cron task executed at #{Time.now}"
+ { "status" => "success" }
+end
+
+
+# > Programmatic Cron Creation
+def create_cron
+ HATCHET.cron.create(
+ workflow_name: "CronWorkflow",
+ cron_name: "my-programmatic-cron",
+ expression: "*/10 * * * *",
+ input: { "message" => "hello from cron" }
+ )
+end
+
+
+def main
+ worker = HATCHET.worker("cron-worker", workflows: [CRON_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/dag/trigger.rb b/examples/ruby/dag/trigger.rb
new file mode 100644
index 000000000..d23641a14
--- /dev/null
+++ b/examples/ruby/dag/trigger.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Trigger the DAG
+result = DAG_WORKFLOW.run
+puts result
diff --git a/examples/ruby/dag/worker.rb b/examples/ruby/dag/worker.rb
new file mode 100644
index 000000000..ededf9b77
--- /dev/null
+++ b/examples/ruby/dag/worker.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Define a DAG
+DAG_WORKFLOW = HATCHET.workflow(name: "DAGWorkflow")
+
+
+# > First task
+STEP1 = DAG_WORKFLOW.task(:step1, execution_timeout: 5) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+STEP2 = DAG_WORKFLOW.task(:step2, execution_timeout: 5) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+
+# > Task with parents
+DAG_WORKFLOW.task(:step3, parents: [STEP1, STEP2]) do |input, ctx|
+ one = ctx.task_output(STEP1)["random_number"]
+ two = ctx.task_output(STEP2)["random_number"]
+
+ { "sum" => one + two }
+end
+
+DAG_WORKFLOW.task(:step4, parents: [STEP1, :step3]) do |input, ctx|
+ puts(
+ "executed step4",
+ Time.now.strftime("%H:%M:%S"),
+ input.inspect,
+ ctx.task_output(STEP1).inspect,
+ ctx.task_output(:step3).inspect
+ )
+
+ { "step4" => "step4" }
+end
+
+
+# > Declare a worker
+def main
+ worker = HATCHET.worker("dag-worker", workflows: [DAG_WORKFLOW])
+ worker.start
+end
+
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/dataclasses/worker.rb b/examples/ruby/dataclasses/worker.rb
new file mode 100644
index 000000000..4a05e8908
--- /dev/null
+++ b/examples/ruby/dataclasses/worker.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Task using Struct-based input
+# Ruby equivalent of Python dataclass -- use plain hashes
+SAY_HELLO = HATCHET.task(name: "say_hello") do |input, ctx|
+ { "message" => "Hello, #{input['name']}!" }
+end
+
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SAY_HELLO])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/dedupe/worker.rb b/examples/ruby/dedupe/worker.rb
new file mode 100644
index 000000000..c02e73f9d
--- /dev/null
+++ b/examples/ruby/dedupe/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+DEDUPE_PARENT_WF = HATCHET.workflow(name: "DedupeParent")
+DEDUPE_CHILD_WF = HATCHET.workflow(name: "DedupeChild")
+
+DEDUPE_PARENT_WF.task(:spawn, execution_timeout: 60) do |input, ctx|
+ puts "spawning child"
+
+ results = []
+
+ 2.times do |i|
+ begin
+ results << DEDUPE_CHILD_WF.run(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "dedupe" => "test" },
+ key: "child#{i}"
+ )
+ )
+ rescue Hatchet::DedupeViolationError => e
+ puts "dedupe violation #{e}"
+ next
+ end
+ end
+
+ puts "results #{results}"
+ { "results" => results }
+end
+
+DEDUPE_CHILD_WF.task(:process) do |input, ctx|
+ sleep 3
+ puts "child process"
+ { "status" => "success" }
+end
+
+DEDUPE_CHILD_WF.task(:process2) do |input, ctx|
+ puts "child process2"
+ { "status2" => "success" }
+end
+
+def main
+ worker = HATCHET.worker(
+ "fanout-worker", slots: 100, workflows: [DEDUPE_PARENT_WF, DEDUPE_CHILD_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/delayed/worker.rb b/examples/ruby/delayed/worker.rb
new file mode 100644
index 000000000..48e465a46
--- /dev/null
+++ b/examples/ruby/delayed/worker.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+PRINT_SCHEDULE_WF = HATCHET.workflow(name: "PrintScheduleWorkflow")
+PRINT_PRINTER_WF = HATCHET.workflow(name: "PrintPrinterWorkflow")
+
+PRINT_SCHEDULE_WF.task(:schedule) do |input, ctx|
+ now = Time.now.utc
+ puts "the time is \t #{now.strftime('%H:%M:%S')}"
+ future_time = now + 15
+ puts "scheduling for \t #{future_time.strftime('%H:%M:%S')}"
+
+ PRINT_PRINTER_WF.schedule(future_time, input: input)
+end
+
+PRINT_PRINTER_WF.task(:step1) do |input, ctx|
+ now = Time.now.utc
+ puts "printed at \t #{now.strftime('%H:%M:%S')}"
+ puts "message \t #{input['message']}"
+end
+
+def main
+ worker = HATCHET.worker(
+ "delayed-worker", slots: 4, workflows: [PRINT_SCHEDULE_WF, PRINT_PRINTER_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/dependency_injection/worker.rb b/examples/ruby/dependency_injection/worker.rb
new file mode 100644
index 000000000..390380050
--- /dev/null
+++ b/examples/ruby/dependency_injection/worker.rb
@@ -0,0 +1,140 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+SYNC_DEPENDENCY_VALUE = "sync_dependency_value"
+ASYNC_DEPENDENCY_VALUE = "async_dependency_value"
+SYNC_CM_DEPENDENCY_VALUE = "sync_cm_dependency_value"
+ASYNC_CM_DEPENDENCY_VALUE = "async_cm_dependency_value"
+CHAINED_CM_VALUE = "chained_cm_value"
+CHAINED_ASYNC_CM_VALUE = "chained_async_cm_value"
+
+# > Declare dependencies (Ruby uses callable objects instead of Python's Depends)
+sync_dep = ->(_input, _ctx) { SYNC_DEPENDENCY_VALUE }
+async_dep = ->(_input, _ctx) { ASYNC_DEPENDENCY_VALUE }
+
+sync_cm_dep = lambda { |_input, _ctx, deps|
+ "#{SYNC_CM_DEPENDENCY_VALUE}_#{deps[:sync_dep]}"
+}
+
+async_cm_dep = lambda { |_input, _ctx, deps|
+ "#{ASYNC_CM_DEPENDENCY_VALUE}_#{deps[:async_dep]}"
+}
+
+chained_dep = ->(_input, _ctx, deps) { "chained_#{CHAINED_CM_VALUE}" }
+chained_async_dep = ->(_input, _ctx, deps) { "chained_#{CHAINED_ASYNC_CM_VALUE}" }
+
+
+# > Inject dependencies
+ASYNC_TASK_WITH_DEPS = HATCHET.task(
+ name: "async_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+SYNC_TASK_WITH_DEPS = HATCHET.task(
+ name: "sync_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DURABLE_ASYNC_TASK_WITH_DEPS = HATCHET.durable_task(
+ name: "durable_async_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DURABLE_SYNC_TASK_WITH_DEPS = HATCHET.durable_task(
+ name: "durable_sync_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DI_WORKFLOW = HATCHET.workflow(name: "dependency-injection-workflow")
+
+# Workflow tasks with dependencies follow the same pattern
+DI_WORKFLOW.task(:wf_task_with_dependencies) do |input, ctx|
+ {
+ "sync_dep" => SYNC_DEPENDENCY_VALUE,
+ "async_dep" => ASYNC_DEPENDENCY_VALUE
+ }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "dependency-injection-worker",
+ workflows: [
+ ASYNC_TASK_WITH_DEPS,
+ SYNC_TASK_WITH_DEPS,
+ DURABLE_ASYNC_TASK_WITH_DEPS,
+ DURABLE_SYNC_TASK_WITH_DEPS,
+ DI_WORKFLOW
+ ]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/durable/worker.rb b/examples/ruby/durable/worker.rb
new file mode 100644
index 000000000..925c9557a
--- /dev/null
+++ b/examples/ruby/durable/worker.rb
@@ -0,0 +1,109 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require "securerandom"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Create a durable workflow
+DURABLE_WORKFLOW = HATCHET.workflow(name: "DurableWorkflow")
+EPHEMERAL_WORKFLOW = HATCHET.workflow(name: "EphemeralWorkflow")
+
+
+# > Add durable task
+DURABLE_EVENT_KEY = "durable-example:event"
+DURABLE_SLEEP_TIME = 5
+
+DURABLE_WORKFLOW.task(:ephemeral_task) do |input, ctx|
+ puts "Running non-durable task"
+end
+
+DURABLE_WORKFLOW.durable_task(:durable_task, execution_timeout: 60) do |input, ctx|
+ puts "Waiting for sleep"
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+ puts "Sleep finished"
+
+ puts "Waiting for event"
+ ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY, expression: "true")
+ )
+ puts "Event received"
+
+ { "status" => "success" }
+end
+
+
+# > Add durable tasks that wait for or groups
+DURABLE_WORKFLOW.durable_task(:wait_for_or_group_1, execution_timeout: 60) do |input, ctx|
+ start = Time.now
+ wait_result = ctx.wait_for(
+ SecureRandom.hex(16),
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(DURABLE_SLEEP_TIME),
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY)
+ )
+ )
+
+ key = wait_result.keys.first
+ event_id = wait_result[key].keys.first
+
+ {
+ "runtime" => (Time.now - start).to_i,
+ "key" => key,
+ "event_id" => event_id
+ }
+end
+
+DURABLE_WORKFLOW.durable_task(:wait_for_or_group_2, execution_timeout: 120) do |input, ctx|
+ start = Time.now
+ wait_result = ctx.wait_for(
+ SecureRandom.hex(16),
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(6 * DURABLE_SLEEP_TIME),
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY)
+ )
+ )
+
+ key = wait_result.keys.first
+ event_id = wait_result[key].keys.first
+
+ {
+ "runtime" => (Time.now - start).to_i,
+ "key" => key,
+ "event_id" => event_id
+ }
+end
+
+DURABLE_WORKFLOW.durable_task(:wait_for_multi_sleep, execution_timeout: 120) do |input, ctx|
+ start = Time.now
+
+ 3.times do
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+ end
+
+ { "runtime" => (Time.now - start).to_i }
+end
+
+EPHEMERAL_WORKFLOW.task(:ephemeral_task_2) do |input, ctx|
+ puts "Running non-durable task"
+end
+
+WAIT_FOR_SLEEP_TWICE = HATCHET.durable_task(name: "wait_for_sleep_twice", execution_timeout: 60) do |input, ctx|
+ start = Time.now
+
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+
+ { "runtime" => (Time.now - start).to_i }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "durable-worker",
+ workflows: [DURABLE_WORKFLOW, EPHEMERAL_WORKFLOW, WAIT_FOR_SLEEP_TWICE]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/durable_event/worker.rb b/examples/ruby/durable_event/worker.rb
new file mode 100644
index 000000000..c40a841f1
--- /dev/null
+++ b/examples/ruby/durable_event/worker.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+DURABLE_EVENT_TASK_KEY = "user:update"
+
+# > Durable Event
+DURABLE_EVENT_TASK = HATCHET.durable_task(name: "DurableEventTask") do |input, ctx|
+ res = ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(event_key: "user:update")
+ )
+
+ puts "got event #{res}"
+end
+
+DURABLE_EVENT_TASK_WITH_FILTER = HATCHET.durable_task(name: "DurableEventWithFilterTask") do |input, ctx|
+
+ # > Durable Event With Filter
+ res = ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(
+ event_key: "user:update",
+ expression: "input.user_id == '1234'"
+ )
+ )
+
+ puts "got event #{res}"
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "durable-event-worker",
+ workflows: [DURABLE_EVENT_TASK, DURABLE_EVENT_TASK_WITH_FILTER]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/durable_sleep/worker.rb b/examples/ruby/durable_sleep/worker.rb
new file mode 100644
index 000000000..ea355e738
--- /dev/null
+++ b/examples/ruby/durable_sleep/worker.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Durable Sleep
+DURABLE_SLEEP_TASK = HATCHET.durable_task(name: "DurableSleepTask") do |input, ctx|
+ res = ctx.sleep_for(duration: 5)
+
+ puts "got result #{res}"
+end
+
+
+def main
+ worker = HATCHET.worker("durable-sleep-worker", workflows: [DURABLE_SLEEP_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/events/event.rb b/examples/ruby/events/event.rb
new file mode 100644
index 000000000..144dd7229
--- /dev/null
+++ b/examples/ruby/events/event.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Event trigger
+HATCHET.event.push("user:create", { "should_skip" => false })
+
+# > Event trigger with metadata
+HATCHET.event.push(
+ "user:create",
+ { "userId" => "1234", "should_skip" => false },
+ additional_metadata: { "source" => "api" }
+)
diff --git a/examples/ruby/events/filter.rb b/examples/ruby/events/filter.rb
new file mode 100644
index 000000000..59a85dae7
--- /dev/null
+++ b/examples/ruby/events/filter.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require_relative "worker"
+
+HATCHET_CLIENT = Hatchet::Client.new
+
+# > Create a filter
+HATCHET_CLIENT.filters.create(
+ workflow_id: EVENT_WORKFLOW.id,
+ expression: "input.should_skip == false",
+ scope: "foobarbaz",
+ payload: {
+ "main_character" => "Anna",
+ "supporting_character" => "Stiva",
+ "location" => "Moscow"
+ }
+)
+
+# > Skip a run
+HATCHET_CLIENT.event.push(
+ EVENT_KEY,
+ { "should_skip" => true },
+ scope: "foobarbaz"
+)
+
+# > Trigger a run
+HATCHET_CLIENT.event.push(
+ EVENT_KEY,
+ { "should_skip" => false },
+ scope: "foobarbaz"
+)
diff --git a/examples/ruby/events/worker.rb b/examples/ruby/events/worker.rb
new file mode 100644
index 000000000..e1859c400
--- /dev/null
+++ b/examples/ruby/events/worker.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Event trigger
+EVENT_KEY = "user:create"
+SECONDARY_KEY = "foobarbaz"
+WILDCARD_KEY = "subscription:*"
+
+EVENT_WORKFLOW = HATCHET.workflow(
+ name: "EventWorkflow",
+ on_events: [EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY]
+)
+
+
+# > Event trigger with filter
+EVENT_WORKFLOW_WITH_FILTER = HATCHET.workflow(
+ name: "EventWorkflowWithFilter",
+ on_events: [EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],
+ default_filters: [
+ Hatchet::DefaultFilter.new(
+ expression: "true",
+ scope: "example-scope",
+ payload: {
+ "main_character" => "Anna",
+ "supporting_character" => "Stiva",
+ "location" => "Moscow"
+ }
+ )
+ ]
+)
+
+EVENT_WORKFLOW.task(:task) do |input, ctx|
+ puts "event received"
+ ctx.filter_payload
+end
+
+
+# > Accessing the filter payload
+EVENT_WORKFLOW_WITH_FILTER.task(:filtered_task) do |input, ctx|
+ puts ctx.filter_payload.inspect
+end
+
+
+def main
+ worker = HATCHET.worker("EventWorker", workflows: [EVENT_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/fanout/example_child_spawn.rb b/examples/ruby/fanout/example_child_spawn.rb
new file mode 100644
index 000000000..225f61887
--- /dev/null
+++ b/examples/ruby/fanout/example_child_spawn.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Child spawn
+FANOUT_CHILD_WF.run({ "a" => "b" })
+
+# > Error handling
+begin
+ FANOUT_CHILD_WF.run({ "a" => "b" })
+rescue StandardError => e
+ puts "Child workflow failed: #{e.message}"
+end
diff --git a/examples/ruby/fanout/trigger.rb b/examples/ruby/fanout/trigger.rb
new file mode 100644
index 000000000..894f3562b
--- /dev/null
+++ b/examples/ruby/fanout/trigger.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Bulk run children
+def run_child_workflows(n)
+ FANOUT_CHILD_WF.run_many(
+ n.times.map do |i|
+ FANOUT_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s }
+ )
+ end
+ )
+end
diff --git a/examples/ruby/fanout/worker.rb b/examples/ruby/fanout/worker.rb
new file mode 100644
index 000000000..ddf0da8ca
--- /dev/null
+++ b/examples/ruby/fanout/worker.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > FanoutParent
+FANOUT_PARENT_WF = HATCHET.workflow(name: "FanoutParent")
+FANOUT_CHILD_WF = HATCHET.workflow(name: "FanoutChild")
+
+FANOUT_PARENT_WF.task(:spawn, execution_timeout: 300) do |input, ctx|
+ puts "spawning child"
+ n = input["n"] || 100
+
+ result = FANOUT_CHILD_WF.run_many(
+ n.times.map do |i|
+ FANOUT_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "hello" => "earth" },
+ key: "child#{i}"
+ )
+ )
+ end
+ )
+
+ puts "results #{result}"
+ { "results" => result }
+end
+
+
+# > FanoutChild
+FANOUT_CHILD_PROCESS = FANOUT_CHILD_WF.task(:process) do |input, ctx|
+ puts "child process #{input['a']}"
+ { "status" => input["a"] }
+end
+
+FANOUT_CHILD_WF.task(:process2, parents: [FANOUT_CHILD_PROCESS]) do |input, ctx|
+ process_output = ctx.task_output(FANOUT_CHILD_PROCESS)
+ a = process_output["status"]
+ { "status2" => "#{a}2" }
+end
+
+
+def main
+ worker = HATCHET.worker("fanout-worker", slots: 40, workflows: [FANOUT_PARENT_WF, FANOUT_CHILD_WF])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/fanout_sync/worker.rb b/examples/ruby/fanout_sync/worker.rb
new file mode 100644
index 000000000..020747f00
--- /dev/null
+++ b/examples/ruby/fanout_sync/worker.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SYNC_FANOUT_PARENT = HATCHET.workflow(name: "SyncFanoutParent")
+SYNC_FANOUT_CHILD = HATCHET.workflow(name: "SyncFanoutChild")
+
+SYNC_FANOUT_PARENT.task(:spawn, execution_timeout: 300) do |input, ctx|
+ puts "spawning child"
+ n = input["n"] || 5
+
+ results = SYNC_FANOUT_CHILD.run_many(
+ n.times.map do |i|
+ SYNC_FANOUT_CHILD.create_bulk_run_item(
+ input: { "a" => i.to_s },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ key: "child#{i}",
+ additional_metadata: { "hello" => "earth" }
+ )
+ )
+ end
+ )
+
+ puts "results #{results}"
+ { "results" => results }
+end
+
+SYNC_PROCESS = SYNC_FANOUT_CHILD.task(:process) do |input, ctx|
+ { "status" => "success #{input['a']}" }
+end
+
+SYNC_FANOUT_CHILD.task(:process2, parents: [SYNC_PROCESS]) do |input, ctx|
+ process_output = ctx.task_output(SYNC_PROCESS)
+ a = process_output["status"]
+ { "status2" => "#{a}2" }
+end
+
+def main
+ worker = HATCHET.worker(
+ "sync-fanout-worker",
+ slots: 40,
+ workflows: [SYNC_FANOUT_PARENT, SYNC_FANOUT_CHILD]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/hatchet_client.rb b/examples/ruby/hatchet_client.rb
new file mode 100644
index 000000000..49eea6aab
--- /dev/null
+++ b/examples/ruby/hatchet_client.rb
@@ -0,0 +1,30 @@
+#!/usr/bin/env ruby
+
+require 'hatchet-sdk'
+# require_relative '../src/lib/hatchet-sdk'
+
+# Initialize the Hatchet client
+HATCHET = Hatchet::Client.new() unless defined?(HATCHET)
+
+result = HATCHET.events.create(
+ key: "test-event",
+ data: {
+ message: "test"
+ }
+)
+puts "Event created: #{result.inspect}"
+
+
+run = HATCHET.runs.create(
+ name: "simple",
+ input: {
+ Message: "test workflow run"
+ },
+)
+
+puts "TriggeredRun ID: #{run.metadata.id}"
+
+result = HATCHET.runs.poll(run.metadata.id)
+
+puts "Runs client initialized: #{result.inspect}"
+puts "Run status: #{result.status}"
diff --git a/examples/ruby/lifespans/worker.rb b/examples/ruby/lifespans/worker.rb
new file mode 100644
index 000000000..464d34b99
--- /dev/null
+++ b/examples/ruby/lifespans/worker.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# > Lifespan
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true)
+
+# Ruby lifespan uses a block with yield for setup/teardown
+LIFESPAN_PROC = proc do
+ { foo: "bar", pi: 3.14 }
+end
+
+LIFESPAN_TASK = HATCHET.task(name: "LifespanWorkflow") do |input, ctx|
+ ctx.lifespan
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "test-worker", slots: 1, workflows: [LIFESPAN_TASK], lifespan: LIFESPAN_PROC
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/logger/worker.rb b/examples/ruby/logger/worker.rb
new file mode 100644
index 000000000..2f032103a
--- /dev/null
+++ b/examples/ruby/logger/worker.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+# > LoggingWorkflow
+
+require "hatchet-sdk"
+require "logger"
+
+logger = Logger.new($stdout)
+logger.level = Logger::INFO
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+LOGGING_WORKFLOW = HATCHET.workflow(name: "LoggingWorkflow")
+
+LOGGING_WORKFLOW.task(:root_logger) do |input, ctx|
+ 12.times do |i|
+ logger.info("executed step1 - #{i}")
+ logger.info({ "step1" => "step1" }.inspect)
+
+ sleep 0.1
+ end
+
+ { "status" => "success" }
+end
+
+
+# > ContextLogger
+LOGGING_WORKFLOW.task(:context_logger) do |input, ctx|
+ 12.times do |i|
+ ctx.log("executed step1 - #{i}")
+ ctx.log({ "step1" => "step1" }.inspect)
+
+ sleep 0.1
+ end
+
+ { "status" => "success" }
+end
+
+
+def main
+ worker = HATCHET.worker("logger-worker", slots: 5, workflows: [LOGGING_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/manual_slot_release/worker.rb b/examples/ruby/manual_slot_release/worker.rb
new file mode 100644
index 000000000..c84fac5bb
--- /dev/null
+++ b/examples/ruby/manual_slot_release/worker.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > SlotRelease
+SLOT_RELEASE_WORKFLOW = HATCHET.workflow(name: "SlotReleaseWorkflow")
+
+SLOT_RELEASE_WORKFLOW.task(:step1) do |input, ctx|
+ puts "RESOURCE INTENSIVE PROCESS"
+ sleep 10
+
+ # Release the slot after the resource-intensive process, so that other steps can run
+ ctx.release_slot
+
+ puts "NON RESOURCE INTENSIVE PROCESS"
+ { "status" => "success" }
+end
+
diff --git a/examples/ruby/non_retryable/worker.rb b/examples/ruby/non_retryable/worker.rb
new file mode 100644
index 000000000..9110a5f6a
--- /dev/null
+++ b/examples/ruby/non_retryable/worker.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+NON_RETRYABLE_WORKFLOW = HATCHET.workflow(name: "NonRetryableWorkflow")
+
+# > Non-retryable task
+NON_RETRYABLE_WORKFLOW.task(:should_not_retry, retries: 1) do |input, ctx|
+ raise Hatchet::NonRetryableError, "This task should not retry"
+end
+
+NON_RETRYABLE_WORKFLOW.task(:should_retry_wrong_exception_type, retries: 1) do |input, ctx|
+ raise TypeError, "This task should retry because it's not a NonRetryableError"
+end
+
+NON_RETRYABLE_WORKFLOW.task(:should_not_retry_successful_task, retries: 1) do |input, ctx|
+ # no-op
+end
+
+
+def main
+ worker = HATCHET.worker("non-retry-worker", workflows: [NON_RETRYABLE_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/on_failure/worker.rb b/examples/ruby/on_failure/worker.rb
new file mode 100644
index 000000000..5d34c5b40
--- /dev/null
+++ b/examples/ruby/on_failure/worker.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+ERROR_TEXT = "step1 failed"
+
+# > OnFailure Step
+# This workflow will fail because the step will throw an error
+# we define an onFailure step to handle this case
+
+ON_FAILURE_WF = HATCHET.workflow(name: "OnFailureWorkflow")
+
+ON_FAILURE_WF.task(:step1, execution_timeout: 1) do |input, ctx|
+ # This step will always raise an exception
+ raise ERROR_TEXT
+end
+
+# After the workflow fails, this special step will run
+ON_FAILURE_WF.on_failure_task do |input, ctx|
+ # We can do things like perform cleanup logic
+ # or notify a user here
+
+ # Fetch the errors from upstream step runs from the context
+ puts ctx.task_run_errors.inspect
+
+ { "status" => "success" }
+end
+
+
+# > OnFailure With Details
+# We can access the failure details in the onFailure step
+# via the context method
+
+ON_FAILURE_WF_WITH_DETAILS = HATCHET.workflow(name: "OnFailureWorkflowWithDetails")
+
+DETAILS_STEP1 = ON_FAILURE_WF_WITH_DETAILS.task(:details_step1, execution_timeout: 1) do |input, ctx|
+ raise ERROR_TEXT
+end
+
+# After the workflow fails, this special step will run
+ON_FAILURE_WF_WITH_DETAILS.on_failure_task do |input, ctx|
+  error = ctx.get_task_run_error(DETAILS_STEP1)
+
+  unless error
+    next { "status" => "unexpected success" }
+  end
+
+  # We can access the failure details here
+  raise "Expected Hatchet::TaskRunError" unless error.is_a?(Hatchet::TaskRunError)
+
+  if error.message.include?(ERROR_TEXT)
+    next {
+      "status" => "success",
+      "failed_run_external_id" => error.task_run_external_id
+    }
+  end
+
+  raise "unexpected failure"
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "on-failure-worker",
+ slots: 4,
+ workflows: [ON_FAILURE_WF, ON_FAILURE_WF_WITH_DETAILS]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/on_success/worker.rb b/examples/ruby/on_success/worker.rb
new file mode 100644
index 000000000..3daa05ae7
--- /dev/null
+++ b/examples/ruby/on_success/worker.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+ON_SUCCESS_WORKFLOW = HATCHET.workflow(name: "OnSuccessWorkflow")
+
+FIRST_TASK = ON_SUCCESS_WORKFLOW.task(:first_task) do |input, ctx|
+ puts "First task completed successfully"
+end
+
+SECOND_TASK = ON_SUCCESS_WORKFLOW.task(:second_task, parents: [FIRST_TASK]) do |input, ctx|
+ puts "Second task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.task(:third_task, parents: [FIRST_TASK, SECOND_TASK]) do |input, ctx|
+ puts "Third task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.task(:fourth_task) do |input, ctx|
+ puts "Fourth task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.on_success_task do |input, ctx|
+ puts "On success task completed successfully"
+end
+
+def main
+ worker = HATCHET.worker("on-success-worker", workflows: [ON_SUCCESS_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/priority/trigger.rb b/examples/ruby/priority/trigger.rb
new file mode 100644
index 000000000..88b2714dc
--- /dev/null
+++ b/examples/ruby/priority/trigger.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Runtime priority
+low_prio = PRIORITY_WORKFLOW.run_no_wait(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ priority: 1,
+ additional_metadata: { "priority" => "low", "key" => 1 }
+ )
+)
+
+high_prio = PRIORITY_WORKFLOW.run_no_wait(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ priority: 3,
+ additional_metadata: { "priority" => "high", "key" => 1 }
+ )
+)
+
+# > Scheduled priority
+schedule = PRIORITY_WORKFLOW.schedule(
+ Time.now + 60,
+ options: Hatchet::TriggerWorkflowOptions.new(priority: 3)
+)
+
+cron = PRIORITY_WORKFLOW.create_cron(
+ "my-scheduled-cron",
+ "0 * * * *",
+ input: {},
+)
diff --git a/examples/ruby/priority/worker.rb b/examples/ruby/priority/worker.rb
new file mode 100644
index 000000000..362ce2ad6
--- /dev/null
+++ b/examples/ruby/priority/worker.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Default priority
+DEFAULT_PRIORITY = 1
+SLEEP_TIME = 0.25
+
+PRIORITY_WORKFLOW = HATCHET.workflow(
+ name: "PriorityWorkflow",
+ default_priority: DEFAULT_PRIORITY
+)
+
+PRIORITY_WORKFLOW.task(:priority_task) do |input, ctx|
+ puts "Priority: #{ctx.priority}"
+ sleep SLEEP_TIME
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "priority-worker",
+ slots: 1,
+ workflows: [PRIORITY_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/quickstart/run.rb b/examples/ruby/quickstart/run.rb
new file mode 100644
index 000000000..cf6529f77
--- /dev/null
+++ b/examples/ruby/quickstart/run.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require_relative "workflows/first_task"
+
+# > Run a task
+result = FIRST_TASK.run({ "message" => "Hello World!" })
+puts "Finished running task: #{result['transformed_message']}"
diff --git a/examples/ruby/quickstart/workflows/first_task.rb b/examples/ruby/quickstart/workflows/first_task.rb
new file mode 100644
index 000000000..7d61a724d
--- /dev/null
+++ b/examples/ruby/quickstart/workflows/first_task.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Simple task
+FIRST_TASK = HATCHET.task(name: "first-task") do |input, ctx|
+ puts "first-task called"
+ { "transformed_message" => input["message"].downcase }
+end
diff --git a/examples/ruby/rate_limit/worker.rb b/examples/ruby/rate_limit/worker.rb
new file mode 100644
index 000000000..c9ddff1e6
--- /dev/null
+++ b/examples/ruby/rate_limit/worker.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Workflow
+RATE_LIMIT_WORKFLOW = HATCHET.workflow(name: "RateLimitWorkflow")
+
+
+# > Static
+RATE_LIMIT_KEY = "test-limit"
+
+RATE_LIMIT_WORKFLOW.task(
+ :step_1,
+ rate_limits: [Hatchet::RateLimit.new(static_key: RATE_LIMIT_KEY, units: 1)]
+) do |input, ctx|
+ puts "executed step_1"
+end
+
+
+# > Dynamic
+RATE_LIMIT_WORKFLOW.task(
+ :step_2,
+ rate_limits: [
+ Hatchet::RateLimit.new(
+ dynamic_key: "input.user_id",
+ units: 1,
+ limit: 10,
+ duration: :minute
+ )
+ ]
+) do |input, ctx|
+ puts "executed step_2"
+end
+
+
+# > Create a rate limit
+def main
+ HATCHET.rate_limits.put(RATE_LIMIT_KEY, 2, :second)
+
+ worker = HATCHET.worker(
+ "rate-limit-worker", slots: 10, workflows: [RATE_LIMIT_WORKFLOW]
+ )
+ worker.start
+end
+
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/retries/worker.rb b/examples/ruby/retries/worker.rb
new file mode 100644
index 000000000..1c4a7ff38
--- /dev/null
+++ b/examples/ruby/retries/worker.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SIMPLE_RETRY_WORKFLOW = HATCHET.workflow(name: "SimpleRetryWorkflow")
+BACKOFF_WORKFLOW = HATCHET.workflow(name: "BackoffWorkflow")
+
+# > Simple Step Retries
+SIMPLE_RETRY_WORKFLOW.task(:always_fail, retries: 3) do |input, ctx|
+ raise "simple task failed"
+end
+
+
+# > Retries with Count
+SIMPLE_RETRY_WORKFLOW.task(:fail_twice, retries: 3) do |input, ctx|
+ raise "simple task failed" if ctx.retry_count < 2
+
+ { "status" => "success" }
+end
+
+
+# > Retries with Backoff
+BACKOFF_WORKFLOW.task(
+ :backoff_task,
+ retries: 10,
+ # Maximum number of seconds to wait between retries
+ backoff_max_seconds: 10,
+ # Factor to increase the wait time between retries.
+ # This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... due to the maxSeconds limit
+ backoff_factor: 2.0
+) do |input, ctx|
+ raise "backoff task failed" if ctx.retry_count < 3
+
+ { "status" => "success" }
+end
+
+
+def main
+  worker = HATCHET.worker("backoff-worker", slots: 4, workflows: [SIMPLE_RETRY_WORKFLOW, BACKOFF_WORKFLOW])
+  worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/return_exceptions/worker.rb b/examples/ruby/return_exceptions/worker.rb
new file mode 100644
index 000000000..6039a1915
--- /dev/null
+++ b/examples/ruby/return_exceptions/worker.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+RETURN_EXCEPTIONS_TASK = HATCHET.task(name: "return_exceptions_task") do |input, ctx|
+ if input["index"].to_i.even?
+ raise "error in task with index #{input['index']}"
+ end
+
+ { "message" => "this is a successful task." }
+end
diff --git a/examples/ruby/run_details/worker.rb b/examples/ruby/run_details/worker.rb
new file mode 100644
index 000000000..952cfccde
--- /dev/null
+++ b/examples/ruby/run_details/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+RUN_DETAIL_TEST_WORKFLOW = HATCHET.workflow(name: "RunDetailTest")
+
+DETAIL_STEP1 = RUN_DETAIL_TEST_WORKFLOW.task(:step1) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:cancel_step) do |input, ctx|
+ ctx.cancel
+ 10.times { sleep 1 }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:fail_step) do |input, ctx|
+ raise "Intentional Failure"
+end
+
+DETAIL_STEP2 = RUN_DETAIL_TEST_WORKFLOW.task(:step2) do |input, ctx|
+ sleep 5
+ { "random_number" => rand(1..100) }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:step3, parents: [DETAIL_STEP1, DETAIL_STEP2]) do |input, ctx|
+ one = ctx.task_output(DETAIL_STEP1)["random_number"]
+ two = ctx.task_output(DETAIL_STEP2)["random_number"]
+
+ { "sum" => one + two }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:step4, parents: [DETAIL_STEP1, :step3]) do |input, ctx|
+ puts(
+ "executed step4",
+ Time.now.strftime("%H:%M:%S"),
+ input.inspect,
+ ctx.task_output(DETAIL_STEP1).inspect,
+ ctx.task_output(:step3).inspect
+ )
+
+ { "step4" => "step4" }
+end
+
+def main
+ worker = HATCHET.worker("run-detail-worker", workflows: [RUN_DETAIL_TEST_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/scheduled/programatic_sync.rb b/examples/ruby/scheduled/programatic_sync.rb
new file mode 100644
index 000000000..d32fcc762
--- /dev/null
+++ b/examples/ruby/scheduled/programatic_sync.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+# > Create
+scheduled_run = hatchet.scheduled.create(
+ workflow_name: "simple-workflow",
+ trigger_at: Time.now + 10,
+ input: { "data" => "simple-workflow-data" },
+ additional_metadata: { "customer_id" => "customer-a" }
+)
+
+id = scheduled_run.metadata.id
+
+# > Reschedule
+hatchet.scheduled.update(
+ scheduled_run.metadata.id,
+ trigger_at: Time.now + 3600
+)
+
+# > Delete
+hatchet.scheduled.delete(scheduled_run.metadata.id)
+
+# > List
+scheduled_runs = hatchet.scheduled.list
+
+# > Bulk delete
+hatchet.scheduled.bulk_delete(scheduled_ids: [id])
+
+# > Bulk reschedule
+hatchet.scheduled.bulk_update(
+ [[id, Time.now + 7200]]
+)
diff --git a/examples/ruby/scheduled/worker.rb b/examples/ruby/scheduled/worker.rb
new file mode 100644
index 000000000..fd482f5e7
--- /dev/null
+++ b/examples/ruby/scheduled/worker.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SCHEDULED_WORKFLOW = HATCHET.workflow(name: "ScheduledWorkflow")
+
+SCHEDULED_WORKFLOW.task(:scheduled_task) do |input, ctx|
+ puts "Scheduled task executed at #{Time.now}"
+ { "status" => "success" }
+end
+
+# > Programmatic Schedule
+def schedule_workflow
+ future_time = Time.now + 60 # 1 minute from now
+ SCHEDULED_WORKFLOW.schedule(future_time, input: { "message" => "scheduled run" })
+end
+
+
+def main
+ worker = HATCHET.worker("scheduled-worker", workflows: [SCHEDULED_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/serde/worker.rb b/examples/ruby/serde/worker.rb
new file mode 100644
index 000000000..639f987b3
--- /dev/null
+++ b/examples/ruby/serde/worker.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+# > Custom Serialization/Deserialization
+
+require "hatchet-sdk"
+require "base64"
+require "zlib"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SERDE_WORKFLOW = HATCHET.workflow(name: "serde-example-workflow")
+
+GENERATE_RESULT = SERDE_WORKFLOW.task(:generate_result) do |input, ctx|
+ compressed = Base64.strict_encode64(Zlib::Deflate.deflate("my_result"))
+ { "result" => compressed }
+end
+
+SERDE_WORKFLOW.task(:read_result, parents: [GENERATE_RESULT]) do |input, ctx|
+ encoded = ctx.task_output(GENERATE_RESULT)["result"]
+ decoded = Zlib::Inflate.inflate(Base64.strict_decode64(encoded))
+ { "final_result" => decoded }
+end
+
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SERDE_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/simple/schedule.rb b/examples/ruby/simple/schedule.rb
new file mode 100644
index 000000000..8235dd19c
--- /dev/null
+++ b/examples/ruby/simple/schedule.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Schedule a task
+schedule = SIMPLE.schedule(Time.now + 86_400, input: { "message" => "Hello, World!" })
+
+## do something with the id
+puts schedule.metadata.id
diff --git a/examples/ruby/simple/trigger.rb b/examples/ruby/simple/trigger.rb
new file mode 100644
index 000000000..b06797c8c
--- /dev/null
+++ b/examples/ruby/simple/trigger.rb
@@ -0,0 +1,7 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Run a task
+result = SIMPLE.run({ "message" => "Hello, World!" })
+puts result
diff --git a/examples/ruby/simple/trigger_with_metadata.rb b/examples/ruby/simple/trigger_with_metadata.rb
new file mode 100644
index 000000000..36da6e2c6
--- /dev/null
+++ b/examples/ruby/simple/trigger_with_metadata.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Trigger with metadata
+SIMPLE.run(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "source" => "api" }
+ )
+)
diff --git a/examples/ruby/simple/worker.rb b/examples/ruby/simple/worker.rb
new file mode 100644
index 000000000..b4abbffc7
--- /dev/null
+++ b/examples/ruby/simple/worker.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+# > Simple
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SIMPLE = HATCHET.task(name: "simple") do |input, ctx|
+ { "result" => "Hello, world!" }
+end
+
+SIMPLE_DURABLE = HATCHET.durable_task(name: "simple_durable") do |input, ctx|
+ result = SIMPLE.run(input)
+ { "result" => result["result"] }
+end
+
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SIMPLE, SIMPLE_DURABLE])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/simple/workflow.rb b/examples/ruby/simple/workflow.rb
new file mode 100644
index 000000000..a0991d101
--- /dev/null
+++ b/examples/ruby/simple/workflow.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Define a workflow
+EXAMPLE_WORKFLOW = HATCHET.workflow(name: "example-workflow")
+
diff --git a/examples/ruby/sticky_workers/worker.rb b/examples/ruby/sticky_workers/worker.rb
new file mode 100644
index 000000000..3fa3ec08c
--- /dev/null
+++ b/examples/ruby/sticky_workers/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > StickyWorker
+STICKY_WORKFLOW = HATCHET.workflow(
+ name: "StickyWorkflow",
+ # Specify a sticky strategy when declaring the workflow
+ sticky: :soft
+)
+
+STEP1A = STICKY_WORKFLOW.task(:step1a) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+STEP1B = STICKY_WORKFLOW.task(:step1b) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+
+# > StickyChild
+STICKY_CHILD_WORKFLOW = HATCHET.workflow(
+ name: "StickyChildWorkflow",
+ sticky: :soft
+)
+
+STICKY_WORKFLOW.task(:step2, parents: [STEP1A, STEP1B]) do |input, ctx|
+ ref = STICKY_CHILD_WORKFLOW.run_no_wait(
+ options: Hatchet::TriggerWorkflowOptions.new(sticky: true)
+ )
+
+ ref.result
+
+ { "worker" => ctx.worker.id }
+end
+
+STICKY_CHILD_WORKFLOW.task(:child) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "sticky-worker", slots: 10, workflows: [STICKY_WORKFLOW, STICKY_CHILD_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/streaming/async_stream.rb b/examples/ruby/streaming/async_stream.rb
new file mode 100644
index 000000000..eb2fcc487
--- /dev/null
+++ b/examples/ruby/streaming/async_stream.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Consume
+ref = STREAM_TASK.run_no_wait
+
+HATCHET.runs.subscribe_to_stream(ref.workflow_run_id) do |chunk|
+ print chunk
+end
diff --git a/examples/ruby/streaming/worker.rb b/examples/ruby/streaming/worker.rb
new file mode 100644
index 000000000..5433d5345
--- /dev/null
+++ b/examples/ruby/streaming/worker.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+# > Streaming
+ANNA_KARENINA = <<~TEXT
+ Happy families are all alike; every unhappy family is unhappy in its own way.
+
+ Everything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.
+TEXT
+
+STREAM_CHUNKS = ANNA_KARENINA.scan(/.{1,10}/)
+
+STREAM_TASK = HATCHET.task(name: "stream_task") do |input, ctx|
+ # Sleeping to avoid race conditions
+ sleep 2
+
+ STREAM_CHUNKS.each do |chunk|
+ ctx.put_stream(chunk)
+ sleep 0.20
+ end
+end
+
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [STREAM_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/timeout/worker.rb b/examples/ruby/timeout/worker.rb
new file mode 100644
index 000000000..c89f5784f
--- /dev/null
+++ b/examples/ruby/timeout/worker.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > ScheduleTimeout
+TIMEOUT_WF = HATCHET.workflow(
+ name: "TimeoutWorkflow",
+ task_defaults: { execution_timeout: 120 } # 2 minutes
+)
+
+
+# > ExecutionTimeout
+# Specify an execution timeout on a task
+TIMEOUT_WF.task(:timeout_task, execution_timeout: 5, schedule_timeout: 600) do |input, ctx|
+ sleep 30
+ { "status" => "success" }
+end
+
+REFRESH_TIMEOUT_WF = HATCHET.workflow(name: "RefreshTimeoutWorkflow")
+
+
+# > RefreshTimeout
+REFRESH_TIMEOUT_WF.task(:refresh_task, execution_timeout: 4) do |input, ctx|
+ ctx.refresh_timeout(10)
+ sleep 5
+
+ { "status" => "success" }
+end
+
+
+def main
+ worker = HATCHET.worker(
+ "timeout-worker", slots: 4, workflows: [TIMEOUT_WF, REFRESH_TIMEOUT_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/trigger_methods/workflow.rb b/examples/ruby/trigger_methods/workflow.rb
new file mode 100644
index 000000000..629630046
--- /dev/null
+++ b/examples/ruby/trigger_methods/workflow.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+# > Define a task
+SAY_HELLO = hatchet.task(name: "say_hello") do |input, ctx|
+ { "greeting" => "Hello, #{input['name']}!" }
+end
+
+# > Sync
+result = SAY_HELLO.run({ "name" => "World" })
+
+# > Async
+# In Ruby, run_no_wait is the equivalent of async enqueuing
+ref = SAY_HELLO.run_no_wait({ "name" => "World" })
+
+# > Result sync
+result = ref.result
+
+# > Result async
+# In Ruby, result is synchronous - use poll for async-like behavior
+result = ref.result
diff --git a/examples/ruby/unit_testing/worker.rb b/examples/ruby/unit_testing/worker.rb
new file mode 100644
index 000000000..19b94a760
--- /dev/null
+++ b/examples/ruby/unit_testing/worker.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# Unit test workflow definitions
+SYNC_STANDALONE = HATCHET.task(name: "sync_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+ASYNC_STANDALONE = HATCHET.task(name: "async_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+DURABLE_SYNC_STANDALONE = HATCHET.durable_task(name: "durable_sync_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+DURABLE_ASYNC_STANDALONE = HATCHET.durable_task(name: "durable_async_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+SIMPLE_UNIT_TEST_WORKFLOW = HATCHET.workflow(name: "simple-unit-test-workflow")
+
+SIMPLE_UNIT_TEST_WORKFLOW.task(:sync_simple_workflow) do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+COMPLEX_UNIT_TEST_WORKFLOW = HATCHET.workflow(name: "complex-unit-test-workflow")
+
+UNIT_START = COMPLEX_UNIT_TEST_WORKFLOW.task(:start) do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+COMPLEX_UNIT_TEST_WORKFLOW.task(:sync_complex_workflow, parents: [UNIT_START]) do |input, ctx|
+ ctx.task_output(UNIT_START)
+end
diff --git a/examples/ruby/webhook_with_scope/worker.rb b/examples/ruby/webhook_with_scope/worker.rb
new file mode 100644
index 000000000..ce12a2ea5
--- /dev/null
+++ b/examples/ruby/webhook_with_scope/worker.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+WEBHOOK_WITH_SCOPE = HATCHET.task(
+ name: "webhook_with_scope",
+ on_events: ["webhook-scope:test"],
+ default_filters: [
+ Hatchet::DefaultFilter.new(
+ expression: "true",
+ scope: "test-scope-value",
+ payload: {}
+ )
+ ]
+) do |input, ctx|
+ input
+end
+
+WEBHOOK_WITH_STATIC_PAYLOAD = HATCHET.task(
+ name: "webhook_with_static_payload",
+ on_events: ["webhook-static:test"]
+) do |input, ctx|
+ input
+end
+
+def main
+ worker = HATCHET.worker(
+ "webhook-scope-worker",
+ workflows: [WEBHOOK_WITH_SCOPE, WEBHOOK_WITH_STATIC_PAYLOAD]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/webhooks/worker.rb b/examples/ruby/webhooks/worker.rb
new file mode 100644
index 000000000..7362abf17
--- /dev/null
+++ b/examples/ruby/webhooks/worker.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+# > Webhooks
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+WEBHOOK_TASK = HATCHET.task(
+ name: "webhook",
+ on_events: ["webhook:test"]
+) do |input, ctx|
+ {
+ "type" => input["type"],
+ "message" => input["message"]
+ }
+end
+
+
+def main
+ worker = HATCHET.worker("webhook-worker", workflows: [WEBHOOK_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/examples/ruby/worker.rb b/examples/ruby/worker.rb
new file mode 100644
index 000000000..06627cb59
--- /dev/null
+++ b/examples/ruby/worker.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+# Main worker that registers all example workflows.
+
+require "hatchet-sdk"
+
+# Load all example workflows
+require_relative "simple/worker"
+require_relative "dag/worker"
+require_relative "events/worker"
+require_relative "cancellation/worker"
+require_relative "on_failure/worker"
+require_relative "on_success/worker"
+require_relative "timeout/worker"
+require_relative "retries/worker"
+require_relative "non_retryable/worker"
+require_relative "logger/worker"
+require_relative "delayed/worker"
+require_relative "priority/worker"
+require_relative "run_details/worker"
+require_relative "concurrency_limit/worker"
+require_relative "concurrency_limit_rr/worker"
+require_relative "concurrency_cancel_in_progress/worker"
+require_relative "concurrency_cancel_newest/worker"
+require_relative "concurrency_multiple_keys/worker"
+require_relative "concurrency_workflow_level/worker"
+require_relative "rate_limit/worker"
+require_relative "child/worker"
+require_relative "fanout/worker"
+require_relative "bulk_fanout/worker"
+require_relative "durable/worker"
+require_relative "durable_event/worker"
+require_relative "durable_sleep/worker"
+require_relative "conditions/worker"
+require_relative "dependency_injection/worker"
+require_relative "streaming/worker"
+require_relative "serde/worker"
+require_relative "dataclasses/worker"
+require_relative "dedupe/worker"
+require_relative "cron/worker"
+require_relative "scheduled/worker"
+require_relative "bulk_operations/worker"
+require_relative "return_exceptions/worker"
+require_relative "manual_slot_release/worker"
+require_relative "affinity_workers/worker"
+require_relative "sticky_workers/worker"
+require_relative "webhooks/worker"
+require_relative "webhook_with_scope/worker"
+require_relative "unit_testing/worker"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+ALL_WORKFLOWS = [
+ # Tier 1
+ SIMPLE, SIMPLE_DURABLE,
+ DAG_WORKFLOW,
+ EVENT_WORKFLOW,
+ CANCELLATION_WORKFLOW,
+ ON_FAILURE_WF, ON_FAILURE_WF_WITH_DETAILS,
+ ON_SUCCESS_WORKFLOW,
+ TIMEOUT_WF, REFRESH_TIMEOUT_WF,
+ SIMPLE_RETRY_WORKFLOW, BACKOFF_WORKFLOW,
+ NON_RETRYABLE_WORKFLOW,
+ LOGGING_WORKFLOW,
+ PRINT_SCHEDULE_WF, PRINT_PRINTER_WF,
+ PRIORITY_WORKFLOW,
+ RUN_DETAIL_TEST_WORKFLOW,
+
+ # Tier 2
+ CONCURRENCY_LIMIT_WORKFLOW,
+ CONCURRENCY_LIMIT_RR_WORKFLOW,
+ CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW,
+ CONCURRENCY_CANCEL_NEWEST_WORKFLOW,
+ CONCURRENCY_MULTIPLE_KEYS_WORKFLOW,
+ CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW,
+ RATE_LIMIT_WORKFLOW,
+
+ # Tier 3
+ CHILD_TASK_WF,
+ FANOUT_PARENT_WF, FANOUT_CHILD_WF,
+ BULK_PARENT_WF, BULK_CHILD_WF,
+ DURABLE_WORKFLOW, EPHEMERAL_WORKFLOW, WAIT_FOR_SLEEP_TWICE,
+ DURABLE_EVENT_TASK, DURABLE_EVENT_TASK_WITH_FILTER,
+ DURABLE_SLEEP_TASK,
+ TASK_CONDITION_WORKFLOW,
+ ASYNC_TASK_WITH_DEPS, SYNC_TASK_WITH_DEPS,
+ DURABLE_ASYNC_TASK_WITH_DEPS, DURABLE_SYNC_TASK_WITH_DEPS,
+ DI_WORKFLOW,
+
+ # Tier 4-5
+ STREAM_TASK,
+ SERDE_WORKFLOW,
+ SAY_HELLO,
+ DEDUPE_PARENT_WF, DEDUPE_CHILD_WF,
+ CRON_WORKFLOW,
+ SCHEDULED_WORKFLOW,
+ BULK_REPLAY_TEST_1, BULK_REPLAY_TEST_2, BULK_REPLAY_TEST_3,
+ RETURN_EXCEPTIONS_TASK,
+ SLOT_RELEASE_WORKFLOW,
+ AFFINITY_WORKER_WORKFLOW,
+ STICKY_WORKFLOW, STICKY_CHILD_WORKFLOW,
+ WEBHOOK_TASK,
+ WEBHOOK_WITH_SCOPE, WEBHOOK_WITH_STATIC_PAYLOAD,
+ SYNC_STANDALONE, ASYNC_STANDALONE,
+ DURABLE_SYNC_STANDALONE, DURABLE_ASYNC_STANDALONE,
+ SIMPLE_UNIT_TEST_WORKFLOW, COMPLEX_UNIT_TEST_WORKFLOW
+].freeze
+
+worker = HATCHET.worker("all-examples-worker", slots: 40, workflows: ALL_WORKFLOWS)
+worker.start
diff --git a/examples/ruby/worker_fixture.rb b/examples/ruby/worker_fixture.rb
new file mode 100644
index 000000000..d4dfbf31e
--- /dev/null
+++ b/examples/ruby/worker_fixture.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require "open3"
+require "net/http"
+require "logger"
+require "timeout"
+
+module HatchetWorkerFixture
+ LOGGER = Logger.new($stdout)
+
+ # Wait for the worker health check endpoint to respond
+ #
+ # @param port [Integer] Health check port
+ # @param max_attempts [Integer] Maximum number of attempts
+ # @return [Boolean] true if healthy
+ # @raise [RuntimeError] if worker fails to start
+ def self.wait_for_worker_health(port:, max_attempts: 25)
+ attempts = 0
+
+ loop do
+ if attempts > max_attempts
+ raise "Worker failed to start within #{max_attempts} seconds"
+ end
+
+ begin
+ uri = URI("http://localhost:#{port}/health")
+ response = Net::HTTP.get_response(uri)
+ return true if response.code == "200"
+ rescue StandardError
+ # Worker not ready yet
+ end
+
+ sleep 1
+ attempts += 1
+ end
+ end
+
+ # Start a worker subprocess and wait for it to be healthy
+ #
+ # @param command [Array] Command to run
+ # @param healthcheck_port [Integer] Port for health checks
+ # @yield [pid] Yields the process PID
+ # @return [void]
+ def self.with_worker(command, healthcheck_port: 8001)
+ LOGGER.info("Starting background worker: #{command.join(' ')}")
+
+ ENV["HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT"] = healthcheck_port.to_s
+
+ stdin, stdout, stderr, wait_thr = Open3.popen3(*command)
+ pid = wait_thr.pid
+
+ # Log output in background threads
+ Thread.new do
+ stdout.each_line { |line| puts line.chomp }
+ rescue IOError
+ # Stream closed
+ end
+
+ Thread.new do
+ stderr.each_line { |line| $stderr.puts line.chomp }
+ rescue IOError
+ # Stream closed
+ end
+
+ wait_for_worker_health(port: healthcheck_port)
+
+ yield pid
+ ensure
+ LOGGER.info("Cleaning up background worker (PID: #{pid})")
+
+ if pid
+ begin
+ # Kill process group to get children too
+ Process.kill("TERM", -Process.getpgid(pid))
+ rescue Errno::ESRCH, Errno::EPERM
+ # Process already gone
+ end
+
+ begin
+ Timeout.timeout(5) { Process.wait(pid) }
+ rescue Timeout::Error
+ begin
+ Process.kill("KILL", pid)
+ Process.wait(pid)
+ rescue Errno::ESRCH, Errno::ECHILD
+ # Already gone
+ end
+ rescue Errno::ECHILD
+ # Already reaped
+ end
+ end
+
+ [stdin, stdout, stderr].each do |io|
+ io&.close rescue nil
+ end
+ end
+end
diff --git a/frontend/app/src/lib/api/generated/cloud/data-contracts.ts b/frontend/app/src/lib/api/generated/cloud/data-contracts.ts
index 3027e990b..34b66d3d5 100644
--- a/frontend/app/src/lib/api/generated/cloud/data-contracts.ts
+++ b/frontend/app/src/lib/api/generated/cloud/data-contracts.ts
@@ -821,18 +821,143 @@ export interface AutumnCustomerProductsUpdatedEvent {
export interface AutumnCustomerProductsUpdatedEventData {
customer: AutumnCustomer;
entity: {
- id: string;
+ /** @format int64 */
+ created_at?: number;
customer_id: string;
+ env?: string;
+ features?: AutumnFeaturesMap;
+ id: string;
+ name?: string;
products: AutumnCustomerProduct[];
};
+ scenario?: string;
+ updated_product?: {
+ archived?: boolean;
+ base_variant_id?: string;
+ /** @format int64 */
+ created_at?: number;
+ env?: string;
+ free_trial?: object;
+ group?: string;
+ id: string;
+ is_add_on?: boolean;
+ is_default?: boolean;
+ items?: AutumnProductItem[];
+ name?: string;
+ properties?: {
+ has_trial?: boolean;
+ interval_group?: string;
+ is_free?: boolean;
+ is_one_off?: boolean;
+ updateable?: boolean;
+ };
+ version?: number;
+ };
}
export interface AutumnCustomer {
+ autumn_id?: string;
+ /** @format int64 */
+ created_at?: number;
+ email?: string;
+ env?: string;
+ features?: AutumnFeaturesMap;
+ fingerprint?: string;
id: string;
- metadata: object;
+ metadata: Record;
name: string;
+ products?: AutumnCustomerProduct[];
+ send_email_receipts?: boolean;
+ stripe_id?: string;
}
export interface AutumnCustomerProduct {
+ /** @format int64 */
+ canceled_at?: number;
+ /** @format int64 */
+ current_period_end?: number;
+ /** @format int64 */
+ current_period_start?: number;
+ group?: string;
id: string;
+ is_add_on?: boolean;
+ is_default?: boolean;
+ items?: AutumnProductItem[];
+ name?: string;
+ quantity?: number;
+ /** @format int64 */
+ started_at?: number;
+ status?: string;
+ version?: number;
+}
+
+export type AutumnFeaturesMap = Record;
+
+export interface AutumnFeature {
+ balance?: number;
+ breakdown?: AutumnFeatureBreakdown[];
+ credit_schema?: AutumnFeatureCreditSchemaItem[];
+ id: string;
+ included_usage?: number;
+ interval?: string;
+ interval_count?: number;
+ name: string;
+ /** @format int64 */
+ next_reset_at?: number;
+ overage_allowed?: boolean;
+ type: string;
+ unlimited?: boolean;
+ usage?: number;
+}
+
+export interface AutumnFeatureBreakdown {
+ balance?: number;
+ /** @format int64 */
+ expires_at?: number;
+ included_usage?: number;
+ interval?: string;
+ interval_count?: number;
+ /** @format int64 */
+ next_reset_at?: number;
+ overage_allowed?: boolean;
+ usage?: number;
+}
+
+export interface AutumnFeatureCreditSchemaItem {
+ credit_amount: number;
+ feature_id: string;
+}
+
+export interface AutumnProductItem {
+ billing_units?: number;
+ display?: AutumnProductItemDisplay;
+ entity_feature_id?: string;
+ feature?: {
+ archived?: boolean;
+ credit_schema?: {
+ credit_cost: number;
+ metered_feature_id: string;
+ }[];
+ display?: {
+ plural?: string;
+ singular?: string;
+ };
+ id: string;
+ name: string;
+ type: string;
+ };
+ feature_id?: string;
+ feature_type?: string;
+ included_usage?: number;
+ interval?: string;
+ interval_count?: number;
+ price?: number;
+ reset_usage_when_enabled?: boolean;
+ type: string;
+ usage_model?: string;
+}
+
+export interface AutumnProductItemDisplay {
+ primary_text?: string;
+ secondary_text?: string;
}
diff --git a/frontend/app/src/lib/api/generated/data-contracts.ts b/frontend/app/src/lib/api/generated/data-contracts.ts
index 85cd41d30..4ea60efaa 100644
--- a/frontend/app/src/lib/api/generated/data-contracts.ts
+++ b/frontend/app/src/lib/api/generated/data-contracts.ts
@@ -33,6 +33,7 @@ export enum WorkerRuntimeSDKs {
GOLANG = "GOLANG",
PYTHON = "PYTHON",
TYPESCRIPT = "TYPESCRIPT",
+ RUBY = "RUBY",
}
export enum WorkerType {
@@ -986,7 +987,7 @@ export type V1CreateWebhookRequest =
export interface V1UpdateWebhookRequest {
/** The CEL expression to use for the event key. This is used to create the event key from the webhook payload. */
- eventKeyExpression: string;
+ eventKeyExpression?: string;
/** The CEL expression to use for the scope. This is used to filter the correct workflow to trigger. */
scopeExpression?: string;
/** The static payload to use for the webhook. This is used to send a static payload with the webhook. */
diff --git a/frontend/app/src/pages/main/v1/overview/components/learn-workflow-section.tsx b/frontend/app/src/pages/main/v1/overview/components/learn-workflow-section.tsx
index 4ad3c8993..2b1245874 100644
--- a/frontend/app/src/pages/main/v1/overview/components/learn-workflow-section.tsx
+++ b/frontend/app/src/pages/main/v1/overview/components/learn-workflow-section.tsx
@@ -17,6 +17,7 @@ export const workflowStepOptions = {
profile: { value: 'profile', label: 'Set your profile' },
quickstart: { value: 'quickstart', label: 'Project quickstart' },
runTask: { value: 'runTask', label: 'Run a task' },
+ aiDocs: { value: 'aiDocs', label: 'Install Docs MCP (optional)' },
} as const;
export const workflowLanguageOptions = {
@@ -314,6 +315,46 @@ export function LearnWorkflowSection({
+
+ >
+ ),
+ },
+ {
+ ...workflowStepOptions.aiDocs,
+ content: (
+ <>
+
+ Get Hatchet documentation directly in your AI coding assistant
+ (Cursor, Claude Code, Claude Desktop, and more).
+
+
+
+ See the{' '}
+
+ full setup guide
+ {' '}
+ for manual configuration options.
+
+ );
+}
+
+/** Cursor IDE tab label with official logo. */
+export function CursorTabLabel() {
+ return (
+
+
+ Cursor
+
+ );
+}
+
+/** Claude Code tab label with official Claude logo. */
+export function ClaudeCodeTabLabel() {
+ return (
+
+
+ Claude Code
+
+ );
+}
+
+/** Globe icon – used for the "Other Agents" tab. */
+export function OtherAgentsTabLabel() {
+ return (
+
+
+ Other Agents
+
+ );
+}
+
+/** Returns the MCP endpoint URL based on current origin. */
+function useMcpUrl(): string {
+ const [origin, setOrigin] = useState("https://docs.hatchet.run");
+ useEffect(() => {
+ setOrigin(window.location.origin);
+ }, []);
+ return `${origin}/api/mcp`;
+}
+
+/** Renders the MCP endpoint URL as inline code. */
+export function McpUrl() {
+ const url = useMcpUrl();
+ return {url};
+}
+
+/** Cursor one-click install deeplink button. */
+export function CursorDeeplinkButton() {
+ const url = useMcpUrl();
+ const config = JSON.stringify({
+ command: "npx",
+ args: ["-y", "mcp-remote", url],
+ });
+ const encoded =
+ typeof window !== "undefined"
+ ? btoa(config)
+ : Buffer.from(config).toString("base64");
+ const deeplink = `cursor://anysphere.cursor-deeplink/mcp/install?name=hatchet-docs&config=${encoded}`;
+
+ return (
+
+ );
+}
+
+/** Renders a JSON config code block with the dynamic MCP URL. */
+export function CursorMcpConfig() {
+ const url = useMcpUrl();
+ const config = JSON.stringify(
+ { "hatchet-docs": { command: "npx", args: ["-y", "mcp-remote", url] } },
+ null,
+ 2,
+ );
+ return ;
+}
+
+/** Renders the claude mcp add command with dynamic URL. */
+export function ClaudeCodeCommand() {
+ const url = useMcpUrl();
+ return (
+
+ );
+}
diff --git a/frontend/docs/components/Search.tsx b/frontend/docs/components/Search.tsx
new file mode 100644
index 000000000..abfc09522
--- /dev/null
+++ b/frontend/docs/components/Search.tsx
@@ -0,0 +1,566 @@
+import React, {
+ useCallback,
+ useEffect,
+ useRef,
+ useState,
+ type KeyboardEvent,
+} from "react";
+import { createPortal } from "react-dom";
+import { useRouter } from "next/router";
+import MiniSearch, { type SearchResult } from "minisearch";
+import posthog from "posthog-js";
+import {
+ MINISEARCH_OPTIONS,
+ SEARCH_OPTIONS,
+ rerankResults,
+ expandSynonyms,
+} from "@/lib/search-config";
+
+// ---------------------------------------------------------------------------
+// Lazy singleton for the search index
+// ---------------------------------------------------------------------------
+let indexPromise: Promise | null = null;
+
+function loadIndex(): Promise {
+ if (!indexPromise) {
+ indexPromise = fetch("/llms-search-index.json")
+ .then((res) => {
+ if (!res.ok)
+ throw new Error(`Failed to load search index: ${res.status}`);
+ return res.text();
+ })
+ .then((json) => MiniSearch.loadJSON(json, MINISEARCH_OPTIONS));
+ }
+ return indexPromise;
+}
+
+/** Convert a MiniSearch doc id to a Next.js route. */
+function idToRoute(id: string): string {
+ return "/" + id.replace("hatchet://docs/", "");
+}
+
+/** Extract the page route (without anchor) from a result. */
+function getPageRoute(result: SearchResult): string {
+ return (result.pageRoute as string) || result.id.replace(/#.*$/, "");
+}
+
+/** Get the page title from a result. */
+function getPageTitle(result: SearchResult): string {
+ return (result.pageTitle as string) || (result.title as string) || result.id;
+}
+
+/** Group results by page, maintaining overall order by first appearance. */
+function groupByPage(
+ results: SearchResult[],
+): Array<{ pageRoute: string; pageTitle: string; items: SearchResult[] }> {
+ const groups: Array<{
+ pageRoute: string;
+ pageTitle: string;
+ items: SearchResult[];
+ }> = [];
+ const seen = new Map();
+
+ for (const r of results) {
+ const route = getPageRoute(r);
+ const idx = seen.get(route);
+ if (idx !== undefined) {
+ groups[idx].items.push(r);
+ } else {
+ seen.set(route, groups.length);
+ groups.push({
+ pageRoute: route,
+ pageTitle: getPageTitle(r),
+ items: [r],
+ });
+ }
+ }
+
+ return groups;
+}
+
+// ---------------------------------------------------------------------------
+// Detect Mac for keyboard shortcut display
+// ---------------------------------------------------------------------------
+function useIsMac() {
+ const [isMac, setIsMac] = useState(false);
+ useEffect(() => {
+ setIsMac(/(Mac|iPhone|iPod|iPad)/i.test(navigator.platform));
+ }, []);
+ return isMac;
+}
+
+// ---------------------------------------------------------------------------
+// Highlight matches in text
+// ---------------------------------------------------------------------------
+/** Max number of words used for highlight regex (ReDoS prevention). */
+const HIGHLIGHT_MAX_WORDS = 16;
+/** Max total character length for the combined regex pattern. */
+const HIGHLIGHT_MAX_PATTERN_LEN = 256;
+
+function HighlightMatches({ text, query }: { text: string; query: string }) {
+ if (!query.trim()) return <>{text}>;
+
+ try {
+ // Build regex from individual query words for better highlighting.
+ // Limit the number of words and total pattern length to prevent ReDoS.
+ let words = query
+ .trim()
+ .split(/\s+/)
+ .filter((w) => w.length > 1)
+ .map((w) => w.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"))
+ .slice(0, HIGHLIGHT_MAX_WORDS);
+ if (words.length === 0) return <>{text}>;
+
+ // Trim the word list further if the joined pattern exceeds the length cap.
+ let pattern = words.join("|");
+ while (pattern.length > HIGHLIGHT_MAX_PATTERN_LEN && words.length > 1) {
+ words = words.slice(0, -1);
+ pattern = words.join("|");
+ }
+ if (pattern.length > HIGHLIGHT_MAX_PATTERN_LEN) return <>{text}>;
+
+ const re = new RegExp(`(${pattern})`, "i");
+ const parts = text.split(new RegExp(`(${pattern})`, "ig"));
+ return (
+ <>
+ {parts.map((part, i) =>
+ re.test(part) ? (
+
+ {part}
+
+ ) : (
+ {part}
+ ),
+ )}
+ >
+ );
+ } catch {
+ return <>{text}>;
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Spinner icon (matches Nextra's loading spinner)
+// ---------------------------------------------------------------------------
+function SpinnerIcon() {
+ return (
+
+ );
+}
+
+// ---------------------------------------------------------------------------
+// Search component
+// ---------------------------------------------------------------------------
+export default function Search({ className }: { className?: string }) {
+ const router = useRouter();
+ const isMac = useIsMac();
+ const inputRef = useRef(null);
+ const containerRef = useRef(null);
+ const listRef = useRef(null);
+
+ const [focused, setFocused] = useState(false);
+ const [query, setQuery] = useState("");
+ const [results, setResults] = useState([]);
+ const [isOpen, setIsOpen] = useState(false);
+ const [activeIndex, setActiveIndex] = useState(-1);
+ const [indexReady, setIndexReady] = useState(false);
+ const [loading, setLoading] = useState(false);
+ const [dropdownPos, setDropdownPos] = useState<{
+ top: number;
+ right: number;
+ width: number;
+ } | null>(null);
+
+ // ---------------------------------------------------------------------------
+ // PostHog search-miss tracking
+ // ---------------------------------------------------------------------------
+ // Mutable ref tracks the current search session without triggering re-renders.
+ // We capture events when the dropdown closes (isOpen → false).
+ const searchSessionRef = useRef({
+ query: "",
+ resultCount: 0,
+ clicked: false,
+ });
+ const prevIsOpenRef = useRef(false);
+
+ // Fire PostHog events when the search dropdown closes
+ useEffect(() => {
+ if (prevIsOpenRef.current && !isOpen) {
+ const { query: q, resultCount, clicked } = searchSessionRef.current;
+ const trimmed = q.trim();
+ if (trimmed) {
+ if (resultCount === 0) {
+ posthog.capture("docs_search_no_results", { query: trimmed });
+ } else if (!clicked) {
+ posthog.capture("docs_search_abandoned", {
+ query: trimmed,
+ result_count: resultCount,
+ });
+ }
+ }
+ searchSessionRef.current = { query: "", resultCount: 0, clicked: false };
+ }
+ prevIsOpenRef.current = isOpen;
+ }, [isOpen]);
+
+ // Lazy-load the search index on first interaction (focus / open) rather
+ // than on every page load. The search-query effect below already handles
+ // the case where the index isn't ready yet, so this is purely a preload
+ // optimisation that fires as soon as the user clicks into the search box.
+ const preloadTriggered = useRef(false);
+ const preloadIndex = useCallback(() => {
+ if (!preloadTriggered.current) {
+ preloadTriggered.current = true;
+ loadIndex().then(() => setIndexReady(true));
+ }
+ }, []);
+
+ // Run the search when the query changes
+ useEffect(() => {
+ if (!query.trim()) {
+ setResults([]);
+ return;
+ }
+
+ function runSearch(idx: MiniSearch) {
+ try {
+ const expanded = expandSynonyms(query);
+ const raw = idx.search(expanded, SEARCH_OPTIONS);
+ // Rerank against the original query so title matching is accurate
+ const reranked = rerankResults(raw, query).slice(0, 20);
+ setResults(reranked);
+ searchSessionRef.current.resultCount = reranked.length;
+ } catch {
+ // Gracefully handle invalid queries (e.g. punctuation-only input)
+ setResults([]);
+ searchSessionRef.current.resultCount = 0;
+ }
+ }
+
+ if (!indexReady) {
+ setLoading(true);
+ loadIndex()
+ .then((idx) => {
+ setIndexReady(true);
+ setLoading(false);
+ runSearch(idx);
+ })
+ .catch(() => setLoading(false));
+ return;
+ }
+
+ loadIndex()
+ .then(runSearch)
+ .catch(() => {});
+ }, [query, indexReady]);
+
+ // Global keyboard shortcut: / or Cmd/Ctrl+K
+ useEffect(() => {
+ function onKeyDown(e: globalThis.KeyboardEvent) {
+ if (
+ e.key === "/" &&
+ !e.metaKey &&
+ !e.ctrlKey &&
+ !["INPUT", "TEXTAREA"].includes(
+ (e.target as HTMLElement)?.tagName || "",
+ )
+ ) {
+ e.preventDefault();
+ preloadIndex();
+ inputRef.current?.focus();
+ }
+ if (e.key === "k" && (e.metaKey || e.ctrlKey)) {
+ e.preventDefault();
+ preloadIndex();
+ inputRef.current?.focus();
+ }
+ }
+ window.addEventListener("keydown", onKeyDown);
+ return () => window.removeEventListener("keydown", onKeyDown);
+ }, [preloadIndex]);
+
+ // Close on outside click
+ useEffect(() => {
+ function onClick(e: MouseEvent) {
+ const target = e.target as Node;
+ if (
+ containerRef.current &&
+ !containerRef.current.contains(target) &&
+ listRef.current &&
+ !listRef.current.contains(target)
+ ) {
+ setIsOpen(false);
+ } else if (
+ containerRef.current &&
+ !containerRef.current.contains(target) &&
+ !listRef.current
+ ) {
+ setIsOpen(false);
+ }
+ }
+ document.addEventListener("mousedown", onClick);
+ return () => document.removeEventListener("mousedown", onClick);
+ }, []);
+
+ // Close on route change
+ useEffect(() => {
+ const handleRouteChange = () => {
+ setIsOpen(false);
+ setQuery("");
+ inputRef.current?.blur();
+ };
+ router.events.on("routeChangeComplete", handleRouteChange);
+ return () => router.events.off("routeChangeComplete", handleRouteChange);
+ }, [router]);
+
+ // Scroll active item into view
+ useEffect(() => {
+ if (activeIndex >= 0 && listRef.current) {
+ const item = listRef.current.querySelector(
+ `[data-result-index="${activeIndex}"]`,
+ ) as HTMLElement;
+ item?.scrollIntoView({ block: "nearest" });
+ }
+ }, [activeIndex]);
+
+ const showDropdown = isOpen && query.trim().length > 0;
+ const hasResults = results.length > 0;
+ const grouped = hasResults ? groupByPage(results) : [];
+
+ // Build a flat list of items for keyboard navigation
+ const flatItems: SearchResult[] = grouped.flatMap((g) => g.items);
+
+ // Compute dropdown position based on input bounding rect
+ useEffect(() => {
+ if (!showDropdown || !containerRef.current) {
+ setDropdownPos(null);
+ return;
+ }
+ const updatePos = () => {
+ const rect = containerRef.current?.getBoundingClientRect();
+ if (rect) {
+ setDropdownPos({
+ top: rect.bottom + 8,
+ right: window.innerWidth - rect.right,
+ width: Math.max(rect.width, 576),
+ });
+ }
+ };
+ updatePos();
+ window.addEventListener("scroll", updatePos, true);
+ window.addEventListener("resize", updatePos);
+ return () => {
+ window.removeEventListener("scroll", updatePos, true);
+ window.removeEventListener("resize", updatePos);
+ };
+ }, [showDropdown]);
+
+ const navigate = useCallback(
+ (id: string) => {
+ const route = idToRoute(id);
+ searchSessionRef.current.clicked = true;
+ posthog.capture("docs_search_result_clicked", {
+ query: searchSessionRef.current.query.trim(),
+ result_id: id,
+ result_route: route,
+ });
+ setIsOpen(false);
+ setQuery("");
+ router.push(route);
+ },
+ [router],
+ );
+
+ const onKeyDown = useCallback(
+ (e: KeyboardEvent) => {
+ if (e.key === "ArrowDown") {
+ e.preventDefault();
+ setActiveIndex((i) => Math.min(i + 1, flatItems.length - 1));
+ } else if (e.key === "ArrowUp") {
+ e.preventDefault();
+ setActiveIndex((i) => Math.max(i - 1, 0));
+ } else if (e.key === "Enter") {
+ e.preventDefault();
+ const idx = activeIndex >= 0 ? activeIndex : 0;
+ if (flatItems[idx]) navigate(flatItems[idx].id);
+ } else if (e.key === "Escape") {
+ setIsOpen(false);
+ inputRef.current?.blur();
+ }
+ },
+ [flatItems, activeIndex, navigate],
+ );
+
+ return (
+
+ {/* Search input */}
+
{
+ const val = e.target.value;
+ setQuery(val);
+ searchSessionRef.current.query = val;
+ setIsOpen(true);
+ setActiveIndex(-1);
+ }}
+ onFocus={() => {
+ preloadIndex();
+ setFocused(true);
+ if (query.trim()) setIsOpen(true);
+ }}
+ onBlur={() => setFocused(false)}
+ onKeyDown={onKeyDown}
+ className={[
+ "_rounded-lg _px-3 _py-2 _transition-colors",
+ "_w-full md:_w-64",
+ "_text-base _leading-tight md:_text-sm",
+ focused
+ ? "_bg-transparent nextra-focusable"
+ : "_bg-black/[.05] dark:_bg-gray-50/10",
+ "placeholder:_text-gray-500 dark:placeholder:_text-gray-400",
+ "contrast-more:_border contrast-more:_border-current",
+ "[&::-webkit-search-cancel-button]:_appearance-none",
+ ].join(" ")}
+ />
+
+ {/* Keyboard shortcut indicator */}
+ {!focused && !query && (
+
+ {isMac ? (
+ <>
+ ⌘ K
+ >
+ ) : (
+ "CTRL K"
+ )}
+
+ )}
+
+ {/* Results dropdown (portaled to body to escape overflow:hidden ancestors) */}
+ {showDropdown &&
+ dropdownPos &&
+ typeof document !== "undefined" &&
+ createPortal(
+
+ {loading && (
+ -
+
+ Loading…
+
+ )}
+
+ {!loading && !hasResults && (
+ -
+ No results for “{query}”
+
+ )}
+
+ {(() => {
+ let flatIdx = 0;
+ return grouped.map((group) => (
+ -
+ {/* Page title header */}
+
+
+
+ {/* Section items */}
+
+
+ ));
+ })()}
+
,
+ document.body,
+ )}
+
+ );
+}
diff --git a/frontend/docs/components/UniversalTabs.tsx b/frontend/docs/components/UniversalTabs.tsx
index fa9729293..f352def20 100644
--- a/frontend/docs/components/UniversalTabs.tsx
+++ b/frontend/docs/components/UniversalTabs.tsx
@@ -1,7 +1,85 @@
import React from "react";
-import { Tabs } from "nextra/components";
+import { Callout, Tabs } from "nextra/components";
import { useLanguage } from "../context/LanguageContext";
+/* ── Logo map ──────────────────────────────────────────────── */
+
+const LOGO_MAP: Record = {
+ Python: "/python-logo.svg",
+ "Python-Sync": "/python-logo.svg",
+ "Python-Async": "/python-logo.svg",
+ Typescript: "/typescript-logo.svg",
+ TypeScript: "/typescript-logo.svg",
+ Go: "/go-logo.svg",
+ Ruby: "/ruby-logo.svg",
+};
+
+const tabLabelStyle: React.CSSProperties = {
+ display: "inline-flex",
+ alignItems: "center",
+ gap: "6px",
+};
+
+/** Renders an SVG as a CSS mask filled with currentColor (works in light + dark mode). */
+function ThemedIcon({ src }: { src: string }) {
+ return (
+
+ );
+}
+
+/** Returns a logo-enhanced label if a logo exists, otherwise the plain string. */
+function toTabLabel(name: string): string | React.ReactElement {
+ const logo = LOGO_MAP[name];
+ if (!logo) return name;
+ return (
+
+
+ {name}
+
+ );
+}
+
+/* ── Early access ─────────────────────────────────────────── */
+
+const EARLY_ACCESS_SDKS = ["Ruby"];
+
+const EarlyAccessCallout: React.FC<{ language: string }> = ({ language }) => (
+
+
+ The {language} SDK is in early access, and may change. We'd love your{" "}
+
+ feedback
+
+ !
+
+
+);
+
+/* ── Component ─────────────────────────────────────────────── */
+
interface UniversalTabsProps {
items: string[];
children: React.ReactNode;
@@ -31,15 +109,42 @@ export const UniversalTabs: React.FC = ({
}
};
+ const tabLabels = items.map(toTabLabel);
+
+ // Inject early access callout into SDK tabs that are in early access
+ const processedChildren =
+ optionKey === "language"
+ ? React.Children.map(children, (child) => {
+ if (
+ React.isValidElement<{
+ title?: string;
+ children?: React.ReactNode;
+ }>(child) &&
+ child.props.title &&
+ EARLY_ACCESS_SDKS.includes(child.props.title)
+ ) {
+ return React.cloneElement(child, {
+ children: (
+ <>
+
+ {child.props.children}
+ >
+ ),
+ });
+ }
+ return child;
+ })
+ : children;
+
return (
- {children}
+ {processedChildren}
);
};
diff --git a/frontend/docs/components/code/CodeTabs.tsx b/frontend/docs/components/code/CodeTabs.tsx
index a0c177c41..8b59e9f83 100644
--- a/frontend/docs/components/code/CodeTabs.tsx
+++ b/frontend/docs/components/code/CodeTabs.tsx
@@ -1,7 +1,7 @@
import React from "react";
import UniversalTabs from "../UniversalTabs";
-const languages = ["Python", "Typescript", "Go"];
+const languages = ["Python", "Typescript", "Go", "Ruby"];
type CodeSource = {
path?: string;
diff --git a/frontend/docs/components/code/Snippet.tsx b/frontend/docs/components/code/Snippet.tsx
index 1a7ac0545..b65da80c6 100644
--- a/frontend/docs/components/code/Snippet.tsx
+++ b/frontend/docs/components/code/Snippet.tsx
@@ -14,6 +14,8 @@ const languageToHighlightAbbreviation = (language: Language) => {
return "ts";
case "go":
return "go";
+ case "ruby":
+ return "rb";
default:
const exhaustiveCheck: never = language;
throw new Error(`Unsupported language: ${exhaustiveCheck}`);
diff --git a/frontend/docs/lib/search-config.ts b/frontend/docs/lib/search-config.ts
new file mode 100644
index 000000000..b7b256dab
--- /dev/null
+++ b/frontend/docs/lib/search-config.ts
@@ -0,0 +1,398 @@
+/**
+ * Shared MiniSearch configuration used at:
+ * 1. Index generation time (scripts/generate-llms.ts)
+ * 2. MCP server query time (pages/api/mcp.ts)
+ * 3. Browser search UI (components/Search.tsx)
+ *
+ * IMPORTANT: Any change here requires regenerating the index
+ * with `pnpm run generate-llms`.
+ */
+
+const STOP_WORDS = new Set([
+ // Articles & determiners
+ "a",
+ "an",
+ "the",
+ "this",
+ "that",
+ "these",
+ "those",
+ // Pronouns
+ "i",
+ "me",
+ "my",
+ "we",
+ "our",
+ "you",
+ "your",
+ "he",
+ "she",
+ "it",
+ "its",
+ "they",
+ "them",
+ "their",
+ // Prepositions
+ "in",
+ "on",
+ "at",
+ "to",
+ "of",
+ "for",
+ "from",
+ "by",
+ "with",
+ "about",
+ "into",
+ "between",
+ "through",
+ "during",
+ "before",
+ "after",
+ "above",
+ "below",
+ "up",
+ "down",
+ "out",
+ "off",
+ "over",
+ "under",
+ // Conjunctions
+ "and",
+ "but",
+ "or",
+ "nor",
+ "so",
+ "yet",
+ // Verbs (common/auxiliary)
+ "is",
+ "am",
+ "are",
+ "was",
+ "were",
+ "be",
+ "been",
+ "being",
+ "have",
+ "has",
+ "had",
+ "do",
+ "does",
+ "did",
+ "will",
+ "would",
+ "shall",
+ "should",
+ "may",
+ "might",
+ "must",
+ "can",
+ "could",
+ // Question words (common in NL queries)
+ "how",
+ "what",
+ "when",
+ "where",
+ "which",
+ "who",
+ "whom",
+ "why",
+ // Other common words
+ "not",
+ "no",
+ "all",
+ "each",
+ "every",
+ "both",
+ "few",
+ "more",
+ "most",
+ "other",
+ "some",
+ "such",
+ "than",
+ "too",
+ "very",
+ "just",
+ "also",
+ "here",
+ "there",
+ "then",
+ "now",
+]);
+
+/**
+ * MiniSearch processTerm — lowercases, filters stop words, and drops
+ * empty tokens produced by the default tokenizer for trailing punctuation
+ * (e.g. `hatchet.task(` → tokens: ["hatchet", "task", ""]).
+ *
+ * Must be identical at index time and query time.
+ */
+function processTerm(term: string): string | null {
+ if (term.length === 0) return null;
+ const lower = term.toLowerCase();
+ if (STOP_WORDS.has(lower)) return null;
+ return lower;
+}
+
+/**
+ * MiniSearch options — must be passed identically to `new MiniSearch()`
+ * and `MiniSearch.loadJSON()`.
+ *
+ * The `codeIdentifiers` field contains compound code tokens extracted from
+ * fenced code blocks (e.g. "hatchet.task", "ctx.spawn"). These are indexed
+ * as single tokens so that code-pattern queries match precisely.
+ */
+export const MINISEARCH_OPTIONS = {
+ fields: ["title", "content", "codeIdentifiers"] as string[],
+ storeFields: ["title", "pageTitle", "pageRoute"] as string[],
+ processTerm,
+};
+
+/**
+ * Default search options for querying the index.
+ */
+export const SEARCH_OPTIONS = {
+ boost: { title: 2, codeIdentifiers: 3 },
+ prefix: true,
+ fuzzy: 0.2,
+ combineWith: "OR" as const,
+};
+
+// ---------------------------------------------------------------------------
+// Synonym / alias expansion
+// ---------------------------------------------------------------------------
+
+/**
+ * Maps common synonyms, abbreviations, and alternate phrasings to terms
+ * that actually appear in the documentation. This lets users find pages
+ * even when they use different vocabulary than the docs.
+ *
+ * Keys are lowercased. Values are additional terms to append to the query.
+ * The original query terms are always kept.
+ */
+const SYNONYMS: Record = {
+ // Scheduling & timing
+ delay: "schedule sleep durable",
+ pause: "sleep durable",
+ debounce: "concurrency",
+ dedup: "concurrency deduplicate idempotent",
+ deduplicate: "concurrency idempotent",
+ idempotent: "concurrency retry additional-metadata",
+ recurring: "cron",
+ periodic: "cron scheduled",
+ interval: "cron scheduled",
+ timer: "cron scheduled sleep",
+
+ // Execution patterns
+ "background job": "task run worker",
+ "background task": "task run worker",
+ enqueue: "run-no-wait",
+ dispatch: "run trigger",
+ invoke: "run trigger",
+ trigger: "run event cron scheduled",
+ "fan out": "child spawn bulk",
+ fanout: "child spawn bulk",
+ parallel: "child spawn run-with-results",
+ scatter: "child spawn",
+ gather: "child run-with-results",
+ batch: "bulk run",
+ "fire and forget": "run-no-wait",
+ "long running": "durable execution",
+ async: "asyncio",
+ await: "asyncio run-with-results",
+
+ // Error handling & reliability
+ "error handling": "retry on-failure",
+ "error recovery": "retry on-failure durable",
+ fallback: "on-failure",
+ "try catch": "retry on-failure",
+ exception: "retry on-failure",
+ resilience: "retry durable on-failure",
+ reliable: "durable retry guarantees",
+
+ // Observability & debugging
+ monitor: "prometheus opentelemetry logging metrics",
+ monitoring: "prometheus opentelemetry logging metrics",
+ tracing: "opentelemetry",
+ traces: "opentelemetry sampling",
+ observability: "opentelemetry prometheus logging metrics",
+ metrics: "prometheus opentelemetry",
+ debug: "troubleshooting logging",
+ troubleshoot: "troubleshooting workers",
+ "not working": "troubleshooting workers",
+
+ // Infrastructure & deployment
+ deploy: "docker kubernetes compute",
+ install: "setup quickstart",
+ "getting started": "quickstart setup",
+ "env var": "environment variable configuration",
+ "env vars": "environment variables configuration",
+ "environment variable": "configuration compute",
+ scale: "autoscaling workers",
+ autoscale: "autoscaling workers",
+ "high availability": "ha helm",
+ postgres: "database configuration external",
+ database: "postgres configuration external",
+ performance: "improving benchmarking",
+ benchmark: "benchmarking performance",
+ upgrade: "migration guide",
+ downgrade: "downgrading versions",
+ migrate: "migration guide",
+
+ // Concurrency & flow control
+ throttle: "rate limit concurrency",
+ "rate limiting": "rate limits",
+ limit: "rate concurrency",
+ lock: "concurrency",
+ semaphore: "concurrency",
+ mutex: "concurrency",
+
+ // Workflow patterns
+ step: "dag task workflow",
+ pipeline: "dag workflow orchestration",
+ graph: "dag workflow",
+ "if else": "conditional workflows",
+ branch: "conditional workflows",
+ condition: "conditional workflows",
+ orchestrate: "orchestration dag workflow",
+
+ // Worker & execution concepts
+ queue: "task concurrency worker",
+ "job queue": "task worker",
+ "task queue": "task worker concurrency",
+ slot: "manual release concurrency worker",
+ "health check": "healthcheck worker",
+ liveness: "healthcheck worker",
+ readiness: "healthcheck worker",
+ sticky: "sticky assignment worker affinity",
+ affinity: "worker affinity sticky",
+
+ // Communication & events
+ signal: "event durable",
+ callback: "event webhook",
+ hook: "webhook event",
+ "wait for event": "durable events",
+ subscribe: "event trigger",
+ publish: "event trigger",
+ "api call": "inter-service triggering",
+
+ // SDK specific
+ decorator: "hatchet.task python",
+ middleware: "lifecycle dependency-injection",
+ context: "ctx spawn",
+ dataclass: "dataclasses pydantic python",
+ lifespan: "lifespans lifecycle worker",
+ cleanup: "lifespans lifecycle",
+ teardown: "lifespans lifecycle",
+ startup: "lifespans lifecycle worker",
+ "type safe": "pydantic dataclasses",
+
+ // CLI & tools
+ terminal: "cli tui",
+ "command line": "cli",
+ dashboard: "tui",
+};
+
+/**
+ * Expand a search query by appending synonym terms.
+ *
+ * For each word (or consecutive word pair) in the query that matches
+ * a synonym key, the mapped terms are appended. The original query
+ * is always preserved so exact matches still work.
+ */
+export function expandSynonyms(query: string): string {
+ const lower = query.toLowerCase().trim();
+ const words = lower.split(/\s+/).filter((w) => w.length > 0);
+ const extra: string[] = [];
+
+ // Check full query first
+ if (SYNONYMS[lower]) {
+ extra.push(SYNONYMS[lower]);
+ }
+
+ // Check bigrams (consecutive word pairs)
+ for (let i = 0; i < words.length - 1; i++) {
+ const bigram = words[i] + " " + words[i + 1];
+ if (SYNONYMS[bigram]) {
+ extra.push(SYNONYMS[bigram]);
+ }
+ }
+
+ // Check individual words
+ for (const word of words) {
+ if (SYNONYMS[word]) {
+ extra.push(SYNONYMS[word]);
+ }
+ }
+
+ if (extra.length === 0) return query;
+ return query + " " + extra.join(" ");
+}
+
+/**
+ * Tokenize a query string similarly to MiniSearch — split on common
+ * punctuation and whitespace, then lowercase and filter stop words.
+ * This mirrors the default tokenizer's behavior for reranking purposes.
+ */
+function tokenizeQuery(text: string): string[] {
+ return text
+ .split(/[\s\-_.,:;!?'"()[\]{}<>@#$%^&*+=|/\\~`]+/)
+ .map((t) => t.toLowerCase())
+ .filter((t) => t.length > 0 && !STOP_WORDS.has(t));
+}
+
+/**
+ * Post-search reranking: boost results whose title or route closely matches
+ * the query.
+ *
+ * BM25 scores terms independently, so a title that is a near-exact match
+ * for the full query (e.g. "Durable Execution" for "durable execut") may
+ * rank below documents that score well on individual terms. This fixes that.
+ */
+export function rerankResults<T extends { id: string; score: number; [key: string]: unknown }>(
+ results: T[],
+ query: string,
+): T[] {
+ const queryLower = query.toLowerCase().trim();
+ const queryTerms = tokenizeQuery(queryLower);
+
+ if (queryTerms.length === 0) return results;
+
+ return results
+ .map((r) => {
+ const title = ((r.title as string) || "").toLowerCase();
+ // Strip the hatchet://docs/ prefix so it doesn't pollute route matching
+ const rawRoute = ((r.pageRoute as string) || r.id || "").toLowerCase();
+ const route = rawRoute.replace("hatchet://docs/", "");
+ let boost = 1;
+
+ // Big boost if the full query (cleaned) appears in the title
+ const queryClean = queryTerms.join(" ");
+ if (title.includes(queryClean)) {
+ boost *= 5;
+ }
+
+ // Boost for each query term found in the title
+ let titleTermHits = 0;
+ for (const term of queryTerms) {
+ if (title.includes(term)) titleTermHits++;
+ }
+ if (queryTerms.length > 0) {
+ boost *= 1 + (titleTermHits / queryTerms.length) * 2;
+ }
+
+ // Smaller boost for query terms found in the page route / slug
+ // (e.g. "task" matches "your-first-task" in the URL)
+ let routeTermHits = 0;
+ for (const term of queryTerms) {
+ if (route.includes(term)) routeTermHits++;
+ }
+ if (queryTerms.length > 0) {
+ boost *= 1 + (routeTermHits / queryTerms.length) * 0.5;
+ }
+
+ return { ...r, score: r.score * boost };
+ })
+ .sort((a, b) => b.score - a.score);
+}
diff --git a/frontend/docs/lib/snippet.ts b/frontend/docs/lib/snippet.ts
index 8919eda48..9885991a9 100644
--- a/frontend/docs/lib/snippet.ts
+++ b/frontend/docs/lib/snippet.ts
@@ -3,5 +3,5 @@ export type Snippet = {
content: string;
githubUrl: string;
codePath: string;
- language: 'python' | 'typescript' | 'go'
+ language: 'python' | 'typescript' | 'go' | 'ruby'
};
diff --git a/frontend/docs/next.config.mjs b/frontend/docs/next.config.mjs
index 3d25b10db..559e6e611 100644
--- a/frontend/docs/next.config.mjs
+++ b/frontend/docs/next.config.mjs
@@ -31,7 +31,7 @@ const nextConfig = {
permanent: true,
},
{
- source: '/:path((?!api|home|cli|v1|v0|compute|sdk|contributing|self-hosting|launches|blog|favicon\\.ico|.*\\.png|.*\\.gif|_next/.*|monitoring\-demo\.mp4).*)',
+ source: '/:path((?!api|home|cli|v1|v0|compute|sdk|contributing|self-hosting|launches|blog|llms|favicon\\.ico|.*\\.png|.*\\.gif|.*\\.svg|_next/.*|monitoring\-demo\.mp4).*)',
destination: '/home/:path*',
permanent: false,
},
diff --git a/frontend/docs/package.json b/frontend/docs/package.json
index 93ff32de9..c5b34f6ec 100644
--- a/frontend/docs/package.json
+++ b/frontend/docs/package.json
@@ -4,15 +4,17 @@
"description": "Nextra docs template",
"packageManager": "pnpm@10.16.1",
"scripts": {
- "taskfile-dev": "pnpm run generate-examples && next dev",
- "dev": "next dev",
- "build": "pnpm run generate-examples && next build",
+ "taskfile-dev": "pnpm run generate-examples && pnpm run generate-llms && pnpm run test-search -- --warn && next dev",
+ "dev": "pnpm run generate-llms && pnpm run test-search -- --warn && next dev",
+ "build": "pnpm run generate-examples && pnpm run generate-llms && pnpm run test-search && next build",
"start": "next start",
"lint:check": "npm run prettier:check",
"lint:fix": "npm run prettier:fix",
"prettier:check": "prettier \"(pages|components)/**/*.{tsx,mdx,js,ts}\" --list-different",
"prettier:fix": "prettier \"(pages|components)/**/*.{tsx,mdx,js,ts}\" --write",
- "generate-examples": "cd ../snippets/ && python3 generate.py"
+ "generate-llms": "tsx scripts/generate-llms.ts",
+ "test-search": "tsx scripts/test-search-quality.ts",
+ "generate-examples": "cd ../snippets/ && python3 generate.py"
},
"repository": {
"type": "git",
@@ -36,11 +38,13 @@
"js-yaml": "^4.1.1",
"loops": "^5.0.1",
"lucide-react": "^0.459.0",
+ "minisearch": "^7.2.0",
"next": "^14.2.35",
"nextra": "^3.3.1",
"nextra-theme-docs": "^3.3.1",
"postcss": "^8.5.6",
"posthog-js": "^1.272.1",
+ "posthog-node": "^5.24.15",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"react-lottie-player": "^2.1.0",
@@ -50,7 +54,8 @@
"swagger-ui-react": "^5.29.3",
"tailwind-merge": "^2.6.0",
"tailwindcss": "^3.4.18",
- "tailwindcss-animate": "^1.0.7"
+ "tailwindcss-animate": "^1.0.7",
+ "zod": "^4.3.6"
},
"devDependencies": {
"@types/node": "18.11.10",
@@ -58,6 +63,7 @@
"@types/react-dom": "^18.3.7",
"postcss-import": "^16.1.1",
"prettier": "^3.6.2",
+ "tsx": "^4.21.0",
"typescript": "^5.9.3"
},
"resolutions": {
diff --git a/frontend/docs/pages/_meta.js b/frontend/docs/pages/_meta.js
index 70c9c9e89..e27dad1a1 100644
--- a/frontend/docs/pages/_meta.js
+++ b/frontend/docs/pages/_meta.js
@@ -1,6 +1,6 @@
export default {
home: {
- title: "User Guide",
+ title: "Guide",
type: "page",
theme: {
toc: false,
diff --git a/frontend/docs/pages/api/mcp.ts b/frontend/docs/pages/api/mcp.ts
new file mode 100644
index 000000000..4d2a5c978
--- /dev/null
+++ b/frontend/docs/pages/api/mcp.ts
@@ -0,0 +1,644 @@
+/**
+ * MCP (Model Context Protocol) server for Hatchet documentation.
+ *
+ * Implements the Streamable HTTP transport (stateless mode) so that
+ * AI editors like Cursor, Claude Code, and Claude Desktop can query
+ * Hatchet docs as MCP resources.
+ *
+ * Endpoint: POST /api/mcp (JSON-RPC 2.0)
+ * GET /api/mcp (returns server metadata)
+ */
+import type { NextApiRequest, NextApiResponse } from "next";
+import fs from "node:fs";
+import path from "node:path";
+import { PostHog } from "posthog-node";
+
+// ---------------------------------------------------------------------------
+// Types
+// ---------------------------------------------------------------------------
+interface JsonRpcRequest {
+ jsonrpc: "2.0";
+ id?: string | number | null;
+ method: string;
+ params?: Record<string, unknown>;
+}
+
+interface JsonRpcResponse {
+ jsonrpc: "2.0";
+ id: string | number | null;
+ result?: unknown;
+ error?: { code: number; message: string; data?: unknown };
+}
+
+interface McpResource {
+ uri: string;
+ name: string;
+ description: string;
+ mimeType: string;
+}
+
+interface DocEntry {
+ uri: string;
+ name: string;
+ description: string;
+ filePath: string;
+}
+
+// ---------------------------------------------------------------------------
+// Constants
+// ---------------------------------------------------------------------------
+const PROTOCOL_VERSION = "2024-11-05";
+const SERVER_NAME = "hatchet-docs";
+const SERVER_VERSION = "1.0.0";
+
+const LLMS_DIR = path.join(process.cwd(), "public", "llms");
+const LLMS_TXT_PATH = path.join(process.cwd(), "public", "llms.txt");
+const SEARCH_INDEX_PATH = path.join(
+ process.cwd(),
+ "public",
+ "llms-search-index.json",
+);
+
+// ---------------------------------------------------------------------------
+// PostHog server-side analytics
+// ---------------------------------------------------------------------------
+let posthogClient: PostHog | null = null;
+
+function getPostHog(): PostHog | null {
+ if (posthogClient) return posthogClient;
+ const key = process.env.NEXT_PUBLIC_POSTHOG_KEY;
+ if (!key) return null;
+ posthogClient = new PostHog(key, {
+ host: process.env.NEXT_PUBLIC_POSTHOG_HOST || "https://us.i.posthog.com",
+ flushAt: 10,
+ flushInterval: 5000,
+ });
+ return posthogClient;
+}
+
+function trackMcpEvent(
+ req: NextApiRequest,
+ method: string,
+ properties?: Record<string, unknown>,
+): void {
+ const ph = getPostHog();
+ if (!ph) return;
+ // Use a session ID from the MCP client if available, otherwise anonymous
+ const sessionId = (req.headers["mcp-session-id"] as string) || "anonymous";
+ ph.capture({
+ distinctId: `mcp:${sessionId}`,
+ event: "mcp_request",
+ properties: {
+ method,
+ user_agent: req.headers["user-agent"] || "",
+ ...properties,
+ },
+ });
+}
+
+// ---------------------------------------------------------------------------
+// MiniSearch index (pre-generated by scripts/generate-llms.ts)
+// ---------------------------------------------------------------------------
+import MiniSearch from "minisearch";
+import {
+ MINISEARCH_OPTIONS,
+ SEARCH_OPTIONS,
+ rerankResults,
+ expandSynonyms,
+} from "@/lib/search-config";
+
+interface SearchDoc {
+ id: string;
+ title: string;
+ content: string;
+}
+
+let cachedMiniSearch: MiniSearch<SearchDoc> | null = null;
+
+function getSearchIndex(): MiniSearch<SearchDoc> | null {
+ if (cachedMiniSearch) return cachedMiniSearch;
+ try {
+ const raw = fs.readFileSync(SEARCH_INDEX_PATH, "utf-8");
+ cachedMiniSearch = MiniSearch.loadJSON(raw, MINISEARCH_OPTIONS);
+ return cachedMiniSearch;
+ } catch {
+ return null;
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Build the resource catalogue from public/llms/
+// ---------------------------------------------------------------------------
+let cachedDocs: DocEntry[] | null = null;
+
+function collectDocs(): DocEntry[] {
+ if (cachedDocs) return cachedDocs;
+
+ const entries: DocEntry[] = [];
+
+ // Parse llms.txt to get titles and URLs
+ const titleMap = new Map<string, string>();
+ if (fs.existsSync(LLMS_TXT_PATH)) {
+ const llmsTxt = fs.readFileSync(LLMS_TXT_PATH, "utf-8");
+ const linkPattern =
+ /- \[([^\]]+)\]\(https:\/\/docs\.hatchet\.run\/([^)]+)\)/g;
+ let m: RegExpExecArray | null;
+ while ((m = linkPattern.exec(llmsTxt)) !== null) {
+ titleMap.set(m[2], m[1]);
+ }
+ }
+
+ function walk(dir: string, prefix: string): void {
+ if (!fs.existsSync(dir)) return;
+ const items = fs.readdirSync(dir, { withFileTypes: true });
+ for (const item of items) {
+ if (item.isDirectory()) {
+ walk(
+ path.join(dir, item.name),
+ prefix ? `${prefix}/${item.name}` : item.name,
+ );
+ } else if (item.name.endsWith(".md")) {
+ const slug = item.name.replace(/\.md$/, "");
+ const docPath = prefix ? `${prefix}/${slug}` : slug;
+
+ // Skip duplicates (e.g. home.md vs home/index.md)
+ if (
+ slug === "index" &&
+ entries.some((e) => e.uri === `hatchet://docs/${prefix}`)
+ ) {
+ continue;
+ }
+ const lookupKey = slug === "index" ? `${prefix}/index` : docPath;
+ const title = titleMap.get(lookupKey) || titleMap.get(docPath) || slug;
+
+ const uri = `hatchet://docs/${docPath}`;
+ if (entries.some((e) => e.uri === uri)) continue;
+
+ entries.push({
+ uri,
+ name: title,
+ description: `Hatchet documentation: ${title}`,
+ filePath: path.join(dir, item.name),
+ });
+ }
+ }
+ }
+
+ walk(LLMS_DIR, "");
+ cachedDocs = entries;
+ return entries;
+}
+
+// ---------------------------------------------------------------------------
+// MCP method handlers
+// ---------------------------------------------------------------------------
+function handleInitialize(id: string | number | null): JsonRpcResponse {
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: {
+ protocolVersion: PROTOCOL_VERSION,
+ capabilities: {
+ resources: { listChanged: false },
+ tools: {},
+ },
+ serverInfo: {
+ name: SERVER_NAME,
+ version: SERVER_VERSION,
+ },
+ },
+ };
+}
+
+function handleResourcesList(id: string | number | null): JsonRpcResponse {
+ const docs = collectDocs();
+ const resources: McpResource[] = docs.map((d) => ({
+ uri: d.uri,
+ name: d.name,
+ description: d.description,
+ mimeType: "text/markdown",
+ }));
+
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: { resources },
+ };
+}
+
+function handleResourcesRead(
+ id: string | number | null,
+ params: Record<string, unknown>,
+): JsonRpcResponse {
+ const uri = params.uri as string | undefined;
+ if (!uri) {
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32602, message: "Missing required parameter: uri" },
+ };
+ }
+
+ const docs = collectDocs();
+ const doc = docs.find((d) => d.uri === uri);
+ if (!doc) {
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32602, message: `Resource not found: ${uri}` },
+ };
+ }
+
+ let content = "";
+ try {
+ content = fs.readFileSync(doc.filePath, "utf-8");
+ } catch {
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32603, message: `Failed to read resource: ${uri}` },
+ };
+ }
+
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: {
+ contents: [
+ {
+ uri: doc.uri,
+ mimeType: "text/markdown",
+ text: content,
+ },
+ ],
+ },
+ };
+}
+
+function handleToolsList(id: string | number | null): JsonRpcResponse {
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: {
+ tools: [
+ {
+ name: "search_docs",
+ description:
+ "Search Hatchet documentation by keyword. Returns matching page titles and URIs.",
+ inputSchema: {
+ type: "object",
+ properties: {
+ query: {
+ type: "string",
+ description:
+ "Search query (keywords to match against page titles and content)",
+ },
+ max_results: {
+ type: "number",
+ description:
+ "Maximum number of results to return (default: 10)",
+ },
+ },
+ required: ["query"],
+ },
+ },
+ {
+ name: "get_full_docs",
+ description:
+ "Get the complete Hatchet documentation as a single document. Useful for comprehensive context.",
+ inputSchema: {
+ type: "object",
+ properties: {},
+ },
+ },
+ ],
+ },
+ };
+}
+
+function handleToolsCall(
+ id: string | number | null,
+ params: Record<string, unknown>,
+): JsonRpcResponse {
+ const toolName = params.name as string | undefined;
+ const args = (params.arguments || {}) as Record<string, unknown>;
+
+ if (toolName === "search_docs") {
+ return handleSearchDocs(id, args);
+ }
+
+ if (toolName === "get_full_docs") {
+ return handleGetFullDocs(id);
+ }
+
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32602, message: `Unknown tool: ${toolName}` },
+ };
+}
+
+function handleSearchDocs(
+ id: string | number | null,
+ args: Record<string, unknown>,
+): JsonRpcResponse {
+ const query = ((args.query as string) || "").trim();
+ const maxResults = (args.max_results as number) || 10;
+
+ if (!query) {
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32602, message: "Missing required argument: query" },
+ };
+ }
+
+ const miniSearch = getSearchIndex();
+ const docCatalogue = collectDocs();
+
+ // Build a lookup from URI to DocEntry for snippet extraction
+ const docByUri = new Map<string, DocEntry>();
+ for (const d of docCatalogue) {
+ docByUri.set(d.uri, d);
+ }
+
+ let results: Array<{ uri: string; title: string; score: number }> = [];
+
+ if (miniSearch) {
+ // Expand synonyms so alternate phrasings (e.g. "delay" → "schedule sleep")
+ // still surface the right pages, then rerank against the original query.
+ const expanded = expandSynonyms(query);
+ const searchResults = miniSearch.search(expanded, SEARCH_OPTIONS);
+ const reranked = rerankResults(searchResults, query);
+
+ const seenPages = new Set<string>();
+ const deduped: typeof reranked = [];
+ for (const r of reranked) {
+ const pageUri = r.id.replace(/#.*$/, "");
+ if (!seenPages.has(pageUri)) {
+ seenPages.add(pageUri);
+ deduped.push(r);
+ }
+ }
+
+ results = deduped.slice(0, maxResults).map((r) => ({
+ uri: r.id.replace(/#.*$/, ""),
+ title: (r.pageTitle as string) || (r.title as string) || r.id,
+ score: r.score,
+ }));
+ } else {
+ // Fallback: simple keyword matching if index unavailable
+ const keywords = query.toLowerCase().split(/\s+/);
+ const scored: Array<{ uri: string; title: string; score: number }> = [];
+ for (const doc of docCatalogue) {
+ let score = 0;
+ const nameLower = doc.name.toLowerCase();
+ const uriLower = doc.uri.toLowerCase();
+ for (const kw of keywords) {
+ if (nameLower.includes(kw)) score += 10;
+ if (uriLower.includes(kw)) score += 5;
+ }
+ if (score > 0) {
+ scored.push({ uri: doc.uri, title: doc.name, score });
+ }
+ }
+ scored.sort((a, b) => b.score - a.score);
+ results = scored.slice(0, maxResults);
+ }
+
+ // Extract snippets from the matching docs
+ const formatted = results.map((r, i) => {
+ let snippet = "";
+ const doc = docByUri.get(r.uri);
+ if (doc) {
+ try {
+ const content = fs.readFileSync(doc.filePath, "utf-8");
+ const contentLower = content.toLowerCase();
+ const keywords = query.toLowerCase().split(/\s+/);
+ const firstKw = keywords.find((kw) => contentLower.includes(kw));
+ if (firstKw) {
+ const idx = contentLower.indexOf(firstKw);
+ const start = Math.max(0, idx - 80);
+ const end = Math.min(content.length, idx + firstKw.length + 80);
+ snippet =
+ (start > 0 ? "..." : "") +
+ content.slice(start, end).trim() +
+ (end < content.length ? "..." : "");
+ }
+ } catch {
+ // skip snippet on read error
+ }
+ }
+ return `${i + 1}. **${r.title}** (${r.uri})\n ${snippet}`;
+ });
+
+ const text =
+ results.length === 0
+ ? `No results found for "${query}".`
+ : formatted.join("\n\n");
+
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: {
+ content: [{ type: "text", text }],
+ },
+ };
+}
+
+function handleGetFullDocs(id: string | number | null): JsonRpcResponse {
+ const fullDocsPath = path.join(process.cwd(), "public", "llms-full.txt");
+ let content = "";
+ try {
+ content = fs.readFileSync(fullDocsPath, "utf-8");
+ } catch {
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: {
+ code: -32603,
+ message: "Failed to read full documentation file",
+ },
+ };
+ }
+
+ return {
+ jsonrpc: "2.0",
+ id,
+ result: {
+ content: [{ type: "text", text: content }],
+ },
+ };
+}
+
+// ---------------------------------------------------------------------------
+// Notifications (no response needed)
+// ---------------------------------------------------------------------------
+const NOTIFICATION_METHODS = new Set([
+ "notifications/initialized",
+ "notifications/cancelled",
+ "notifications/progress",
+]);
+
+// ---------------------------------------------------------------------------
+// Route JSON-RPC request to handler
+// ---------------------------------------------------------------------------
+function routeRequest(
+ rpcReq: JsonRpcRequest,
+ httpReq: NextApiRequest,
+): JsonRpcResponse | null {
+ const { id, method, params } = rpcReq;
+
+ // Notifications have no id and expect no response
+ if (id === undefined || id === null) {
+ if (NOTIFICATION_METHODS.has(method)) return null;
+ // Unknown notification — ignore
+ return null;
+ }
+
+ // Track MCP usage
+ const trackProps: Record<string, unknown> = {};
+ if (method === "tools/call" && params?.name) {
+ trackProps.tool = params.name;
+ const args = params.arguments as Record<string, unknown> | undefined;
+ if (args?.query) trackProps.tool_query = args.query;
+ }
+ if (method === "resources/read" && params?.uri) {
+ trackProps.resource_uri = params.uri;
+ }
+ trackMcpEvent(httpReq, method, trackProps);
+
+ switch (method) {
+ case "initialize":
+ return handleInitialize(id);
+ case "resources/list":
+ return handleResourcesList(id);
+ case "resources/read":
+ return handleResourcesRead(id, params || {});
+ case "tools/list":
+ return handleToolsList(id);
+ case "tools/call":
+ return handleToolsCall(id, params || {});
+ case "ping":
+ return { jsonrpc: "2.0", id, result: {} };
+ default:
+ return {
+ jsonrpc: "2.0",
+ id,
+ error: { code: -32601, message: `Method not found: ${method}` },
+ };
+ }
+}
+
+// ---------------------------------------------------------------------------
+// Next.js API handler
+// ---------------------------------------------------------------------------
+
+export const config = {
+ // Disable body size limit and response size limit for SSE / large docs
+ api: { responseLimit: false },
+};
+
+export default function handler(
+ req: NextApiRequest,
+ res: NextApiResponse,
+): void {
+ // CORS headers for cross-origin MCP clients
+ res.setHeader("Access-Control-Allow-Origin", "*");
+ res.setHeader("Access-Control-Allow-Methods", "GET, POST, DELETE, OPTIONS");
+ res.setHeader(
+ "Access-Control-Allow-Headers",
+ "Content-Type, Accept, Mcp-Session-Id",
+ );
+ res.setHeader("Access-Control-Expose-Headers", "Mcp-Session-Id");
+
+ if (req.method === "OPTIONS") {
+ res.status(204).end();
+ return;
+ }
+
+ // -----------------------------------------------------------------------
+ // GET — Streamable HTTP SSE endpoint for server-to-client notifications.
+ // mcp-remote establishes this connection first before sending POST.
+ // For a stateless server we just keep the stream open.
+ // -----------------------------------------------------------------------
+ if (req.method === "GET") {
+ const accept = (req.headers.accept || "").toLowerCase();
+
+ if (accept.includes("text/event-stream")) {
+ // SSE stream — required by MCP Streamable HTTP transport
+ res.writeHead(200, {
+ "Content-Type": "text/event-stream",
+ "Cache-Control": "no-cache, no-transform",
+ Connection: "keep-alive",
+ });
+
+ // Send a keep-alive comment so the client knows the connection is alive
+ res.write(": connected\n\n");
+
+ // Keep the connection open; the client closes when it's done
+ const keepAlive = setInterval(() => {
+ res.write(": ping\n\n");
+ }, 15_000);
+
+ req.on("close", () => {
+ clearInterval(keepAlive);
+ res.end();
+ });
+ return;
+ }
+
+ // Plain GET returns server metadata (useful for browser discovery)
+ res.status(200).json({
+ name: SERVER_NAME,
+ version: SERVER_VERSION,
+ protocolVersion: PROTOCOL_VERSION,
+ description:
+ "MCP server for Hatchet documentation. Send JSON-RPC 2.0 POST requests to interact.",
+ });
+ return;
+ }
+
+ // -----------------------------------------------------------------------
+ // DELETE — session termination (no-op for stateless server)
+ // -----------------------------------------------------------------------
+ if (req.method === "DELETE") {
+ res.status(200).end();
+ return;
+ }
+
+ if (req.method !== "POST") {
+ res.status(405).json({ error: "Method not allowed" });
+ return;
+ }
+
+ // -----------------------------------------------------------------------
+ // POST — JSON-RPC 2.0 request handling
+ // -----------------------------------------------------------------------
+ const body = req.body;
+
+ // Handle batch requests (array of JSON-RPC messages)
+ if (Array.isArray(body)) {
+ const responses: JsonRpcResponse[] = [];
+ for (const item of body) {
+ const result = routeRequest(item as JsonRpcRequest, req);
+ if (result) responses.push(result);
+ }
+ if (responses.length === 0) {
+ res.status(204).end();
+ } else {
+ res.status(200).json(responses);
+ }
+ return;
+ }
+
+ // Single request
+ const result = routeRequest(body as JsonRpcRequest, req);
+ if (!result) {
+ // Notification — no response
+ res.status(204).end();
+ return;
+ }
+
+ res.status(200).json(result);
+}
diff --git a/frontend/docs/pages/home/_meta.js b/frontend/docs/pages/home/_meta.js
index 26fb71fa5..be9674e5d 100644
--- a/frontend/docs/pages/home/_meta.js
+++ b/frontend/docs/pages/home/_meta.js
@@ -7,18 +7,19 @@ export default {
architecture: "Architecture",
"guarantees-and-tradeoffs": "Guarantees & Tradeoffs",
"--quickstart": {
- title: "Quickstart",
+ title: "Setup",
type: "separator",
},
"hatchet-cloud-quickstart": "Hatchet Cloud Quickstart",
+ setup: "Advanced Setup",
+ "install-docs-mcp": "Install Docs MCP",
"--guide": {
- title: "Walkthrough",
+ title: "Fundamentals",
type: "separator",
},
- setup: "Setup",
"your-first-task": "Tasks",
workers: "Workers",
- "running-your-task": "Your First Task",
+ "running-your-task": "Running Tasks",
environments: "Environments",
"--running-tasks": {
title: "Ways of Running Tasks",
diff --git a/frontend/docs/pages/home/additional-metadata.mdx b/frontend/docs/pages/home/additional-metadata.mdx
index d6a7d3a28..ab11da06b 100644
--- a/frontend/docs/pages/home/additional-metadata.mdx
+++ b/frontend/docs/pages/home/additional-metadata.mdx
@@ -17,7 +17,7 @@ You can attach additional metadata when pushing events or triggering task runs u
-
+
@@ -33,10 +33,13 @@ You can attach additional metadata when pushing events or triggering task runs u
+
+
+
-
+
+
+
+
diff --git a/frontend/docs/pages/home/architecture.mdx b/frontend/docs/pages/home/architecture.mdx
index 77ef9bf57..4014b9024 100644
--- a/frontend/docs/pages/home/architecture.mdx
+++ b/frontend/docs/pages/home/architecture.mdx
@@ -80,7 +80,7 @@ Heterogeneous workers can also be polyglot, meaning they can run multiple langua
The platform maintains durable state for all aspects of workflow execution, including task queue state for queued, running, and completed tasks. Workflow definitions with their dependencies, configuration, and metadata are stored persistently, ensuring your orchestration logic survives system restarts.
-In [self-hosted deployments](./self-hosting), this can be a single PostgreSQL database, or for high-throughput workloads you can use RabbitMQ for inter-service communication. In [Hatchet Cloud](https://hatchet.run), this is managed for you with enterprise-grade reliability and performance, handling backups, scaling, and maintenance automatically.
+In [self-hosted deployments](../self-hosting), this can be a single PostgreSQL database, or for high-throughput workloads you can use RabbitMQ for inter-service communication. In [Hatchet Cloud](https://hatchet.run), this is managed for you with enterprise-grade reliability and performance, handling backups, scaling, and maintenance automatically.
## Design Philosophy
diff --git a/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx b/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx
index f3c7172ac..418b11409 100644
--- a/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx
+++ b/frontend/docs/pages/home/bulk-retries-and-cancellations.mdx
@@ -16,7 +16,7 @@ There are two ways of bulk cancelling or replaying tasks in both cases:
The first way to bulk cancel or replay runs is by providing a list of run ids. This is the most straightforward way to cancel or replay runs in bulk.
-
+
{/* TODO V1 DOCS - Add TS */}
@@ -70,13 +70,18 @@ The first way to bulk cancel or replay runs is by providing a list of run ids. T
+
+
+
+
+
### Bulk Operations by Filters
The second way to bulk cancel or replay runs is by providing a list of filters. This is the most powerful way to cancel or replay runs in bulk, as it allows you to cancel or replay all runs matching a set of arbitrary filters without needing to provide IDs for the runs in advance.
-
+
{/* TODO V1 DOCS - Add TS */}
@@ -99,6 +104,9 @@ The example below provides some filters you might use to cancel or replay runs i
Running this request will cancel all task runs matching the filters provided.
+
+
+
# Manual Retries
diff --git a/frontend/docs/pages/home/bulk-run.mdx b/frontend/docs/pages/home/bulk-run.mdx
index 54950edbd..8632ae5f8 100644
--- a/frontend/docs/pages/home/bulk-run.mdx
+++ b/frontend/docs/pages/home/bulk-run.mdx
@@ -7,7 +7,7 @@ import { snippets } from "@/lib/generated/snippets";
Often you may want to run a task multiple times with different inputs. There is significant overhead (i.e. network roundtrips) to write the task, so if you're running multiple tasks, it's best to use the bulk run methods.
-
+
You can use the `aio_run_many` method to bulk run a task. This will return a list of results.
@@ -61,4 +61,7 @@ You can use the `RunMany` method directly on the `Workflow` or `StandaloneTask`
Additional bulk methods are coming soon for the Go SDK. Join our [Discord](https://hatchet.run/discord) to stay up to date.
+
+
+
diff --git a/frontend/docs/pages/home/cancellation.mdx b/frontend/docs/pages/home/cancellation.mdx
index 18568b42e..7fce04cd7 100644
--- a/frontend/docs/pages/home/cancellation.mdx
+++ b/frontend/docs/pages/home/cancellation.mdx
@@ -11,7 +11,7 @@ When a task is canceled, Hatchet sends a cancellation signal to the task. The ta
## Cancellation Mechanisms
-
+
+
+
+
+
## Cancellation Best Practices
diff --git a/frontend/docs/pages/home/child-spawning.mdx b/frontend/docs/pages/home/child-spawning.mdx
index 8bfdde4a8..c47819f97 100644
--- a/frontend/docs/pages/home/child-spawning.mdx
+++ b/frontend/docs/pages/home/child-spawning.mdx
@@ -16,7 +16,7 @@ Hatchet supports the dynamic creation of child tasks during a parent task's exec
To implement child task spawning, you first need to create both parent and child task definitions.
-
+
First, we'll declare a couple of tasks for the parent and child:
@@ -42,13 +42,17 @@ And that's it! The fanout parent will run and spawn the child, and then will col
+
+
+
+
## Running Child Tasks
To spawn and run a child task from a parent task, use the appropriate method for your language:
-
+
@@ -66,13 +70,16 @@ To spawn and run a child task from a parent task, use the appropriate method for
+
+
+
## Parallel Child Task Execution
As shown in the examples above, you can spawn multiple child tasks in parallel:
-
+
@@ -90,6 +97,9 @@ As shown in the examples above, you can spawn multiple child tasks in parallel:
+
+
+
## Use Cases for Child Workflows
@@ -106,7 +116,7 @@ Child workflows are ideal for:
When working with child workflows, it's important to properly handle errors. Here are patterns for different languages:
-
+
@@ -124,4 +134,7 @@ When working with child workflows, it's important to properly handle errors. Her
+
+
+
diff --git a/frontend/docs/pages/home/concurrency.mdx b/frontend/docs/pages/home/concurrency.mdx
index 67c88d9b8..d0a71206f 100644
--- a/frontend/docs/pages/home/concurrency.mdx
+++ b/frontend/docs/pages/home/concurrency.mdx
@@ -37,7 +37,7 @@ There are several reasons why you might want to use concurrency control in your
In addition to setting concurrency limits at the task level, you can also control concurrency at the worker level by passing the `slots` option when creating a new `Worker` instance:
-
+
+
+
+
This example will only let 1 run in each group run at a given time to fairly distribute the load across the workers.
@@ -123,7 +130,7 @@ The `CANCEL_NEWEST` strategy is particularly useful in scenarios where:
You can also combine multiple concurrency strategies to create a more complex concurrency control system. For example, you can use one group key to represent a specific team, and another group to represent a specific resource in that team, giving you more control over the rate at which tasks are executed.
-
+
+
+
+
diff --git a/frontend/docs/pages/home/conditional-workflows.mdx b/frontend/docs/pages/home/conditional-workflows.mdx
index 7a96fce31..0b9cfe798 100644
--- a/frontend/docs/pages/home/conditional-workflows.mdx
+++ b/frontend/docs/pages/home/conditional-workflows.mdx
@@ -67,7 +67,7 @@ Note the branching logic (`left_branch` and `right_branch`), as well as the use
To get started, let's declare the workflow.
-
+
@@ -82,11 +82,14 @@ To get started, let's declare the workflow.
+
+
+
Next, we'll start adding tasks to our workflow. First, we'll add a basic task that outputs a random number:
-
+
@@ -100,11 +103,14 @@ Next, we'll start adding tasks to our workflow. First, we'll add a basic task th
+
+
+
Next, we'll add a task to the workflow that's a child of the first task, but it has a `wait_for` condition that sleeps for 10 seconds.
-
+
@@ -119,13 +125,16 @@ Next, we'll add a task to the workflow that's a child of the first task, but it
+
+
+
This task will first wait for the parent task to complete, and then it'll sleep for 10 seconds before executing and returning another random number.
Next, we'll add a task that will be skipped on an event:
-
+
@@ -140,13 +149,16 @@ Next, we'll add a task that will be skipped on an event:
+
+
+
In this case, our task will wait for a 30 second sleep, and then it will be skipped if the `skip_on_event:skip` is fired.
Next, let's add some branching logic. Here we'll add two more tasks, a left and right branch.
-
+
@@ -160,13 +172,16 @@ Next, let's add some branching logic. Here we'll add two more tasks, a left and
+
+
+
These two tasks use the `ParentCondition` and `skip_if` together to check if the output of an upstream task was greater or less than `50`, respectively. Only one of the two tasks will run: whichever one's condition evaluates to `True`.
Next, we'll add a task that waits for an event:
-
+
@@ -181,11 +196,14 @@ Next, we'll add a task that waits for an event:
+
+
+
And finally, we'll add the last task, which collects all of its parents and sums them up.
-
+
@@ -199,6 +217,9 @@ Note that in this task, we rely on `ctx.was_skipped` to determine if a task was
+
+
+
This workflow demonstrates the power of the new conditional logic in Hatchet V1. You can now create complex workflows that are much more dynamic than workflows in the previous version of Hatchet, and do all of it declaratively (rather than, for example, by dynamically spawning child workflows based on conditions in the parent).
diff --git a/frontend/docs/pages/home/cron-runs.mdx b/frontend/docs/pages/home/cron-runs.mdx
index 777c9f4ef..0367e45dc 100644
--- a/frontend/docs/pages/home/cron-runs.mdx
+++ b/frontend/docs/pages/home/cron-runs.mdx
@@ -51,7 +51,9 @@ Each field can contain a specific value, an asterisk (`*`) to represent all poss
You can define a task with a cron schedule by configuring the cron expression as part of the task definition:
-
+
+
+
+
In the examples above, we set the `on cron` property of the task. The property specifies the cron expression that determines when the task should be triggered.
@@ -92,7 +97,9 @@ You can create dynamic cron triggers programmatically via the API. This is usefu
Here's an example of creating a cron to trigger a report for a specific customer every day at noon:
-
+
@@ -105,6 +112,9 @@ Here's an example of creating a a cron to trigger a report for a specific custom
+
+
+
In this example you can have different expressions for different customers, or dynamically set the expression based on some other business logic.
@@ -120,7 +130,9 @@ When creating a cron via the API, you will receive a cron trigger object with a
You can delete a cron trigger by passing the cron object or a cron trigger id to the delete method.
-
+
@@ -133,6 +145,9 @@ You can delete a cron trigger by passing the cron object or a cron trigger id to
+
+
+
@@ -145,7 +160,9 @@ You can delete a cron trigger by passing the cron object or a cron trigger id to
Retrieves a list of all task cron triggers matching the criteria.
-
+
@@ -158,6 +175,9 @@ Retrieves a list of all task cron triggers matching the criteria.
+
+
+
## Managing Cron Triggers in the Hatchet Dashboard
diff --git a/frontend/docs/pages/home/dags.mdx b/frontend/docs/pages/home/dags.mdx
index 9d9ed8a51..9b6e1dea9 100644
--- a/frontend/docs/pages/home/dags.mdx
+++ b/frontend/docs/pages/home/dags.mdx
@@ -13,7 +13,7 @@ Start by declaring a workflow with a name. The workflow object can declare addit
The returned object is an instance of the `Workflow` class, which is the primary interface for interacting with the workflow (i.e. [running](./run-with-results.mdx), [enqueuing](./run-no-wait.mdx), [scheduling](./scheduled-runs.mdx), etc).
-
+
@@ -29,6 +29,9 @@ The returned object is an instance of the `Workflow` class, which is the primary
+
+
+
@@ -43,7 +46,7 @@ Now that we have a workflow, we can define a task to be executed as part of the
The `task` method takes a name and a function that defines the task's behavior. The function will receive the workflow's input and return the task's output. Tasks also accept a number of other configuration options, which are covered elsewhere in our documentation.
-
+
In Python, the `task` method is a decorator, which is used like this to wrap a function:
@@ -74,13 +77,16 @@ asynchronous.
+
+
+
## Building a DAG with Task Dependencies
The power of Hatchet's workflow design comes from connecting tasks into a DAG structure. Tasks can specify dependencies (parents) which must complete successfully before the task can start.
-
+
@@ -96,13 +102,16 @@ The power of Hatchet's workflow design comes from connecting tasks into a DAG st
+
+
+
## Accessing Parent Task Outputs
As shown in the examples above, tasks can access outputs from their parent tasks using the context object:
-
+
@@ -123,13 +132,16 @@ if err != nil {
}
```
+
+
+
## Running a Workflow
You can run workflows directly or enqueue them for asynchronous execution. All the same methods for running a task are available for workflows!
-
+
@@ -153,4 +165,7 @@ runID, err := simple.RunNoWait(ctx, input)
```
+
+
+
diff --git a/frontend/docs/pages/home/docker.mdx b/frontend/docs/pages/home/docker.mdx
index 62c98836c..1702b0218 100644
--- a/frontend/docs/pages/home/docker.mdx
+++ b/frontend/docs/pages/home/docker.mdx
@@ -2,7 +2,7 @@ import { Tabs, Callout } from "nextra/components";
# Dockerizing Hatchet Applications
-This guide explains how to create Dockerfiles for Hatchet applications. There are examples for both Python and TypeScript applications here.
+This guide explains how to create Dockerfiles for Hatchet applications. There are examples for Python, TypeScript, Go, and Ruby applications here.
## Entrypoint Configuration for Hatchet
@@ -14,7 +14,7 @@ Before creating your Dockerfile, understand that Hatchet workers require specifi
## Example Dockerfiles
-
+
```dockerfile
@@ -212,5 +212,37 @@ CMD ["/app/hatchet-worker"]
```
+
+{/* TODO-RUBY: verify and test this, possibly all dockerfiles */}
+```dockerfile
+FROM ruby:3.3-slim
+
+ENV HATCHET_ENV=production
+
+# Install system dependencies for native gems
+
+RUN apt-get update && \
+ apt-get install -y build-essential && \
+ apt-get clean && \
+ rm -rf /var/lib/apt/lists/\*
+
+WORKDIR /app
+
+COPY Gemfile Gemfile.lock ./
+
+RUN bundle config set --local without 'development test' && \
+ bundle install
+
+COPY . /app
+
+CMD ["bundle", "exec", "ruby", "worker.rb"]
+
+```
+
+
+ If you're using a Rake task or binstub to start your worker, replace the CMD with the appropriate command, e.g. `CMD ["bundle", "exec", "rake", "hatchet:worker"]`.
+
+
+```
diff --git a/frontend/docs/pages/home/durable-events.mdx b/frontend/docs/pages/home/durable-events.mdx
index f4feaf206..6c49b830e 100644
--- a/frontend/docs/pages/home/durable-events.mdx
+++ b/frontend/docs/pages/home/durable-events.mdx
@@ -11,7 +11,7 @@ Durable events are a feature of **durable tasks** which allow tasks to wait for
Durable events are declared using the context method `WaitFor` (or utility method `WaitForEvent`) on the `DurableContext` object.
-
+
@@ -27,13 +27,16 @@ Durable events are declared using the context method `WaitFor` (or utility metho
+
+
+
## Durable event filters
Durable events can be filtered using [CEL](https://github.com/google/cel-spec) expressions. For example, to only receive `user:update` events for a specific user, you can use the following filter:
-
+
@@ -51,4 +54,7 @@ Durable events can be filtered using [CEL](https://github.com/google/cel-spec) e
+
+
+
diff --git a/frontend/docs/pages/home/durable-sleep.mdx b/frontend/docs/pages/home/durable-sleep.mdx
index 6fd9d50d1..70abcf789 100644
--- a/frontend/docs/pages/home/durable-sleep.mdx
+++ b/frontend/docs/pages/home/durable-sleep.mdx
@@ -13,7 +13,7 @@ For example, say you'd like to send a notification to a user after 24 hours. Wit
Durable sleep can be used by calling the `SleepFor` method on the `DurableContext` object. This method takes a duration as an argument and will sleep for that duration.
-
+
@@ -29,4 +29,7 @@ Durable sleep can be used by calling the `SleepFor` method on the `DurableContex
+
+
+
diff --git a/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx b/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx
index 211ef589c..5fb840591 100644
--- a/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx
+++ b/frontend/docs/pages/home/hatchet-cloud-quickstart.mdx
@@ -78,6 +78,16 @@ Finally, you can trigger your workflow using the `hatchet trigger simple` comman
hatchet trigger simple
```
+### (Optional) Install Hatchet docs MCP
+
+Get Hatchet documentation directly in your AI coding assistant (Cursor, Claude Code, Claude Desktop, and more):
+
+```sh copy
+hatchet docs install
+```
+
+See the [full setup guide](./install-docs-mcp.mdx) for manual configuration options.
+
And that's it! You should now have a Hatchet project set up on Hatchet Cloud with a worker running locally.
diff --git a/frontend/docs/pages/home/install-docs-mcp.mdx b/frontend/docs/pages/home/install-docs-mcp.mdx
new file mode 100644
index 000000000..68c6e7b5f
--- /dev/null
+++ b/frontend/docs/pages/home/install-docs-mcp.mdx
@@ -0,0 +1,97 @@
+import { Callout, Steps, Tabs } from "nextra/components";
+import {
+ McpUrl,
+ CursorDeeplinkButton,
+ CursorMcpConfig,
+ ClaudeCodeCommand,
+ CursorTabLabel,
+ ClaudeCodeTabLabel,
+ OtherAgentsTabLabel,
+} from "@/components/McpSetup";
+
+# Install Docs MCP
+
+Hatchet documentation is optimized for LLMs and available as an **MCP (Model Context Protocol) server**, so AI coding assistants like Cursor and Claude Code can search and reference Hatchet docs directly.
+
+MCP endpoint:
+
+, , ]}>
+
+
+
+
+
+ ```bash copy
+ hatchet docs install cursor
+ ```
+
+ This creates a `.cursor/rules/hatchet-docs.mdc` file and prints the one-click deeplink.
+
+
+
+
+ Install the Hatchet docs MCP server in Cursor with one click:
+
+
+
+
+
+
+
+
+ ### Open Cursor Settings
+
+ Go to **Cursor Settings** → **MCP** → **Add new MCP server**.
+
+ ### Add the server configuration
+
+
+
+ ### Use in chat
+
+ Reference Hatchet docs in any Cursor chat with `@hatchet-docs` or ask questions and the agent will automatically search the docs.
+
+
+
+
+
+
+
+
+
+
+
+
+ ```bash copy
+ hatchet docs install claude-code
+ ```
+
+ If `claude` is on your PATH, this runs the command automatically. Otherwise it prints it for you to copy.
+
+
+
+
+ Run this command in your terminal:
+
+
+
+
+
+
+
+
+
+ For any AI tool that supports [llms.txt](https://llmstxt.org/), Hatchet docs are available at:
+
+ | Resource | URL |
+ |----------|-----|
+ | **llms.txt** (index) | [docs.hatchet.run/llms.txt](https://docs.hatchet.run/llms.txt) |
+ | **llms-full.txt** (all docs) | [docs.hatchet.run/llms-full.txt](https://docs.hatchet.run/llms-full.txt) |
+ | **Per-page markdown** | `docs.hatchet.run/llms/{section}/{page}.md` |
+ | **MCP endpoint** | |
+
+ Every documentation page also includes a `` header
+ pointing to its markdown version, and a "View as Markdown" link at the top of the page.
+
+
+
diff --git a/frontend/docs/pages/home/inter-service-triggering.mdx b/frontend/docs/pages/home/inter-service-triggering.mdx
index 92d99c45c..ba4dad015 100644
--- a/frontend/docs/pages/home/inter-service-triggering.mdx
+++ b/frontend/docs/pages/home/inter-service-triggering.mdx
@@ -13,7 +13,7 @@ The recommended way to trigger a run from a service where you _cannot_ import th
This allows you to have a polyglot, fully typed interface with full SDK support.
-
+
+
+ {/* TODO: add ruby snippet */}
+
diff --git a/frontend/docs/pages/home/logging.mdx b/frontend/docs/pages/home/logging.mdx
index 10cc6c361..5dbca737c 100644
--- a/frontend/docs/pages/home/logging.mdx
+++ b/frontend/docs/pages/home/logging.mdx
@@ -8,7 +8,7 @@ import UniversalTabs from "../../components/UniversalTabs";
Hatchet comes with a built-in logging view where you can push logs from your workflows. This is useful for debugging and monitoring your workflows.
-
+
{/* TODO V1 DOCS -- Add Go logging and add this tab back */}
You can use either Python's built-in `logging` package, or the `context.log` method for more control over the logs that are sent.
@@ -68,4 +68,8 @@ In this example, we create Pino logger that implement's Hatchet's `Logger` inter
TODO V1 DOCS - ADD GO LOGGING HERE
*/}
+
+
+
+
diff --git a/frontend/docs/pages/home/manual-slot-release.mdx b/frontend/docs/pages/home/manual-slot-release.mdx
index 35f33be8f..8f2c0e523 100644
--- a/frontend/docs/pages/home/manual-slot-release.mdx
+++ b/frontend/docs/pages/home/manual-slot-release.mdx
@@ -21,7 +21,7 @@ In some cases, you may have a task in your workflow that is resource-intensive a
You can manually release a slot from within a running task in your workflow using the Hatchet context method `release_slot`:
-
+
@@ -44,6 +44,9 @@ func StepOne(ctx worker.HatchetContext) (result \*taskOneOutput, err error) {
```
+
+
+
In the above examples, the `release_slot()` method is called after the resource-intensive process has completed. This allows other tasks in the workflow to start executing while the current task continues with non-resource-intensive tasks.
diff --git a/frontend/docs/pages/home/on-failure-tasks.mdx b/frontend/docs/pages/home/on-failure-tasks.mdx
index 33be2cae0..c3cd00d4e 100644
--- a/frontend/docs/pages/home/on-failure-tasks.mdx
+++ b/frontend/docs/pages/home/on-failure-tasks.mdx
@@ -11,7 +11,7 @@ The on-failure task is a special type of task in Hatchet that allows you to defi
You can define an on-failure task on your task the same as you'd define any other task:
-
+
@@ -24,6 +24,9 @@ You can define an on-failure task on your task the same as you'd define any othe
+
+
+
In the examples above, the on-failure task will be executed only if any of the main tasks in the workflow fail.
diff --git a/frontend/docs/pages/home/priority.mdx b/frontend/docs/pages/home/priority.mdx
index 70dcaeebf..9b67c7e31 100644
--- a/frontend/docs/pages/home/priority.mdx
+++ b/frontend/docs/pages/home/priority.mdx
@@ -28,7 +28,7 @@ There are a few different ways to set priorities for tasks or workflows in Hatch
First, you can set a default priority at the workflow level:
-
+
@@ -45,6 +45,9 @@ First, you can set a default priority at the workflow level:
+
+
+
This will assign the same default priority to all runs of this workflow (and all of the workflow's corresponding tasks), but will have no effect without also setting run-level priorities, since every run will use the same default.
@@ -53,7 +56,7 @@ This will assign the same default priority to all runs of this workflow (and all
When you trigger a run, you can set the priority of the triggered run to override its default priority.
-
+
@@ -68,11 +71,14 @@ When you trigger a run, you can set the priority of the triggered run to overrid
+
+
+
Similarly, you can also assign a priority to scheduled and cron workflows.
-
+
@@ -87,6 +93,9 @@ Similarly, you can also assign a priority to scheduled and cron workflows.
+
+
+
In these cases, the priority set on the trigger will override the default priority, so these runs will be processed ahead of lower-priority ones.
diff --git a/frontend/docs/pages/home/rate-limits.mdx b/frontend/docs/pages/home/rate-limits.mdx
index a50f7c1f9..a850cb86d 100644
--- a/frontend/docs/pages/home/rate-limits.mdx
+++ b/frontend/docs/pages/home/rate-limits.mdx
@@ -39,7 +39,7 @@ This pattern is especially useful for:
### Declaring and Consuming Dynamic Rate Limits
-
+
> Note: `dynamic_key` must be a CEL expression. `units` and `limits` can be either an integer or a CEL expression.
@@ -62,6 +62,9 @@ We can add one or more rate limits to a task by adding the `rate_limits` configu
+
+
+
## Static Rate Limits
@@ -84,7 +87,7 @@ If a step run exceeds the rate limit, Hatchet re-queues the step run until the r
Define the static rate limits that can be consumed by any step run across all workflow runs using the `put_rate_limit` method in the `Admin` client within your code.
-
+
@@ -103,13 +106,16 @@ Define the static rate limits that can be consumed by any step run across all wo
+
+
+
### Consuming Static Rate Limits
With your rate limit key defined, specify the units of consumption for a specific key in each step definition by adding the `rate_limits` configuration to your step definition in your workflow.
-
+
@@ -124,6 +130,9 @@ With your rate limit key defined, specify the units of consumption for a specifi
+
+
+
### Limiting Workflow Runs
diff --git a/frontend/docs/pages/home/retry-policies.mdx b/frontend/docs/pages/home/retry-policies.mdx
index 6fe80c9c2..43b9b93fc 100644
--- a/frontend/docs/pages/home/retry-policies.mdx
+++ b/frontend/docs/pages/home/retry-policies.mdx
@@ -27,7 +27,7 @@ This simple retry mechanism can help to mitigate transient failures, such as net
To enable retries for a task, simply add the `retries` property to the task object in your task definition:
-
+
@@ -37,6 +37,9 @@ To enable retries for a task, simply add the `retries` property to the task obje
+
+
+
You can add the `retries` property to any task, and Hatchet will handle the retry logic automatically.
@@ -49,7 +52,7 @@ Additionally, if a task interacts with external services or databases, you shoul
If you need to access the current retry count within a task, you can use the `retryCount` method available in the task context:
-
+
@@ -59,13 +62,16 @@ If you need to access the current retry count within a task, you can use the `re
+
+
+
## Exponential Backoff
Hatchet also supports exponential backoff for retries, which can be useful for handling failures in a more resilient manner. Exponential backoff increases the delay between retries exponentially, giving the failing service more time to recover before the next retry.
-
+
@@ -75,6 +81,9 @@ Hatchet also supports exponential backoff for retries, which can be useful for h
+
+
+
## Bypassing Retry logic
@@ -85,7 +94,7 @@ The Hatchet SDKs each expose a `NonRetryable` exception, which allows you to byp
2. A task that contains a single non-idempotent operation that can fail but cannot safely be rerun on failure, such as a billing operation.
3. A failure that requires manual intervention to resolve.
-
+
@@ -97,6 +106,9 @@ The Hatchet SDKs each expose a `NonRetryable` exception, which allows you to byp
+
+
+
In these cases, even though `retries` is set to a non-zero number (meaning the task would ordinarily retry), Hatchet will not retry.
diff --git a/frontend/docs/pages/home/run-no-wait.mdx b/frontend/docs/pages/home/run-no-wait.mdx
index 59b7eb482..564a0d111 100644
--- a/frontend/docs/pages/home/run-no-wait.mdx
+++ b/frontend/docs/pages/home/run-no-wait.mdx
@@ -14,7 +14,7 @@ Some example use cases for fire-and-forget style tasks might be:
1. Sending a shipping confirmation email to a user once their order has shipped. This is a truly async task, in the sense that the user is not necessarily using your application when it happens, and the part of the application triggering the task does not need to know the result of the work, just that it has been enqueued (assuming that it will complete, of course).
2. Triggering a machine learning model training job that can take minutes, hours, or even days to complete. Similarly to above, it's likely that no part of the application needs to wait on the result of this work, it just needs to "fire and forget" it - meaning that it needs to kick it off, and let it complete whenever it completes.
-
+
If we have the following workflow:
@@ -42,13 +42,18 @@ Note that the type of `input` here is a Pydantic model that matches the input sc
+
+
+
+
+
## Subscribing to results from an enqueued task
Often it is useful to subscribe to the results of a task at a later time. The `run_no_wait` method returns a `WorkflowRunRef` object which includes a listener for the result of the task.
-
+
Use `ref.result()` to block until the result is available:
@@ -70,6 +75,10 @@ or await `aio_result`:
+
+
+
+
## Triggering Runs in the Hatchet Dashboard
diff --git a/frontend/docs/pages/home/run-on-event.mdx b/frontend/docs/pages/home/run-on-event.mdx
index a56335222..6c21f0182 100644
--- a/frontend/docs/pages/home/run-on-event.mdx
+++ b/frontend/docs/pages/home/run-on-event.mdx
@@ -21,7 +21,7 @@ Run-on-event allows you to trigger one or more tasks when a specific event occur
To run a task on an event, you need to declare the event that will trigger the task. This is done by declaring the `on_events` property in the task declaration.
-
+
@@ -33,6 +33,9 @@ To run a task on an event, you need to declare the event that will trigger the t
+
+
+
@@ -50,7 +53,7 @@ To run a task on an event, you need to declare the event that will trigger the t
You can push an event to the event queue by calling the `push` method on the Hatchet event client and providing the event name and payload.
-
+
@@ -60,6 +63,9 @@ You can push an event to the event queue by calling the `push` method on the Hat
+
+
+
## Event Filtering
@@ -74,7 +80,7 @@ There are two ways to create filters in Hatchet.
The simplest way to create a filter is to register it declaratively with your workflow when it's created. For example:
-
+
@@ -84,6 +90,9 @@ The simplest way to create a filter is to register it declaratively with your wo
+
+
+
In each of these cases, we register a filter with the workflow. Note that these "declarative" filters are overwritten each time your workflow is updated, so the ids associated with them will not be stable over time. This allows you to modify a filter in-place or remove a filter, and not need to manually delete it over the API.
@@ -92,7 +101,7 @@ In each of these cases, we register a filter with the workflow. Note that these
You also can create event filters by using the `filters` clients on the SDKs:
-
+
@@ -102,6 +111,9 @@ You also can create event filters by using the `filters` clients on the SDKs:
+
+
+
@@ -114,7 +126,7 @@ You also can create event filters by using the `filters` clients on the SDKs:
Then, push an event that uses the filter to determine whether or not to run. For instance, this run will be skipped, since the payload does not match the expression:
-
+
@@ -124,11 +136,14 @@ Then, push an event that uses the filter to determine whether or not to run. For
+
+
+
But this one will be triggered since the payload _does_ match the expression:
-
+
@@ -138,6 +153,9 @@ But this one will be triggered since the payload _does_ match the expression:
+
+
+
@@ -149,7 +167,7 @@ But this one will be triggered since the payload _does_ match the expression:
You can access the filter payload by using the `Context` in the task that was triggered by your event:
-
+
@@ -161,6 +179,9 @@ You can access the filter payload by using the `Context` in the task that was tr
+
+
+
### Advanced Usage
diff --git a/frontend/docs/pages/home/run-with-results.mdx b/frontend/docs/pages/home/run-with-results.mdx
index 81e5a2d66..e3618909e 100644
--- a/frontend/docs/pages/home/run-with-results.mdx
+++ b/frontend/docs/pages/home/run-with-results.mdx
@@ -12,7 +12,7 @@ One method for running a task in Hatchet is to run it and wait for its result. S
1. Fanout patterns, where a parent fans out work to a number of children, and wants to receive the results of those child tasks and make some decision based on them. For example, if each child run flips a coin, and the parent wants to count up how many heads there were and do something with that information.
2. Waiting for long-running API calls to complete, such as if calling an LLM. For instance, if you had a part of your product that writes a poem for a user, your backend might run a `write_poem` task, which in turn calls an LLM, and then your backend would wait for that task to complete and return its result (the poem).
-
+
You can use your `Task` object to run a task and wait for it to complete by calling the `run` method. This method will block until the task completes and return the result.
@@ -34,13 +34,17 @@ Note that the type of `input` here is a Pydantic model that matches the input sc
You can use your `Task` object to run a task and wait for it to complete by calling the `Run` method. This method will block until the task completes and return the result.
+
+
+
+
## Spawning Tasks from within a Task
You can also spawn tasks from within a task. This is useful for composing tasks together to create more complex workflows, fanning out batched tasks, or creating conditional workflows.
-
+
You can run a task from within a task by calling the `aio_run` method on the task object from within a task function. This will associate the runs in the dashboard for easier debugging.
@@ -66,13 +70,16 @@ You can run a task from within a task by calling the `Run` method on the task ob
+
+
+
## Running Tasks in Parallel
Sometimes you may want to run multiple tasks concurrently. Here's how to do that in each language:
-
+
Since the `aio_run` method returns a coroutine, you can spawn multiple tasks in parallel and await using `asyncio.gather`.
@@ -94,6 +101,9 @@ You can run multiple tasks in parallel by calling `Run` multiple times in gorout
+
+
+
diff --git a/frontend/docs/pages/home/running-your-task.mdx b/frontend/docs/pages/home/running-your-task.mdx
index a218109e8..b89d6cc30 100644
--- a/frontend/docs/pages/home/running-your-task.mdx
+++ b/frontend/docs/pages/home/running-your-task.mdx
@@ -7,7 +7,7 @@ import UniversalTabs from "@/components/UniversalTabs";
With your task defined, you can import it wherever you need to use it and invoke it with the `run` method.
-
+
@@ -19,6 +19,9 @@ With your task defined, you can import it wherever you need to use it and invoke
+
+
+
There are many ways to run a task, including:
diff --git a/frontend/docs/pages/home/scheduled-runs.mdx b/frontend/docs/pages/home/scheduled-runs.mdx
index fe0b56334..7b7fa505e 100644
--- a/frontend/docs/pages/home/scheduled-runs.mdx
+++ b/frontend/docs/pages/home/scheduled-runs.mdx
@@ -32,7 +32,7 @@ You can create dynamic scheduled runs programmatically via the API to run tasks
Here's an example of creating a scheduled run to trigger a task tomorrow at noon:
-
+
@@ -44,6 +44,9 @@ Here's an example of creating a scheduled run to trigger a task tomorrow at noon
+
+
+
In this example you can have different scheduled times for different customers, or dynamically set the scheduled time based on some other business logic.
@@ -59,7 +62,7 @@ When creating a scheduled run via the API, you will receive a scheduled run obje
You can delete a scheduled run by calling the `delete` method on the scheduled client.
-
+
@@ -69,13 +72,16 @@ You can delete a scheduled run by calling the `delete` method on the scheduled c
+
+
+
### Listing Scheduled Runs
You can list all scheduled runs for a task by calling the `list` method on the scheduled client.
-
+
@@ -85,13 +91,16 @@ You can list all scheduled runs for a task by calling the `list` method on the s
+
+
+
### Rescheduling a Scheduled Run
If you need to change the trigger time for an existing scheduled run, you can reschedule it by updating its `triggerAt`.
-
+
@@ -102,6 +111,9 @@ If you need to change the trigger time for an existing scheduled run, you can re
/>
+
+
+
@@ -114,7 +126,7 @@ If you need to change the trigger time for an existing scheduled run, you can re
Hatchet supports bulk operations for scheduled runs. You can bulk delete scheduled runs, and you can bulk reschedule scheduled runs by providing a list of updates.
-
+
@@ -127,6 +139,10 @@ Hatchet supports bulk operations for scheduled runs. You can bulk delete schedul
/>
+
+
+
+
## Managing Scheduled Runs in the Hatchet Dashboard
diff --git a/frontend/docs/pages/home/setup.mdx b/frontend/docs/pages/home/setup.mdx
index 2c1234ca0..ab6973c96 100644
--- a/frontend/docs/pages/home/setup.mdx
+++ b/frontend/docs/pages/home/setup.mdx
@@ -1,7 +1,7 @@
import Tabs from "../_setup/tabs.mdx";
import { Callout } from "nextra/components";
-# Hatchet Walkthrough
+# Advanced Setup
This guide is intended for users who want to explore Hatchet in more depth
diff --git a/frontend/docs/pages/home/sticky-assignment.mdx b/frontend/docs/pages/home/sticky-assignment.mdx
index f94fdb785..450322e03 100644
--- a/frontend/docs/pages/home/sticky-assignment.mdx
+++ b/frontend/docs/pages/home/sticky-assignment.mdx
@@ -33,7 +33,7 @@ There are two strategies for setting sticky assignment for [DAG](./dags.mdx) wor
- `SOFT`: All tasks in the workflow will attempt to be assigned to the same worker, but if that worker is unavailable, it will be assigned to another worker.
- `HARD`: All tasks in the workflow will only be assigned to the same worker. If that worker is unavailable, the workflow run will not be assigned to another worker and will remain in a pending state until the original worker becomes available or timeout is reached. (See [Scheduling Timeouts](./timeouts.mdx#task-level-timeouts))
-
+
@@ -48,6 +48,9 @@ There are two strategies for setting sticky assignment for [DAG](./dags.mdx) wor
+
+
+
In this example, the `sticky` property is set to `SOFT`, which means that the task will attempt to be assigned to the same worker for the duration of its execution. If the original worker is unavailable, the task will be assigned to another worker.
@@ -63,7 +66,7 @@ However, the child task must:
If either condition is not met, an error will be thrown when the child task is spawned.
-
+
@@ -79,4 +82,7 @@ If either condition is not met, an error will be thrown when the child task is s
+
+
+
diff --git a/frontend/docs/pages/home/streaming.mdx b/frontend/docs/pages/home/streaming.mdx
index 40a996590..ca5715e17 100644
--- a/frontend/docs/pages/home/streaming.mdx
+++ b/frontend/docs/pages/home/streaming.mdx
@@ -11,7 +11,7 @@ Hatchet tasks can stream data back to a consumer in real-time. This has a number
You can stream data out of a task run by using the `put_stream` (or equivalent) method on the `Context`.
-
+
@@ -27,6 +27,9 @@ You can stream data out of a task run by using the `put_stream` (or equivalent)
+
+
+
This task will stream small chunks of content through Hatchet, which can then be consumed elsewhere. Here we use some text as an example, but this is intended to replicate streaming the results of an LLM call back to a consumer.
@@ -35,7 +38,7 @@ This task will stream small chunks of content through Hatchet, which can then be
You can easily consume stream events by using the stream method on the workflow run ref that the various [fire-and-forget](./run-no-wait.mdx) methods return.
-
+
@@ -51,6 +54,9 @@ You can easily consume stream events by using the stream method on the workflow
+
+
+
In the examples above, this will result in the famous text below being gradually printed to the console, bit by bit.
@@ -74,7 +80,7 @@ It's common to want to stream events out of a Hatchet task and back to the front
In both cases, we recommend using your application's backend as a proxy for the stream, where you would subscribe to the stream of events from Hatchet, and then stream events through to the frontend as they're received by the backend.
-
+
For example, with FastAPI, you'd do the following:
@@ -97,6 +103,9 @@ For example, with Go's built-in HTTP server, you'd do the following:
+
+ {/* TODO: add ruby snippet */}
+
Then, assuming you run the server on port `8000`, running `curl -N http://localhost:8000/stream` would result in the text streaming back to your console from Hatchet through your FastAPI proxy.
diff --git a/frontend/docs/pages/home/timeouts.mdx b/frontend/docs/pages/home/timeouts.mdx
index d398aca47..fd41f153d 100644
--- a/frontend/docs/pages/home/timeouts.mdx
+++ b/frontend/docs/pages/home/timeouts.mdx
@@ -37,7 +37,7 @@ If no unit is specified, seconds are assumed.
You can specify execution and scheduling timeouts for a task using the `execution_timeout` and `schedule_timeout` parameters when creating a task.
-
+
@@ -50,6 +50,9 @@ You can specify execution and scheduling timeouts for a task using the `executio
+
+
+
In these tasks, both timeouts are specified, meaning:
@@ -69,7 +72,7 @@ In some cases, you may need to extend the timeout for a step while it is running
For example:
-
+
@@ -80,6 +83,9 @@ For example:
+
+
+
In this example, the step initially would exceed its execution timeout. But before it does, we call the `refreshTimeout` method, which extends the timeout and allows it to complete. Importantly, refreshing a timeout is an additive operation - the new timeout is added to the existing timeout. So for instance, if the task originally had a timeout of `30s` and we call `refreshTimeout("15s")`, the new timeout will be `45s`.
diff --git a/frontend/docs/pages/home/worker-affinity.mdx b/frontend/docs/pages/home/worker-affinity.mdx
index 232f830a2..1c1ff3132 100644
--- a/frontend/docs/pages/home/worker-affinity.mdx
+++ b/frontend/docs/pages/home/worker-affinity.mdx
@@ -17,7 +17,7 @@ Specific tasks can then specify desired label state to ensure that workflows are
Labels can be set on workers when they are registered with Hatchet. Labels are key-value pairs that can be used to specify worker capabilities, resource availability, or other criteria that can be used to match workflows to workers. Values can be strings or numbers, and multiple labels can be set on a worker.
-
+
@@ -40,6 +40,9 @@ Labels can be set on workers when they are registered with Hatchet. Labels are k
)
```
+
+
+
## Specifying Step Desired Labels
@@ -57,7 +60,7 @@ You can specify desired worker label state for specific tasks in a workflow by s
- `required` (default: `true`): Whether the label is required for the task to run. If `true`, the task will remain in a pending state until a worker with the desired label state becomes available. If `false`, the worker will be prioritized based on the sum of the highest matching weights.
- `weight` (optional, default: `100`): The weight of the label. Higher weights are prioritized over lower weights when selecting a worker for the task. If multiple workers have the same highest weight, the worker with the highest sum of weights will be selected. Ignored if `required` is `true`.
-
+
@@ -100,6 +103,9 @@ You can specify desired worker label state for specific tasks in a workflow by s
)
```
+
+
+
@@ -114,7 +120,7 @@ You can specify desired worker label state for specific tasks in a workflow by s
Labels can also be set dynamically on workers using the `upsertLabels` method. This can be useful when worker state changes over time, such as when a new model is loaded into memory or when a worker's resource availability changes.
-
+
@@ -173,4 +179,7 @@ Labels can also be set dynamically on workers using the `upsertLabels` method. T
```
+
+
+
diff --git a/frontend/docs/pages/home/workers.mdx b/frontend/docs/pages/home/workers.mdx
index 3f937622f..528547234 100644
--- a/frontend/docs/pages/home/workers.mdx
+++ b/frontend/docs/pages/home/workers.mdx
@@ -17,7 +17,7 @@ Now that we have a [task declared](./your-first-task.mdx) we can create a worker
Declare a worker by calling the `worker` method on the Hatchet client. The `worker` method takes a name and an optional configuration object.
-
+
@@ -88,6 +88,32 @@ Declare a worker by calling the `worker` method on the Hatchet client. The `work
+
+ ### Add the Hatchet SDK to your Gemfile
+
+ ```ruby
+ gem "hatchet-sdk"
+ ```
+
+ Then install with:
+
+ ```bash
+ bundle install
+ ```
+
+ ### Register the Worker
+
+
+
+ ### Run the Worker
+
+ Start the worker by running:
+
+ ```bash
+ bundle exec ruby worker.rb
+ ```
+
+
And that's it! Once you run your script to start the worker, you'll see some logs like this, which tell you that your worker is running.
diff --git a/frontend/docs/pages/home/your-first-task.mdx b/frontend/docs/pages/home/your-first-task.mdx
index 20f8bdf1a..0b3dd3bca 100644
--- a/frontend/docs/pages/home/your-first-task.mdx
+++ b/frontend/docs/pages/home/your-first-task.mdx
@@ -15,7 +15,7 @@ Start by declaring a task with a name. The task object can declare additional ta
The returned object is an instance of the `Task` class, which is the primary interface for interacting with the task (i.e. [running](./run-with-results.mdx), [enqueuing](./run-no-wait.mdx), [scheduling](./scheduled-runs.mdx), etc).
-
+
@@ -26,6 +26,9 @@ The returned object is an instance of the `Task` class, which is the primary int
+
+
+
## Running a Task
@@ -38,7 +41,7 @@ With your task defined, you can import it wherever you need to use it and invoke
worker but it will wait indefinitely for the task to be executed.
-
+
@@ -50,6 +53,9 @@ With your task defined, you can import it wherever you need to use it and invoke
+
+
+
There are many ways to run a task, including:
diff --git a/frontend/docs/pnpm-lock.yaml b/frontend/docs/pnpm-lock.yaml
index cd160f759..e613e1184 100644
--- a/frontend/docs/pnpm-lock.yaml
+++ b/frontend/docs/pnpm-lock.yaml
@@ -51,6 +51,9 @@ importers:
lucide-react:
specifier: ^0.459.0
version: 0.459.0(react@18.3.1)
+ minisearch:
+ specifier: ^7.2.0
+ version: 7.2.0
next:
specifier: ^14.2.35
version: 14.2.35(react-dom@18.3.1(react@18.3.1))(react@18.3.1)
@@ -66,6 +69,9 @@ importers:
posthog-js:
specifier: ^1.272.1
version: 1.272.1
+ posthog-node:
+ specifier: ^5.24.15
+ version: 5.24.15
react:
specifier: ^18.3.1
version: 18.3.1
@@ -92,10 +98,13 @@ importers:
version: 2.6.0
tailwindcss:
specifier: ^3.4.18
- version: 3.4.18(yaml@2.8.1)
+ version: 3.4.18(tsx@4.21.0)(yaml@2.8.1)
tailwindcss-animate:
specifier: ^1.0.7
- version: 1.0.7(tailwindcss@3.4.18(yaml@2.8.1))
+ version: 1.0.7(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.1))
+ zod:
+ specifier: ^4.3.6
+ version: 4.3.6
devDependencies:
'@types/node':
specifier: 18.11.10
@@ -112,6 +121,9 @@ importers:
prettier:
specifier: ^3.6.2
version: 3.6.2
+ tsx:
+ specifier: ^4.21.0
+ version: 4.21.0
typescript:
specifier: ^5.9.3
version: 5.9.3
@@ -154,6 +166,162 @@ packages:
'@chevrotain/utils@11.0.3':
resolution: {integrity: sha512-YslZMgtJUyuMbZ+aKvfF3x1f5liK4mWNxghFRv7jqRR9C3R3fAOGTTKvxXDa2Y1s9zSbcpuO0cAxDYsc9SrXoQ==}
+ '@esbuild/aix-ppc64@0.27.3':
+ resolution: {integrity: sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==}
+ engines: {node: '>=18'}
+ cpu: [ppc64]
+ os: [aix]
+
+ '@esbuild/android-arm64@0.27.3':
+ resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [android]
+
+ '@esbuild/android-arm@0.27.3':
+ resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==}
+ engines: {node: '>=18'}
+ cpu: [arm]
+ os: [android]
+
+ '@esbuild/android-x64@0.27.3':
+ resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [android]
+
+ '@esbuild/darwin-arm64@0.27.3':
+ resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [darwin]
+
+ '@esbuild/darwin-x64@0.27.3':
+ resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [darwin]
+
+ '@esbuild/freebsd-arm64@0.27.3':
+ resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [freebsd]
+
+ '@esbuild/freebsd-x64@0.27.3':
+ resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [freebsd]
+
+ '@esbuild/linux-arm64@0.27.3':
+ resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [linux]
+
+ '@esbuild/linux-arm@0.27.3':
+ resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==}
+ engines: {node: '>=18'}
+ cpu: [arm]
+ os: [linux]
+
+ '@esbuild/linux-ia32@0.27.3':
+ resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==}
+ engines: {node: '>=18'}
+ cpu: [ia32]
+ os: [linux]
+
+ '@esbuild/linux-loong64@0.27.3':
+ resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==}
+ engines: {node: '>=18'}
+ cpu: [loong64]
+ os: [linux]
+
+ '@esbuild/linux-mips64el@0.27.3':
+ resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==}
+ engines: {node: '>=18'}
+ cpu: [mips64el]
+ os: [linux]
+
+ '@esbuild/linux-ppc64@0.27.3':
+ resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==}
+ engines: {node: '>=18'}
+ cpu: [ppc64]
+ os: [linux]
+
+ '@esbuild/linux-riscv64@0.27.3':
+ resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==}
+ engines: {node: '>=18'}
+ cpu: [riscv64]
+ os: [linux]
+
+ '@esbuild/linux-s390x@0.27.3':
+ resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==}
+ engines: {node: '>=18'}
+ cpu: [s390x]
+ os: [linux]
+
+ '@esbuild/linux-x64@0.27.3':
+ resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [linux]
+
+ '@esbuild/netbsd-arm64@0.27.3':
+ resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [netbsd]
+
+ '@esbuild/netbsd-x64@0.27.3':
+ resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [netbsd]
+
+ '@esbuild/openbsd-arm64@0.27.3':
+ resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [openbsd]
+
+ '@esbuild/openbsd-x64@0.27.3':
+ resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [openbsd]
+
+ '@esbuild/openharmony-arm64@0.27.3':
+ resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [openharmony]
+
+ '@esbuild/sunos-x64@0.27.3':
+ resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [sunos]
+
+ '@esbuild/win32-arm64@0.27.3':
+ resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==}
+ engines: {node: '>=18'}
+ cpu: [arm64]
+ os: [win32]
+
+ '@esbuild/win32-ia32@0.27.3':
+ resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==}
+ engines: {node: '>=18'}
+ cpu: [ia32]
+ os: [win32]
+
+ '@esbuild/win32-x64@0.27.3':
+ resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==}
+ engines: {node: '>=18'}
+ cpu: [x64]
+ os: [win32]
+
'@floating-ui/core@1.7.3':
resolution: {integrity: sha512-sGnvb5dmrJaKEZ+LDIpguvdX3bDlEllmv4/ClQ9awcmCZrlx5jQyyMWFM5kBI+EyNOCDDiKk8il0zeuX3Zlg/w==}
@@ -390,6 +558,9 @@ packages:
'@posthog/core@1.2.2':
resolution: {integrity: sha512-f16Ozx6LIigRG+HsJdt+7kgSxZTHeX5f1JlCGKI1lXcvlZgfsCR338FuMI2QRYXGl+jg/vYFzGOTQBxl90lnBg==}
+ '@posthog/core@1.22.0':
+ resolution: {integrity: sha512-WkmOnq95aAOu6yk6r5LWr5cfXsQdpVbWDCwOxQwxSne8YV6GuZET1ziO5toSQXgrgbdcjrSz2/GopAfiL6iiAA==}
+
'@radix-ui/react-compose-refs@1.1.2':
resolution: {integrity: sha512-z4eqJvfiNnFMHIIvXP3CY57y2WJs5g2v3X0zm9mEJkrkNv4rDxu+sg9Jh8EkXyeqBkB7SOcboo9dMVqhyrACIg==}
peerDependencies:
@@ -1310,6 +1481,11 @@ packages:
esast-util-from-js@2.0.1:
resolution: {integrity: sha512-8Ja+rNJ0Lt56Pcf3TAmpBZjmx8ZcK5Ts4cAzIOjsjevg9oSXJnl6SUQ2EevU8tv3h6ZLWmoKL5H4fgWvdvfETw==}
+ esbuild@0.27.3:
+ resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==}
+ engines: {node: '>=18'}
+ hasBin: true
+
escalade@3.2.0:
resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==}
engines: {node: '>=6'}
@@ -1452,6 +1628,9 @@ packages:
resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==}
engines: {node: '>=16'}
+ get-tsconfig@4.13.6:
+ resolution: {integrity: sha512-shZT/QMiSHc/YBLxxOkMtgSid5HFoauqCE3/exfsEcwg1WkeqjG+V40yBbBrsD+jW2HDXcs28xOfcbm2jI8Ddw==}
+
github-slugger@2.0.0:
resolution: {integrity: sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==}
@@ -2006,6 +2185,9 @@ packages:
resolution: {integrity: sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==}
engines: {node: '>=16 || 14 >=14.17'}
+ minisearch@7.2.0:
+ resolution: {integrity: sha512-dqT2XBYUOZOiC5t2HRnwADjhNS2cecp9u+TJRiJ1Qp/f5qjkeT5APcGPjHw+bz89Ms8Jp+cG4AlE+QZ/QnDglg==}
+
mj-context-menu@0.6.1:
resolution: {integrity: sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==}
@@ -2281,6 +2463,10 @@ packages:
rrweb-snapshot:
optional: true
+ posthog-node@5.24.15:
+ resolution: {integrity: sha512-0QnWVOZAPwEAlp+r3r0jIGfk2IaNYM/2YnEJJhBMJZXs4LpHcTu7mX42l+e95o9xX87YpVuZU0kOkmtQUxgnOA==}
+ engines: {node: ^20.20.0 || >=22.22.0}
+
preact@10.28.2:
resolution: {integrity: sha512-lbteaWGzGHdlIuiJ0l2Jq454m6kcpI1zNje6d8MlGAFlYvP2GO4ibnat7P74Esfz4sPTdM6UxtTwh/d3pwM9JA==}
@@ -2541,6 +2727,9 @@ packages:
reselect@5.1.1:
resolution: {integrity: sha512-K/BG6eIky/SBpzfHZv/dd+9JBFiS4SWV7FIujVyJRux6e45+73RaUHXLmIR1f7WOMaQ0U1km6qwklRQxpJJY0w==}
+ resolve-pkg-maps@1.0.0:
+ resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==}
+
resolve@1.22.10:
resolution: {integrity: sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==}
engines: {node: '>= 0.4'}
@@ -2814,6 +3003,11 @@ packages:
tslib@2.8.1:
resolution: {integrity: sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==}
+ tsx@4.21.0:
+ resolution: {integrity: sha512-5C1sg4USs1lfG0GFb2RLXsdpXqBSEhAaA/0kPL01wxzpMqLILNxIxIOKiILz+cdg/pLnOUxFYOR5yhHU666wbw==}
+ engines: {node: '>=18.0.0'}
+ hasBin: true
+
twoslash-protocol@0.2.12:
resolution: {integrity: sha512-5qZLXVYfZ9ABdjqbvPc4RWMr7PrpPaaDSeaYY55vl/w1j6H6kzsWK/urAEIXlzYlyrFmyz1UbwIt+AA0ck+wbg==}
@@ -3006,6 +3200,9 @@ packages:
zod@3.25.76:
resolution: {integrity: sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==}
+ zod@4.3.6:
+ resolution: {integrity: sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==}
+
zwitch@2.0.4:
resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==}
@@ -3045,6 +3242,84 @@ snapshots:
'@chevrotain/utils@11.0.3': {}
+ '@esbuild/aix-ppc64@0.27.3':
+ optional: true
+
+ '@esbuild/android-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/android-arm@0.27.3':
+ optional: true
+
+ '@esbuild/android-x64@0.27.3':
+ optional: true
+
+ '@esbuild/darwin-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/darwin-x64@0.27.3':
+ optional: true
+
+ '@esbuild/freebsd-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/freebsd-x64@0.27.3':
+ optional: true
+
+ '@esbuild/linux-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/linux-arm@0.27.3':
+ optional: true
+
+ '@esbuild/linux-ia32@0.27.3':
+ optional: true
+
+ '@esbuild/linux-loong64@0.27.3':
+ optional: true
+
+ '@esbuild/linux-mips64el@0.27.3':
+ optional: true
+
+ '@esbuild/linux-ppc64@0.27.3':
+ optional: true
+
+ '@esbuild/linux-riscv64@0.27.3':
+ optional: true
+
+ '@esbuild/linux-s390x@0.27.3':
+ optional: true
+
+ '@esbuild/linux-x64@0.27.3':
+ optional: true
+
+ '@esbuild/netbsd-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/netbsd-x64@0.27.3':
+ optional: true
+
+ '@esbuild/openbsd-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/openbsd-x64@0.27.3':
+ optional: true
+
+ '@esbuild/openharmony-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/sunos-x64@0.27.3':
+ optional: true
+
+ '@esbuild/win32-arm64@0.27.3':
+ optional: true
+
+ '@esbuild/win32-ia32@0.27.3':
+ optional: true
+
+ '@esbuild/win32-x64@0.27.3':
+ optional: true
+
'@floating-ui/core@1.7.3':
dependencies:
'@floating-ui/utils': 0.2.10
@@ -3271,6 +3546,10 @@ snapshots:
'@posthog/core@1.2.2': {}
+ '@posthog/core@1.22.0':
+ dependencies:
+ cross-spawn: 7.0.6
+
'@radix-ui/react-compose-refs@1.1.2(@types/react@18.3.26)(react@18.3.1)':
dependencies:
react: 18.3.1
@@ -4504,6 +4783,35 @@ snapshots:
esast-util-from-estree: 2.0.0
vfile-message: 4.0.3
+ esbuild@0.27.3:
+ optionalDependencies:
+ '@esbuild/aix-ppc64': 0.27.3
+ '@esbuild/android-arm': 0.27.3
+ '@esbuild/android-arm64': 0.27.3
+ '@esbuild/android-x64': 0.27.3
+ '@esbuild/darwin-arm64': 0.27.3
+ '@esbuild/darwin-x64': 0.27.3
+ '@esbuild/freebsd-arm64': 0.27.3
+ '@esbuild/freebsd-x64': 0.27.3
+ '@esbuild/linux-arm': 0.27.3
+ '@esbuild/linux-arm64': 0.27.3
+ '@esbuild/linux-ia32': 0.27.3
+ '@esbuild/linux-loong64': 0.27.3
+ '@esbuild/linux-mips64el': 0.27.3
+ '@esbuild/linux-ppc64': 0.27.3
+ '@esbuild/linux-riscv64': 0.27.3
+ '@esbuild/linux-s390x': 0.27.3
+ '@esbuild/linux-x64': 0.27.3
+ '@esbuild/netbsd-arm64': 0.27.3
+ '@esbuild/netbsd-x64': 0.27.3
+ '@esbuild/openbsd-arm64': 0.27.3
+ '@esbuild/openbsd-x64': 0.27.3
+ '@esbuild/openharmony-arm64': 0.27.3
+ '@esbuild/sunos-x64': 0.27.3
+ '@esbuild/win32-arm64': 0.27.3
+ '@esbuild/win32-ia32': 0.27.3
+ '@esbuild/win32-x64': 0.27.3
+
escalade@3.2.0: {}
escape-string-regexp@5.0.0: {}
@@ -4655,6 +4963,10 @@ snapshots:
get-stream@8.0.1: {}
+ get-tsconfig@4.13.6:
+ dependencies:
+ resolve-pkg-maps: 1.0.0
+
github-slugger@2.0.0: {}
glob-parent@5.1.2:
@@ -5580,6 +5892,8 @@ snapshots:
minipass@7.1.2: {}
+ minisearch@7.2.0: {}
+
mj-context-menu@0.6.1: {}
mlly@1.8.0:
@@ -5854,12 +6168,13 @@ snapshots:
camelcase-css: 2.0.1
postcss: 8.5.6
- postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.1):
+ postcss-load-config@6.0.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(yaml@2.8.1):
dependencies:
lilconfig: 3.1.3
optionalDependencies:
jiti: 1.21.7
postcss: 8.5.6
+ tsx: 4.21.0
yaml: 2.8.1
postcss-nested@6.2.0(postcss@8.5.6):
@@ -5894,6 +6209,10 @@ snapshots:
preact: 10.28.2
web-vitals: 4.2.4
+ posthog-node@5.24.15:
+ dependencies:
+ '@posthog/core': 1.22.0
+
preact@10.28.2: {}
prettier@3.6.2: {}
@@ -6239,6 +6558,8 @@ snapshots:
reselect@5.1.1: {}
+ resolve-pkg-maps@1.0.0: {}
+
resolve@1.22.10:
dependencies:
is-core-module: 2.16.1
@@ -6501,11 +6822,11 @@ snapshots:
tailwind-merge@2.6.0: {}
- tailwindcss-animate@1.0.7(tailwindcss@3.4.18(yaml@2.8.1)):
+ tailwindcss-animate@1.0.7(tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.1)):
dependencies:
- tailwindcss: 3.4.18(yaml@2.8.1)
+ tailwindcss: 3.4.18(tsx@4.21.0)(yaml@2.8.1)
- tailwindcss@3.4.18(yaml@2.8.1):
+ tailwindcss@3.4.18(tsx@4.21.0)(yaml@2.8.1):
dependencies:
'@alloc/quick-lru': 5.2.0
arg: 5.0.2
@@ -6524,7 +6845,7 @@ snapshots:
postcss: 8.5.6
postcss-import: 15.1.0(postcss@8.5.6)
postcss-js: 4.1.0(postcss@8.5.6)
- postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.6)(yaml@2.8.1)
+ postcss-load-config: 6.0.1(jiti@1.21.7)(postcss@8.5.6)(tsx@4.21.0)(yaml@2.8.1)
postcss-nested: 6.2.0(postcss@8.5.6)
postcss-selector-parser: 6.1.2
resolve: 1.22.10
@@ -6597,6 +6918,13 @@ snapshots:
tslib@2.8.1: {}
+ tsx@4.21.0:
+ dependencies:
+ esbuild: 0.27.3
+ get-tsconfig: 4.13.6
+ optionalDependencies:
+ fsevents: 2.3.3
+
twoslash-protocol@0.2.12: {}
twoslash@0.2.12(typescript@5.9.3):
@@ -6827,4 +7155,6 @@ snapshots:
zod@3.25.76: {}
+ zod@4.3.6: {}
+
zwitch@2.0.4: {}
diff --git a/frontend/docs/public/claude-logo.svg b/frontend/docs/public/claude-logo.svg
new file mode 100644
index 000000000..a081f45f0
--- /dev/null
+++ b/frontend/docs/public/claude-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/public/cursor-logo.svg b/frontend/docs/public/cursor-logo.svg
new file mode 100644
index 000000000..79b44c5e8
--- /dev/null
+++ b/frontend/docs/public/cursor-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/public/go-logo.svg b/frontend/docs/public/go-logo.svg
new file mode 100644
index 000000000..bc75916a9
--- /dev/null
+++ b/frontend/docs/public/go-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/public/python-logo.svg b/frontend/docs/public/python-logo.svg
new file mode 100644
index 000000000..31f087d59
--- /dev/null
+++ b/frontend/docs/public/python-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/public/ruby-logo.svg b/frontend/docs/public/ruby-logo.svg
new file mode 100644
index 000000000..83ec962e1
--- /dev/null
+++ b/frontend/docs/public/ruby-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/public/typescript-logo.svg b/frontend/docs/public/typescript-logo.svg
new file mode 100644
index 000000000..f6da7c416
--- /dev/null
+++ b/frontend/docs/public/typescript-logo.svg
@@ -0,0 +1 @@
+
diff --git a/frontend/docs/scripts/generate-llms.ts b/frontend/docs/scripts/generate-llms.ts
new file mode 100644
index 000000000..367851a0b
--- /dev/null
+++ b/frontend/docs/scripts/generate-llms.ts
@@ -0,0 +1,924 @@
+/**
+ * Generate llms.txt, llms-full.txt, and per-page markdown files from the
+ * Hatchet documentation.
+ *
+ * This script reads the MDX documentation pages, resolves Snippet references
+ * to inline code, expands UniversalTabs into labeled language sections, and
+ * converts JSX components to plain Markdown.
+ *
+ * Usage:
+ * tsx scripts/generate-llms.ts # all languages
+ * tsx scripts/generate-llms.ts --languages python # Python only
+ * tsx scripts/generate-llms.ts --languages python,typescript
+ */
+
+import fs from "node:fs";
+import path from "node:path";
+import { snippets } from "../lib/generated/snippets/index.js";
+
+// ---------------------------------------------------------------------------
+// Paths
+// ---------------------------------------------------------------------------
+const SCRIPT_DIR = path.dirname(new URL(import.meta.url).pathname);
+const DOCS_ROOT = path.resolve(SCRIPT_DIR, "..");
+const PAGES_DIR = path.join(DOCS_ROOT, "pages");
+const OUTPUT_DIR = path.join(DOCS_ROOT, "public");
+
+const DOCS_BASE_URL = "https://docs.hatchet.run";
+
+const LANGUAGE_EXTENSIONS: Record = {
+ python: "python",
+ typescript: "typescript",
+ go: "go",
+};
+
+const TAB_LABEL_TO_LANG: Record = {
+ python: "python",
+ typescript: "typescript",
+ go: "go",
+};
+
+// ---------------------------------------------------------------------------
+// Snippet resolution
+// ---------------------------------------------------------------------------
+type SnippetNode = Record;
+
+function resolveSnippetPath(
+ tree: SnippetNode,
+ dotpath: string,
+): SnippetNode | null {
+ let cleaned = dotpath;
+ if (cleaned.startsWith("snippets.")) {
+ cleaned = cleaned.slice("snippets.".length);
+ }
+ const parts = cleaned.split(".");
+ let current: any = tree;
+ for (const part of parts) {
+ if (current && typeof current === "object" && part in current) {
+ current = current[part];
+ } else {
+ return null;
+ }
+ }
+ if (current && typeof current === "object" && "content" in current) {
+ return current as SnippetNode;
+ }
+ return null;
+}
+
+// ---------------------------------------------------------------------------
+// _meta.js parsing
+// ---------------------------------------------------------------------------
+interface DocPage {
+ title: string;
+ slug: string;
+ href: string;
+ filepath: string;
+ section: string;
+}
+
+/**
+ * Parse a _meta.js file into a plain object.
+ *
+ * **Limitations:** This uses regex to convert simple JS object literals to
+ * JSON. It only supports _meta.js files that export a plain object with:
+ * - Simple unquoted or quoted string keys (no computed `[expr]` keys)
+ * - String or plain-object values (no function calls, template literals,
+ * spread operators, or variable references)
+ * - No inline or block comments
+ *
+ * If your _meta.js file uses any of these unsupported constructs, either
+ * simplify it or extend this parser (e.g. with @babel/parser + eval).
+ */
+function parseMetaJs(filepath: string): Record {
+ const raw = fs.readFileSync(filepath, "utf-8");
+ let content = raw.replace("export default ", "");
+ // Quote unquoted object keys for JSON parsing
+ const pattern = /^(\s*)([a-zA-Z_$][a-zA-Z0-9_$-]*)\s*:/gm;
+ content = content.replace(pattern, '$1"$2":');
+ // Apply twice to catch keys that were adjacent
+ content = content.replace(pattern, '$1"$2":');
+ // Remove trailing commas before closing braces
+ content = content.replace(/,(\s*\n?\s*})(\s*);?/g, "$1");
+
+ try {
+ return JSON.parse(content);
+ } catch (err) {
+ const message =
+ err instanceof Error ? err.message : String(err);
+ throw new Error(
+ `Failed to parse _meta.js at ${filepath}: ${message}.\n` +
+ `The regex-based parser only supports simple object literals ` +
+ `(no computed keys, spread operators, comments, or expressions). ` +
+ `Simplify the file or switch to a proper JS parser.\n` +
+ `--- transformed content ---\n${content}`,
+ );
+ }
+}
+
+function isDocPage(key: string, value: any): boolean {
+ if (key.trim().startsWith("--")) return false;
+ if (key.trim().startsWith("_")) return false;
+ if (typeof value === "string") return true;
+ if (typeof value === "object" && value !== null) {
+ if (value.display === "hidden") return false;
+ if ("title" in value) return true;
+ }
+ return false;
+}
+
+function extractTitle(value: any): string {
+ if (typeof value === "string") return value;
+ if (typeof value === "object" && value !== null && "title" in value)
+ return value.title;
+ return "";
+}
+
+function collectPages(): DocPage[] {
+ const pages: DocPage[] = [];
+
+ const rootMetaPath = path.join(PAGES_DIR, "_meta.js");
+ if (!fs.existsSync(rootMetaPath)) return pages;
+
+ const rootMeta = parseMetaJs(rootMetaPath);
+ const sectionOrder = Object.keys(rootMeta).filter(
+ (k) => !k.startsWith("_"),
+ );
+
+ for (const sectionKey of sectionOrder) {
+ const sectionDir = path.join(PAGES_DIR, sectionKey);
+ const sectionMetaPath = path.join(sectionDir, "_meta.js");
+
+ const sectionValue = rootMeta[sectionKey] ?? {};
+ const sectionTitle =
+ typeof sectionValue === "object"
+ ? extractTitle(sectionValue)
+ : sectionKey;
+
+ if (!fs.existsSync(sectionMetaPath)) {
+ const mdxPath = path.join(PAGES_DIR, sectionKey + ".mdx");
+ if (fs.existsSync(mdxPath)) {
+ pages.push({
+ title: sectionTitle || sectionKey,
+ slug: sectionKey,
+ href: `${DOCS_BASE_URL}/${sectionKey}`,
+ filepath: mdxPath,
+ section: sectionTitle || sectionKey,
+ });
+ }
+ continue;
+ }
+
+ const sectionMeta = parseMetaJs(sectionMetaPath);
+ for (const [pageKey, pageValue] of Object.entries(sectionMeta)) {
+ if (!isDocPage(pageKey, pageValue)) continue;
+
+ const title = extractTitle(pageValue);
+ let mdxPath = path.join(sectionDir, pageKey + ".mdx");
+
+ if (!fs.existsSync(mdxPath)) {
+ mdxPath = path.join(sectionDir, pageKey, "index.mdx");
+ }
+ if (!fs.existsSync(mdxPath)) continue;
+
+ const href = `${DOCS_BASE_URL}/${sectionKey}/${pageKey}`;
+
+ pages.push({
+ title,
+ slug: pageKey,
+ href,
+ filepath: mdxPath,
+ section: sectionTitle || sectionKey,
+ });
+ }
+ }
+
+ return pages;
+}
+
+// ---------------------------------------------------------------------------
+// MDX -> Markdown conversion
+// ---------------------------------------------------------------------------
+function stripImportLines(text: string): string {
+ const lines = text.split("\n");
+ const result: string[] = [];
+ let inImports = true;
+ for (const line of lines) {
+ if (inImports) {
+ const stripped = line.trim();
+ if (stripped.startsWith("import ") || stripped === "") continue;
+ inImports = false;
+ }
+ result.push(line);
+ }
+ return result.join("\n");
+}
+
+function stripJsxComments(text: string): string {
+ return text.replace(/\{\/\*[\s\S]*?\*\/\}/g, "");
+}
+
+function resolveSnippets(
+ text: string,
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+): string {
+ const pattern = //g;
+ return text.replace(pattern, (_match, rawPath: string) => {
+ const dotpath = rawPath.replace(/\s+/g, "").trim();
+ const snippet = resolveSnippetPath(snippetTree, dotpath);
+ if (!snippet) return ``;
+
+ const lang = snippet.language ?? "";
+ if (languages && !languages.includes(lang)) return "";
+
+ const langExt = LANGUAGE_EXTENSIONS[lang] ?? lang;
+ const code = (snippet.content ?? "").trimEnd();
+ return `\`\`\`${langExt}\n${code}\n\`\`\``;
+ });
+}
+
+function convertCallouts(text: string): string {
+ const pattern = /([\s\S]*?)<\/Callout>/g;
+ return text.replace(pattern, (_match, calloutType: string, content: string) => {
+ const label = calloutType.charAt(0).toUpperCase() + calloutType.slice(1);
+ const trimmed = content.trim();
+ const lines = trimmed.split("\n");
+ if (lines.length === 1) {
+ return `> **${label}:** ${trimmed}`;
+ }
+ return (
+ `> **${label}:** ${lines[0]}\n` +
+ lines
+ .slice(1)
+ .map((l) => (l.trim() ? `> ${l}` : ">"))
+ .join("\n")
+ );
+ });
+}
+
+// ---------------------------------------------------------------------------
+// Tab expansion
+// ---------------------------------------------------------------------------
+function dedentTabContent(text: string): string {
+ const lines = text.split("\n");
+ let inFence = false;
+ // Use a boolean array instead of Set to avoid es5 iteration issues
+ const isProseLine: boolean[] = new Array(lines.length).fill(false);
+
+ for (let i = 0; i < lines.length; i++) {
+ const stripped = lines[i].trimStart();
+ if (stripped.startsWith("```")) {
+ inFence = !inFence;
+ isProseLine[i] = true;
+ continue;
+ }
+ if (!inFence) {
+ isProseLine[i] = true;
+ }
+ }
+
+ let minIndent: number | null = null;
+ for (let i = 0; i < lines.length; i++) {
+ if (!isProseLine[i]) continue;
+ const line = lines[i];
+ const stripped = line.trim();
+ if (!stripped) continue;
+ if (stripped.startsWith("<") || stripped.startsWith("{/*")) continue;
+ const indent = line.length - line.trimStart().length;
+ if (indent === 0) continue;
+ if (minIndent === null || indent < minIndent) {
+ minIndent = indent;
+ }
+ }
+
+ if (!minIndent) return text;
+
+ const result: string[] = [];
+ for (let i = 0; i < lines.length; i++) {
+ if (
+ isProseLine[i] &&
+ lines[i].length >= minIndent &&
+ lines[i].slice(0, minIndent).trim() === ""
+ ) {
+ result.push(lines[i].slice(minIndent));
+ } else {
+ result.push(lines[i]);
+ }
+ }
+ return result.join("\n");
+}
+
+function extractTabContents(
+ inner: string,
+ items: string[],
+): [string, string][] {
+ const result: [string, string][] = [];
+ let tabIdx = 0;
+ let pos = 0;
+
+ while (pos < inner.length) {
+ const openMatch = inner.slice(pos).match(/]*)?>/);
+ if (!openMatch || openMatch.index === undefined) break;
+
+ const start = pos + openMatch.index + openMatch[0].length;
+ let depth = 1;
+ let scan = start;
+
+ while (scan < inner.length && depth > 0) {
+ const remaining = inner.slice(scan);
+ const nextOpen = remaining.match(/]*)?>/);
+ const nextClose = remaining.match(/<\/Tabs\.Tab>/);
+
+ if (!nextClose || nextClose.index === undefined) break;
+
+ if (
+ nextOpen &&
+ nextOpen.index !== undefined &&
+ nextOpen.index < nextClose.index
+ ) {
+ depth++;
+ scan += nextOpen.index + nextOpen[0].length;
+ } else {
+ depth--;
+ if (depth === 0) {
+ let content = inner.slice(start, scan + nextClose.index);
+ content = dedentTabContent(content);
+ const label =
+ tabIdx < items.length ? items[tabIdx] : `Tab ${tabIdx + 1}`;
+ result.push([label, content]);
+ tabIdx++;
+ scan += nextClose.index + nextClose[0].length;
+ } else {
+ scan += nextClose.index + nextClose[0].length;
+ }
+ }
+ }
+
+ pos = scan;
+ }
+
+ return result;
+}
+
+function expandUniversalTabs(
+ text: string,
+ languages: string[] | null,
+): string {
+ const pattern =
+ /((?:(?!/g;
+
+ function processTabsBlock(
+ _match: string,
+ itemsStr: string,
+ optionKey: string | undefined,
+ inner: string,
+ ): string {
+ let items = itemsStr.match(/"([^"]*)"/g)?.map((s) => s.slice(1, -1)) ?? [];
+ if (items.length === 0) {
+ items = itemsStr.match(/'([^']*)'/g)?.map((s) => s.slice(1, -1)) ?? [];
+ }
+
+ const isLanguageTabs = !optionKey || optionKey === "language";
+ const tabContents = extractTabContents(inner, items);
+
+ const parts: string[] = [];
+ for (const [label, content] of tabContents) {
+ const langKey = TAB_LABEL_TO_LANG[label.toLowerCase()];
+
+ if (isLanguageTabs && langKey && languages && !languages.includes(langKey))
+ continue;
+
+ parts.push(`#### ${label}\n\n${content.trim()}`);
+ }
+
+ return parts.join("\n\n");
+ }
+
+ // Repeatedly process innermost first (handles nesting)
+ let prev: string | null = null;
+ while (prev !== text) {
+ prev = text;
+ text = text.replace(pattern, processTabsBlock);
+ }
+
+ return text;
+}
+
+function expandStandaloneTabs(text: string): string {
+ const pattern =
+ /([\s\S]*?)<\/Tabs>/g;
+
+ return text.replace(pattern, (_match, itemsStr: string, inner: string) => {
+ let items = itemsStr.match(/"([^"]*)"/g)?.map((s) => s.slice(1, -1)) ?? [];
+ if (items.length === 0) {
+ items = itemsStr.match(/'([^']*)'/g)?.map((s) => s.slice(1, -1)) ?? [];
+ }
+
+ const tabContents = extractTabContents(inner, items);
+ const parts: string[] = [];
+ for (const [label, content] of tabContents) {
+ parts.push(`#### ${label}\n\n${content.trim()}`);
+ }
+ return parts.join("\n\n");
+ });
+}
+
+// ---------------------------------------------------------------------------
+// Other component converters
+// ---------------------------------------------------------------------------
+function convertSteps(text: string): string {
+ text = text.replace(//g, "");
+ text = text.replace(/<\/Steps>/g, "");
+ return text;
+}
+
+function convertCards(text: string): string {
+ text = text.replace(//g, "");
+ text = text.replace(/<\/Cards>/g, "");
+
+ text = text.replace(
+ /([\s\S]*?)<\/Card>|\/>)/g,
+ (_match, attrs: string, content?: string) => {
+ const titleMatch = attrs.match(/title=["']([^"']*)["']/);
+ const hrefMatch = attrs.match(/href=["']([^"']*)["']/);
+ const title = titleMatch?.[1] ?? "";
+ const href = hrefMatch?.[1] ?? "";
+ const trimContent = content?.trim() ?? "";
+
+ if (href) {
+ return `- [${title}](${href})${trimContent ? ": " + trimContent : ""}`;
+ }
+ return `- **${title}**${trimContent ? ": " + trimContent : ""}`;
+ },
+ );
+ return text;
+}
+
+function convertFileTree(text: string): string {
+ function walkFileTree(
+ content: string,
+ lines: string[],
+ depth: number,
+ ): void {
+ const folderPattern =
+ /]*>([\s\S]*?)<\/FileTree\.Folder>/g;
+ let folderMatch: RegExpExecArray | null;
+ while ((folderMatch = folderPattern.exec(content)) !== null) {
+ lines.push(" ".repeat(depth) + folderMatch[1] + "/");
+ walkFileTree(folderMatch[2], lines, depth + 1);
+ }
+ const filePattern =
+ /]*\s*\/>/g;
+ let fileMatch: RegExpExecArray | null;
+ while ((fileMatch = filePattern.exec(content)) !== null) {
+ lines.push(" ".repeat(depth) + fileMatch[1]);
+ }
+ }
+
+ return text.replace(
+ /([\s\S]*?)<\/FileTree>/g,
+ (_match, inner: string) => {
+ const lines: string[] = [];
+ walkFileTree(inner, lines, 0);
+ return "```\n" + lines.join("\n") + "\n```";
+ },
+ );
+}
+
+function stripJsxComponents(text: string): string {
+ // Self-closing JSX tags
+ text = text.replace(/<[A-Z]\w*(?:\.\w+)*\s*[^>]*\/\s*>/g, "");
+ // Opening/closing JSX tags
+ text = text.replace(/<\/?[A-Z]\w*(?:\.\w+)*\s*[^>]*>/g, "");
+ return text;
+}
+
+function resolveMdxComponentImports(
+ text: string,
+ filepath: string,
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+ depth: number = 0,
+): string {
+ if (depth > 10) {
+ console.warn(
+ `[generate-llms] resolveMdxComponentImports: recursion depth limit ` +
+ `(10) reached while processing "${filepath}". This likely indicates ` +
+ `circular MDX imports. The remaining component references will not ` +
+ `be resolved.`,
+ );
+ return text;
+ }
+
+ const mdxImportPattern =
+ /import\s+(\w+)\s+from\s+["']([^"']*\.mdx)["']/g;
+
+ // Collect all MDX component imports first
+ const imports: Array<{ componentName: string; relPath: string }> = [];
+ let importMatch: RegExpExecArray | null;
+ while ((importMatch = mdxImportPattern.exec(text)) !== null) {
+ imports.push({
+ componentName: importMatch[1],
+ relPath: importMatch[2],
+ });
+ }
+
+ for (const imp of imports) {
+ const importedFilePath = path.resolve(path.dirname(filepath), imp.relPath);
+ if (!fs.existsSync(importedFilePath)) {
+ // Fall back to a comment if the file can't be found
+ text = text.replace(
+ new RegExp(`<${imp.componentName}\\s*/\\s*>`, "g"),
+ ``,
+ );
+ continue;
+ }
+
+ // Read the imported MDX and recursively convert it
+ const importedRaw = fs.readFileSync(importedFilePath, "utf-8");
+ const importedMd = convertMdxToMarkdown(
+ importedRaw,
+ snippetTree,
+ languages,
+ importedFilePath,
+ depth + 1,
+ );
+
+ // Replace all usages of with the inlined content
+ text = text.replace(
+ new RegExp(`<${imp.componentName}\\s*/\\s*>`, "g"),
+ importedMd.trim(),
+ );
+ }
+
+ return text;
+}
+
+function cleanBlankLines(text: string): string {
+ return text.replace(/\n{4,}/g, "\n\n\n");
+}
+
+// ---------------------------------------------------------------------------
+// Full pipeline
+// ---------------------------------------------------------------------------
+function convertMdxToMarkdown(
+ content: string,
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+ filepath?: string,
+ depth?: number,
+): string {
+ let text = content;
+
+ if (filepath) {
+ text = resolveMdxComponentImports(
+ text,
+ filepath,
+ snippetTree,
+ languages,
+ depth ?? 0,
+ );
+ }
+ text = stripImportLines(text);
+ text = stripJsxComments(text);
+ text = convertCallouts(text);
+ text = resolveSnippets(text, snippetTree, languages);
+ text = expandUniversalTabs(text, languages);
+ text = expandStandaloneTabs(text);
+ text = convertSteps(text);
+ text = convertCards(text);
+ text = convertFileTree(text);
+ text = stripJsxComponents(text);
+ text = cleanBlankLines(text);
+
+ return text.trim() + "\n";
+}
+
+// ---------------------------------------------------------------------------
+// MiniSearch index generation
+// ---------------------------------------------------------------------------
+import MiniSearch from "minisearch";
+
+import { MINISEARCH_OPTIONS } from "../lib/search-config.js";
+
+interface SearchDoc {
+ id: string;
+ title: string;
+ content: string;
+ codeIdentifiers: string;
+ pageTitle: string;
+ pageRoute: string;
+}
+
+/**
+ * Extract compound code identifiers from fenced code blocks in markdown.
+ * Finds dotted identifiers (e.g. hatchet.task, ctx.spawn, hatchet.workflow)
+ * and other notable code patterns, returning them as a space-separated string.
+ */
+function extractCodeIdentifiers(markdown: string): string {
+ const identifiers = new Set();
+ const lines = markdown.split("\n");
+ let inFence = false;
+ let fenceMarker: string | null = null;
+
+ for (const line of lines) {
+ const trimmed = line.trimStart();
+ const backtickMatch = trimmed.match(/^(`{3,})/);
+ if (backtickMatch) {
+ if (fenceMarker === null) {
+ fenceMarker = backtickMatch[1];
+ inFence = true;
+ } else if (backtickMatch[1].length >= fenceMarker.length) {
+ fenceMarker = null;
+ inFence = false;
+ }
+ continue;
+ }
+
+ if (!inFence) continue;
+
+ // Dotted identifiers: hatchet.task, ctx.spawn, hatchet.workflow, etc.
+ const dottedPattern = /[a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)+/g;
+ let m: RegExpExecArray | null;
+ while ((m = dottedPattern.exec(line)) !== null) {
+ identifiers.add(m[0].toLowerCase());
+ }
+
+ // Decorated identifiers: @hatchet.task, @hatchet.workflow
+ const decoratorPattern = /@([a-zA-Z_]\w*(?:\.[a-zA-Z_]\w*)*)/g;
+ while ((m = decoratorPattern.exec(line)) !== null) {
+ identifiers.add(m[1].toLowerCase());
+ }
+ }
+
+ return Array.from(identifiers).join(" ");
+}
+
+/**
+ * Convert heading text to a URL-friendly slug (matching Nextra's anchor generation).
+ */
+function slugify(text: string): string {
+ return text
+ .toLowerCase()
+ .replace(/[^\w\s-]/g, "")
+ .replace(/\s+/g, "-")
+ .replace(/-+/g, "-")
+ .replace(/^-|-$/g, "");
+}
+
+/**
+ * Split markdown content into sections by h2 headings.
+ * Returns an array of { heading, slug, content } objects.
+ * The first element has heading="" for content before the first h2.
+ */
+function splitByH2(
+ markdown: string,
+): Array<{ heading: string; slug: string; content: string }> {
+ const lines = markdown.split("\n");
+ const sections: Array<{ heading: string; slug: string; content: string }> = [];
+ let currentHeading = "";
+ let currentSlug = "";
+ let currentLines: string[] = [];
+ let fenceMarker: string | null = null; // tracks the opening fence (e.g. "```" or "````")
+
+ for (const line of lines) {
+ // Track fenced code blocks so we don't split on ## inside them.
+ // A fence opens with 3+ backticks and closes only when we see at
+ // least the same number of backticks (CommonMark spec).
+ const trimmed = line.trimStart();
+ const backtickMatch = trimmed.match(/^(`{3,})/);
+ if (backtickMatch) {
+ if (fenceMarker === null) {
+ fenceMarker = backtickMatch[1]; // open fence
+ } else if (backtickMatch[1].length >= fenceMarker.length) {
+ fenceMarker = null; // close fence
+ }
+ // else: fewer backticks than the opening fence — just content
+ }
+
+ const h2Match = fenceMarker === null && line.match(/^## (.+)$/);
+ if (h2Match) {
+ // Flush the previous section
+ const content = currentLines.join("\n").trim();
+ if (content || currentHeading) {
+ sections.push({
+ heading: currentHeading,
+ slug: currentSlug,
+ content,
+ });
+ }
+ currentHeading = h2Match[1].trim();
+ currentSlug = slugify(currentHeading);
+ currentLines = [];
+ } else {
+ currentLines.push(line);
+ }
+ }
+
+ // Flush the last section
+ const content = currentLines.join("\n").trim();
+ if (content || currentHeading) {
+ sections.push({
+ heading: currentHeading,
+ slug: currentSlug,
+ content,
+ });
+ }
+
+ return sections;
+}
+
+function buildSearchIndex(
+ pages: DocPage[],
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+): string {
+ const miniSearch = new MiniSearch(MINISEARCH_OPTIONS);
+
+ const docs: SearchDoc[] = [];
+ for (const page of pages) {
+ const raw = fs.readFileSync(page.filepath, "utf-8");
+ const md = convertMdxToMarkdown(raw, snippetTree, languages, page.filepath);
+ const urlPath = page.href.replace(DOCS_BASE_URL + "/", "");
+ const pageRoute = `hatchet://docs/${urlPath}`;
+
+ const sections = splitByH2(md);
+
+ for (const section of sections) {
+ if (!section.content.trim()) continue;
+
+ const id = section.slug
+ ? `${pageRoute}#${section.slug}`
+ : pageRoute;
+ const title = section.heading || page.title;
+
+ docs.push({
+ id,
+ title,
+ content: section.content,
+ codeIdentifiers: extractCodeIdentifiers(section.content),
+ pageTitle: page.title,
+ pageRoute,
+ });
+ }
+ }
+
+ miniSearch.addAll(docs);
+ return JSON.stringify(miniSearch);
+}
+
+// ---------------------------------------------------------------------------
+// Output generation
+// ---------------------------------------------------------------------------
+function generateLlmsTxt(pages: DocPage[]): string {
+ const lines: string[] = [
+ "# Hatchet Documentation",
+ "",
+ "> Hatchet is a distributed task queue and workflow engine for modern " +
+ "applications. It provides durable execution, concurrency control, " +
+ "rate limiting, and observability for background tasks and workflows " +
+ "in Python, TypeScript, and Go.",
+ "",
+ ];
+
+ let currentSection = "";
+ for (const page of pages) {
+ if (page.section !== currentSection) {
+ currentSection = page.section;
+ lines.push(`## ${currentSection}`);
+ lines.push("");
+ }
+ lines.push(`- [${page.title}](${page.href})`);
+ }
+
+ lines.push("");
+ return lines.join("\n");
+}
+
+function generateLlmsFullTxt(
+ pages: DocPage[],
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+): string {
+ const parts: string[] = [
+ "# Hatchet Documentation",
+ "",
+ "> Hatchet is a distributed task queue and workflow engine for modern " +
+ "applications. It provides durable execution, concurrency control, " +
+ "rate limiting, and observability for background tasks and workflows " +
+ "in Python, TypeScript, and Go.",
+ "",
+ ];
+
+ for (const page of pages) {
+ const raw = fs.readFileSync(page.filepath, "utf-8");
+ const md = convertMdxToMarkdown(raw, snippetTree, languages, page.filepath);
+ parts.push(`---\n\n\n`);
+ parts.push(md);
+ parts.push("");
+ }
+
+ return parts.join("\n");
+}
+
+function generatePerPageMarkdown(
+ pages: DocPage[],
+ snippetTree: SnippetNode,
+ languages: string[] | null,
+): void {
+ const llmsDir = path.join(OUTPUT_DIR, "llms");
+
+ for (const page of pages) {
+ const raw = fs.readFileSync(page.filepath, "utf-8");
+ const md = convertMdxToMarkdown(raw, snippetTree, languages, page.filepath);
+
+ const urlPath = page.href.replace(DOCS_BASE_URL + "/", "");
+ const outPath = path.join(llmsDir, urlPath + ".md");
+ fs.mkdirSync(path.dirname(outPath), { recursive: true });
+ fs.writeFileSync(outPath, md);
+
+ // For index pages (e.g. home/index), also write at the section root
+ // (e.g. home.md) so that /llms/home.md resolves correctly — Next.js
+ // router.pathname for section roots is "/home", not "/home/index".
+ if (page.slug === "index") {
+ const sectionPath = urlPath.replace(/\/index$/, "");
+ const sectionOutPath = path.join(llmsDir, sectionPath + ".md");
+ fs.writeFileSync(sectionOutPath, md);
+ }
+ }
+
+ console.log(
+ ` Wrote ${pages.length} per-page markdown files to ${llmsDir}/`,
+ );
+}
+
+// ---------------------------------------------------------------------------
+// CLI & main
+// ---------------------------------------------------------------------------
+function parseArgs(): string[] | null {
+ const idx = process.argv.indexOf("--languages");
+ if (idx === -1 || idx + 1 >= process.argv.length) return null;
+
+ const raw = process.argv[idx + 1];
+ const langs = raw.split(",").map((l) => l.trim().toLowerCase());
+ const valid = Object.keys(LANGUAGE_EXTENSIONS);
+ for (const lang of langs) {
+ if (!valid.includes(lang)) {
+ console.error(
+ `Unknown language: ${lang}. Valid: ${valid.sort().join(", ")}`,
+ );
+ process.exit(1);
+ }
+ }
+ return langs;
+}
+
+function main(): void {
+ const languages = parseArgs();
+
+ console.log("Loading snippets...");
+ const snippetTree = snippets as unknown as SnippetNode;
+
+ console.log("Collecting pages from _meta.js files...");
+ const pages = collectPages();
+ console.log(` Found ${pages.length} pages`);
+
+ console.log("Generating llms.txt...");
+ const llmsTxt = generateLlmsTxt(pages);
+
+ console.log("Generating llms-full.txt...");
+ const llmsFullTxt = generateLlmsFullTxt(pages, snippetTree, languages);
+
+ console.log("Generating per-page markdown files...");
+ generatePerPageMarkdown(pages, snippetTree, languages);
+
+ console.log("Building MiniSearch index...");
+ const searchIndexJson = buildSearchIndex(pages, snippetTree, languages);
+
+ fs.mkdirSync(OUTPUT_DIR, { recursive: true });
+
+ const llmsTxtPath = path.join(OUTPUT_DIR, "llms.txt");
+ fs.writeFileSync(llmsTxtPath, llmsTxt);
+ console.log(` Wrote ${llmsTxtPath} (${llmsTxt.length} bytes)`);
+
+ const llmsFullPath = path.join(OUTPUT_DIR, "llms-full.txt");
+ fs.writeFileSync(llmsFullPath, llmsFullTxt);
+ console.log(` Wrote ${llmsFullPath} (${llmsFullTxt.length} bytes)`);
+
+ const searchIndexPath = path.join(OUTPUT_DIR, "llms-search-index.json");
+ fs.writeFileSync(searchIndexPath, searchIndexJson);
+ console.log(
+ ` Wrote ${searchIndexPath} (${searchIndexJson.length} bytes)`,
+ );
+
+ if (languages) {
+ console.log(` Languages: ${languages.join(", ")}`);
+ } else {
+ console.log(" Languages: all");
+ }
+
+ console.log("Done!");
+}
+
+main();
diff --git a/frontend/docs/scripts/test-search-quality.ts b/frontend/docs/scripts/test-search-quality.ts
new file mode 100644
index 000000000..37c3ad98e
--- /dev/null
+++ b/frontend/docs/scripts/test-search-quality.ts
@@ -0,0 +1,1005 @@
+/**
+ * Search quality test harness for the docs MiniSearch index.
+ *
+ * Defines a set of common search queries with expected results,
+ * runs them against the generated index, and reports pass/fail.
+ *
+ * Usage:
+ * tsx scripts/test-search-quality.ts
+ *
+ * Exit code 0 = all tests pass, 1 = failures detected.
+ */
+
+import fs from "node:fs";
+import path from "node:path";
+import MiniSearch from "minisearch";
+import { MINISEARCH_OPTIONS, SEARCH_OPTIONS, rerankResults, expandSynonyms } from "../lib/search-config.js";
+
+// ---------------------------------------------------------------------------
+// Load the search index
+// ---------------------------------------------------------------------------
+const SCRIPT_DIR = path.dirname(new URL(import.meta.url).pathname);
+const DOCS_ROOT = path.resolve(SCRIPT_DIR, "..");
+const INDEX_PATH = path.join(DOCS_ROOT, "public", "llms-search-index.json");
+
+function loadIndex(): MiniSearch {
+ const json = fs.readFileSync(INDEX_PATH, "utf-8");
+ return MiniSearch.loadJSON(json, MINISEARCH_OPTIONS);
+}
+
+// ---------------------------------------------------------------------------
+// Test case definitions
+// ---------------------------------------------------------------------------
+
+interface SearchTestCase {
+ /** Human description of what we're testing */
+ name: string;
+ /** The raw search query (exactly what a user would type) */
+ query: string;
+ /** At least one of these page routes must appear in the top N results */
+ expectAnyOf: string[];
+ /** How many top results to check (default: 5) */
+ topN?: number;
+ /** If true, skip this test (for known issues / WIP) */
+ skip?: boolean;
+}
+
+const TEST_CASES: SearchTestCase[] = [
+ // -------------------------------------------------------------------------
+ // Core API patterns — things developers commonly search for
+ // -------------------------------------------------------------------------
+ {
+ name: "hatchet.task( — defining a task",
+ query: "hatchet.task(",
+ expectAnyOf: ["home/your-first-task"],
+ },
+ {
+ name: "hatchet.task — without parens",
+ query: "hatchet.task",
+ expectAnyOf: ["home/your-first-task"],
+ },
+ {
+ name: "@hatchet.task() — Python decorator",
+ query: "@hatchet.task()",
+ expectAnyOf: ["home/your-first-task"],
+ },
+ {
+ name: "hatchet.workflow — defining a workflow",
+ query: "hatchet.workflow",
+ expectAnyOf: ["home/dags", "home/orchestration"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Getting started & onboarding
+ // -------------------------------------------------------------------------
+ {
+ name: "quickstart",
+ query: "quickstart",
+ expectAnyOf: ["home/hatchet-cloud-quickstart", "self-hosting/kubernetes-quickstart"],
+ },
+ {
+ name: "setup",
+ query: "setup",
+ expectAnyOf: ["home/setup", "home/hatchet-cloud-quickstart"],
+ },
+ {
+ name: "getting started",
+ query: "getting started",
+ expectAnyOf: ["home/hatchet-cloud-quickstart", "home/setup"],
+ topN: 10,
+ },
+ {
+ name: "install",
+ query: "install",
+ expectAnyOf: ["home/hatchet-cloud-quickstart", "home/setup", "cli/index"],
+ topN: 10,
+ },
+ {
+ name: "architecture",
+ query: "architecture",
+ expectAnyOf: ["home/architecture"],
+ },
+ {
+ name: "guarantees",
+ query: "guarantees",
+ expectAnyOf: ["home/guarantees-and-tradeoffs"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Core task features
+ // -------------------------------------------------------------------------
+ {
+ name: "define a task",
+ query: "define a task",
+ expectAnyOf: ["home/your-first-task"],
+ topN: 10,
+ },
+ {
+ name: "create worker",
+ query: "create worker",
+ expectAnyOf: ["home/workers"],
+ topN: 10,
+ },
+ {
+ name: "worker",
+ query: "worker",
+ expectAnyOf: ["home/workers"],
+ },
+ {
+ name: "run task",
+ query: "run task",
+ expectAnyOf: ["home/running-your-task", "home/running-tasks", "home/run-with-results"],
+ topN: 10,
+ },
+ {
+ name: "environments",
+ query: "environments",
+ expectAnyOf: ["home/environments"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Trigger types
+ // -------------------------------------------------------------------------
+ {
+ name: "run with results",
+ query: "run with results",
+ expectAnyOf: ["home/run-with-results"],
+ },
+ {
+ name: "run no wait",
+ query: "run no wait",
+ expectAnyOf: ["home/run-no-wait"],
+ },
+ {
+ name: "scheduled runs",
+ query: "scheduled runs",
+ expectAnyOf: ["home/scheduled-runs"],
+ },
+ {
+ name: "cron",
+ query: "cron",
+ expectAnyOf: ["home/cron-runs"],
+ },
+ {
+ name: "event trigger",
+ query: "event trigger",
+ expectAnyOf: ["home/run-on-event"],
+ topN: 10,
+ },
+ {
+ name: "bulk run",
+ query: "bulk run",
+ expectAnyOf: ["home/bulk-run"],
+ },
+ {
+ name: "webhooks",
+ query: "webhooks",
+ expectAnyOf: ["home/webhooks"],
+ },
+ {
+ name: "inter-service",
+ query: "inter-service",
+ expectAnyOf: ["home/inter-service-triggering"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Flow control
+ // -------------------------------------------------------------------------
+ {
+ name: "concurrency",
+ query: "concurrency",
+ expectAnyOf: ["home/concurrency"],
+ },
+ {
+ name: "rate limit",
+ query: "rate limit",
+ expectAnyOf: ["home/rate-limits"],
+ },
+ {
+ name: "rate limits (plural)",
+ query: "rate limits",
+ expectAnyOf: ["home/rate-limits"],
+ },
+ {
+ name: "priority",
+ query: "priority",
+ expectAnyOf: ["home/priority"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Orchestration & composition
+ // -------------------------------------------------------------------------
+ {
+ name: "orchestration",
+ query: "orchestration",
+ expectAnyOf: ["home/orchestration"],
+ },
+ {
+ name: "DAG",
+ query: "DAG",
+ expectAnyOf: ["home/dags"],
+ },
+ {
+ name: "conditional workflows",
+ query: "conditional workflows",
+ expectAnyOf: ["home/conditional-workflows"],
+ },
+ {
+ name: "on failure",
+ query: "on failure",
+ expectAnyOf: ["home/on-failure-tasks"],
+ },
+ {
+ name: "child spawning",
+ query: "child spawning",
+ expectAnyOf: ["home/child-spawning"],
+ },
+ {
+ name: "child tasks",
+ query: "child tasks",
+ expectAnyOf: ["home/child-spawning"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Durability
+ // -------------------------------------------------------------------------
+ {
+ name: "durable execution",
+ query: "durable execution",
+ expectAnyOf: ["home/durable-execution"],
+ },
+ {
+ name: "durable events",
+ query: "durable events",
+ expectAnyOf: ["home/durable-events"],
+ },
+ {
+ name: "durable sleep",
+ query: "durable sleep",
+ expectAnyOf: ["home/durable-sleep"],
+ },
+ {
+ name: "durable best practices",
+ query: "durable best practices",
+ expectAnyOf: ["home/durable-best-practices"],
+ topN: 10,
+ },
+
+ // -------------------------------------------------------------------------
+ // Reliability & error handling
+ // -------------------------------------------------------------------------
+ {
+ name: "retry",
+ query: "retry",
+ expectAnyOf: ["home/retry-policies"],
+ },
+ {
+ name: "timeout",
+ query: "timeout",
+ expectAnyOf: ["home/timeouts"],
+ },
+ {
+ name: "cancellation",
+ query: "cancellation",
+ expectAnyOf: ["home/cancellation"],
+ },
+ {
+ name: "bulk retries",
+ query: "bulk retries",
+ expectAnyOf: ["home/bulk-retries-and-cancellations"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Worker management
+ // -------------------------------------------------------------------------
+ {
+ name: "sticky assignment",
+ query: "sticky assignment",
+ expectAnyOf: ["home/sticky-assignment"],
+ },
+ {
+ name: "worker affinity",
+ query: "worker affinity",
+ expectAnyOf: ["home/worker-affinity"],
+ },
+ {
+ name: "manual slot release",
+ query: "manual slot release",
+ expectAnyOf: ["home/manual-slot-release"],
+ },
+ {
+ name: "autoscaling workers",
+ query: "autoscaling workers",
+ expectAnyOf: ["home/autoscaling-workers"],
+ },
+ {
+ name: "worker health check",
+ query: "worker health check",
+ expectAnyOf: ["home/worker-healthchecks"],
+ topN: 10,
+ },
+ {
+ name: "troubleshooting",
+ query: "troubleshooting",
+ expectAnyOf: ["home/troubleshooting-workers"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Observability
+ // -------------------------------------------------------------------------
+ {
+ name: "logging",
+ query: "logging",
+ expectAnyOf: ["home/logging"],
+ },
+ {
+ name: "opentelemetry",
+ query: "opentelemetry",
+ expectAnyOf: ["home/opentelemetry"],
+ },
+ {
+ name: "prometheus metrics",
+ query: "prometheus metrics",
+ expectAnyOf: ["self-hosting/prometheus-metrics", "home/prometheus-metrics"],
+ },
+ {
+ name: "streaming",
+ query: "streaming",
+ expectAnyOf: ["home/streaming"],
+ },
+ {
+ name: "additional metadata",
+ query: "additional metadata",
+ expectAnyOf: ["home/additional-metadata"],
+ },
+
+ // -------------------------------------------------------------------------
+ // SDK-specific (Python)
+ // -------------------------------------------------------------------------
+ {
+ name: "pydantic",
+ query: "pydantic",
+ expectAnyOf: ["home/pydantic"],
+ },
+ {
+ name: "asyncio",
+ query: "asyncio",
+ expectAnyOf: ["home/asyncio"],
+ },
+ {
+ name: "dependency injection",
+ query: "dependency injection",
+ expectAnyOf: ["home/dependency-injection"],
+ },
+ {
+ name: "dataclass",
+ query: "dataclass",
+ expectAnyOf: ["home/dataclasses"],
+ },
+ {
+ name: "lifespans",
+ query: "lifespans",
+ expectAnyOf: ["home/lifespans"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Migration guides
+ // -------------------------------------------------------------------------
+ {
+ name: "migration python",
+ query: "migration python",
+ expectAnyOf: ["home/migration-guide-python"],
+ },
+ {
+ name: "migration typescript",
+ query: "migration typescript",
+ expectAnyOf: ["home/migration-guide-typescript"],
+ },
+ {
+ name: "migration go",
+ query: "migration go",
+ expectAnyOf: ["home/migration-guide-go"],
+ },
+ {
+ name: "engine migration",
+ query: "engine migration",
+ expectAnyOf: ["home/migration-guide-engine"],
+ },
+ {
+ name: "SDK improvements",
+ query: "SDK improvements",
+ expectAnyOf: ["home/v1-sdk-improvements"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Self-hosting & infrastructure
+ // -------------------------------------------------------------------------
+ {
+ name: "docker compose",
+ query: "docker compose",
+ expectAnyOf: ["self-hosting/docker-compose", "home/docker"],
+ },
+ {
+ name: "running with docker",
+ query: "running with docker",
+ expectAnyOf: ["home/docker", "self-hosting/docker-compose"],
+ topN: 10,
+ },
+ {
+ name: "kubernetes",
+ query: "kubernetes",
+ expectAnyOf: ["self-hosting/kubernetes-quickstart", "self-hosting/kubernetes-helm-configuration"],
+ },
+ {
+ name: "helm chart",
+ query: "helm chart",
+ expectAnyOf: ["self-hosting/kubernetes-helm-configuration", "self-hosting/high-availability"],
+ },
+ {
+ name: "configuration options",
+ query: "configuration options",
+ expectAnyOf: ["self-hosting/configuration-options"],
+ },
+ {
+ name: "self hosting",
+ query: "self hosting",
+ expectAnyOf: ["self-hosting/index", "self-hosting/docker-compose"],
+ topN: 10,
+ },
+ {
+ name: "hatchet lite",
+ query: "hatchet lite",
+ expectAnyOf: ["self-hosting/hatchet-lite"],
+ },
+ {
+ name: "networking",
+ query: "networking",
+ expectAnyOf: ["self-hosting/networking"],
+ },
+ {
+ name: "external database",
+ query: "external database",
+ expectAnyOf: ["self-hosting/kubernetes-external-database"],
+ },
+ {
+ name: "high availability",
+ query: "high availability",
+ expectAnyOf: ["self-hosting/high-availability"],
+ },
+ {
+ name: "data retention",
+ query: "data retention",
+ expectAnyOf: ["self-hosting/data-retention"],
+ },
+ {
+ name: "benchmarking",
+ query: "benchmarking",
+ expectAnyOf: ["self-hosting/benchmarking"],
+ },
+ {
+ name: "read replicas",
+ query: "read replicas",
+ expectAnyOf: ["self-hosting/read-replicas"],
+ },
+ {
+ name: "SMTP",
+ query: "SMTP",
+ expectAnyOf: ["self-hosting/smtp-server"],
+ },
+ {
+ name: "sampling",
+ query: "sampling",
+ expectAnyOf: ["self-hosting/sampling"],
+ },
+ {
+ name: "glasskube",
+ query: "glasskube",
+ expectAnyOf: ["self-hosting/kubernetes-glasskube"],
+ },
+ {
+ name: "downgrading versions",
+ query: "downgrading versions",
+ expectAnyOf: ["self-hosting/downgrading-versions"],
+ },
+ {
+ name: "improving performance",
+ query: "improving performance",
+ expectAnyOf: ["self-hosting/improving-performance"],
+ },
+ {
+ name: "worker configuration",
+ query: "worker configuration",
+ expectAnyOf: ["self-hosting/worker-configuration-options"],
+ topN: 10,
+ },
+
+ // -------------------------------------------------------------------------
+ // CLI
+ // -------------------------------------------------------------------------
+ {
+ name: "CLI",
+ query: "CLI",
+ expectAnyOf: ["cli/index"],
+ },
+ {
+ name: "TUI",
+ query: "TUI",
+ expectAnyOf: ["cli/tui"],
+ },
+ {
+ name: "profiles",
+ query: "profiles",
+ expectAnyOf: ["cli/profiles"],
+ },
+ {
+ name: "running hatchet locally",
+ query: "running hatchet locally",
+ expectAnyOf: ["cli/running-hatchet-locally"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Code-specific searches
+ // -------------------------------------------------------------------------
+ {
+ name: "SimpleInput — Pydantic model",
+ query: "SimpleInput",
+ expectAnyOf: ["home/your-first-task"],
+ },
+ {
+ name: "input_validator — Python arg",
+ query: "input_validator",
+ expectAnyOf: ["home/pydantic", "home/your-first-task"],
+ },
+ {
+ name: "BaseModel — Pydantic",
+ query: "BaseModel",
+ expectAnyOf: ["home/pydantic", "home/your-first-task"],
+ },
+ {
+ name: "ctx.spawn — child spawn",
+ query: "ctx.spawn",
+ expectAnyOf: ["home/child-spawning"],
+ },
+ {
+ name: "NewStandaloneTask — Go API",
+ query: "NewStandaloneTask",
+ expectAnyOf: ["home/your-first-task", "home/migration-guide-go"],
+ },
+ {
+ name: "DurableContext",
+ query: "DurableContext",
+ expectAnyOf: ["home/durable-execution"],
+ },
+ {
+ name: "aio_run — Python async run",
+ query: "aio_run",
+ expectAnyOf: ["home/your-first-task", "home/run-with-results"],
+ },
+
+ // -------------------------------------------------------------------------
+ // Special characters (regression tests)
+ // -------------------------------------------------------------------------
+ {
+ name: "hatchet.task( — trailing paren",
+ query: "hatchet.task(",
+ expectAnyOf: ["home/your-first-task"],
+ topN: 10,
+ },
+ {
+ name: "ctx.spawn( — trailing paren",
+ query: "ctx.spawn(",
+ expectAnyOf: ["home/child-spawning"],
+ topN: 10,
+ },
+ {
+ name: ".run() — dot prefix and parens",
+ query: ".run()",
+ expectAnyOf: ["home/your-first-task", "home/run-with-results", "home/running-your-task"],
+ topN: 10,
+ },
+ {
+ name: "( — lone paren should not crash",
+ query: "(",
+ expectAnyOf: [],
+ },
+ {
+ name: ") — lone close paren should not crash",
+ query: ")",
+ expectAnyOf: [],
+ },
+
+ // -------------------------------------------------------------------------
+ // Synonym / alternate phrasing queries
+ // -------------------------------------------------------------------------
+ {
+ name: "delay → scheduled/sleep",
+ query: "delay",
+ expectAnyOf: ["home/durable-sleep", "home/scheduled-runs"],
+ },
+ {
+ name: "debounce → concurrency",
+ query: "debounce",
+ expectAnyOf: ["home/concurrency"],
+ },
+ {
+ name: "dedup → concurrency",
+ query: "dedup",
+ expectAnyOf: ["home/concurrency"],
+ },
+ {
+ name: "throttle → rate limits",
+ query: "throttle",
+ expectAnyOf: ["home/rate-limits", "home/concurrency"],
+ },
+ {
+ name: "fan out → child spawning",
+ query: "fan out",
+ expectAnyOf: ["home/child-spawning", "home/bulk-run"],
+ },
+ {
+ name: "parallel tasks",
+ query: "parallel tasks",
+ expectAnyOf: ["home/child-spawning", "home/run-with-results"],
+ },
+ {
+ name: "background job",
+ query: "background job",
+ expectAnyOf: ["home/your-first-task", "home/run-no-wait", "home/workers"],
+ },
+ {
+ name: "recurring → cron",
+ query: "recurring",
+ expectAnyOf: ["home/cron-runs"],
+ },
+ {
+ name: "error handling → retry/failure",
+ query: "error handling",
+ expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"],
+ },
+ {
+ name: "fire and forget → run no wait",
+ query: "fire and forget",
+ expectAnyOf: ["home/run-no-wait"],
+ topN: 10,
+ },
+ {
+ name: "scale workers → autoscaling",
+ query: "scale workers",
+ expectAnyOf: ["home/autoscaling-workers"],
+ },
+ {
+ name: "pipeline → DAG",
+ query: "pipeline",
+ expectAnyOf: ["home/dags", "home/orchestration"],
+ },
+ {
+ name: "long running task → durable",
+ query: "long running task",
+ expectAnyOf: ["home/durable-execution"],
+ topN: 10,
+ },
+ {
+ name: "batch → bulk run",
+ query: "batch tasks",
+ expectAnyOf: ["home/bulk-run"],
+ topN: 10,
+ },
+ {
+ name: "if else → conditional",
+ query: "if else workflow",
+ expectAnyOf: ["home/conditional-workflows"],
+ topN: 10,
+ },
+ {
+ name: "monitor → observability",
+ query: "monitor",
+ expectAnyOf: ["home/opentelemetry", "home/prometheus-metrics", "home/logging"],
+ topN: 10,
+ },
+ {
+ name: "tracing → opentelemetry",
+ query: "tracing",
+ expectAnyOf: ["home/opentelemetry"],
+ topN: 10,
+ },
+ {
+ name: "observability",
+ query: "observability",
+ expectAnyOf: ["home/opentelemetry", "home/prometheus-metrics", "home/logging"],
+ topN: 10,
+ },
+ {
+ name: "debug → troubleshooting",
+ query: "debug",
+ expectAnyOf: ["home/troubleshooting-workers", "home/logging"],
+ topN: 10,
+ },
+ {
+ name: "deploy → docker/k8s",
+ query: "deploy",
+ expectAnyOf: ["home/docker", "self-hosting/docker-compose", "self-hosting/kubernetes-quickstart"],
+ topN: 10,
+ },
+ {
+ name: "upgrade → migration",
+ query: "upgrade",
+ expectAnyOf: ["home/migration-guide-python", "home/migration-guide-typescript", "home/migration-guide-go", "home/migration-guide-engine"],
+ topN: 10,
+ },
+ {
+ name: "downgrade → downgrading",
+ query: "downgrade",
+ expectAnyOf: ["self-hosting/downgrading-versions"],
+ topN: 10,
+ },
+ {
+ name: "postgres → database config",
+ query: "postgres",
+ expectAnyOf: ["self-hosting/kubernetes-external-database", "self-hosting/configuration-options"],
+ topN: 10,
+ },
+ {
+ name: "performance → improving",
+ query: "performance",
+ expectAnyOf: ["self-hosting/improving-performance", "self-hosting/benchmarking"],
+ topN: 10,
+ },
+ {
+ name: "async await → asyncio",
+ query: "async await",
+ expectAnyOf: ["home/asyncio"],
+ topN: 10,
+ },
+ {
+ name: "liveness → health checks",
+ query: "liveness",
+ expectAnyOf: ["home/worker-healthchecks"],
+ topN: 10,
+ },
+ {
+ name: "wait for event → durable events",
+ query: "wait for event",
+ expectAnyOf: ["home/durable-events"],
+ topN: 10,
+ },
+ {
+ name: "api call → inter-service",
+ query: "api call between services",
+ expectAnyOf: ["home/inter-service-triggering"],
+ topN: 10,
+ },
+ {
+ name: "cleanup → lifespans",
+ query: "cleanup shutdown",
+ expectAnyOf: ["home/lifespans"],
+ topN: 10,
+ },
+
+ // -------------------------------------------------------------------------
+ // Natural language questions
+ // -------------------------------------------------------------------------
+ {
+ name: "how to retry a failed task",
+ query: "how to retry a failed task",
+ expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"],
+ topN: 10,
+ },
+ {
+ name: "how to run tasks in parallel",
+ query: "how to run tasks in parallel",
+ expectAnyOf: ["home/child-spawning", "home/run-with-results"],
+ topN: 10,
+ },
+ {
+ name: "how to cancel a running task",
+ query: "how to cancel a running task",
+ expectAnyOf: ["home/cancellation"],
+ topN: 10,
+ },
+ {
+ name: "how to set up cron job",
+ query: "how to set up cron job",
+ expectAnyOf: ["home/cron-runs"],
+ topN: 10,
+ },
+ {
+ name: "how to handle errors",
+ query: "how to handle errors",
+ expectAnyOf: ["home/retry-policies", "home/on-failure-tasks"],
+ topN: 10,
+ },
+ {
+ name: "how to limit concurrency",
+ query: "how to limit concurrency",
+ expectAnyOf: ["home/concurrency", "home/rate-limits"],
+ topN: 10,
+ },
+];
+
+// ---------------------------------------------------------------------------
+// Test runner
+// ---------------------------------------------------------------------------
+
+interface TestResult {
+ name: string;
+ query: string;
+ passed: boolean;
+ reason?: string;
+ topResults: Array<{ title: string; route: string; score: number }>;
+}
+
+function runTests(idx: MiniSearch): TestResult[] {
+ const results: TestResult[] = [];
+
+ for (const tc of TEST_CASES) {
+ if (tc.skip) {
+ results.push({
+ name: tc.name,
+ query: tc.query,
+ passed: true,
+ reason: "SKIPPED",
+ topResults: [],
+ });
+ continue;
+ }
+
+ const topN = tc.topN ?? 5;
+
+ let searchResults: any[];
+ try {
+ const expanded = expandSynonyms(tc.query);
+ const raw = idx.search(expanded, SEARCH_OPTIONS);
+ searchResults = rerankResults(raw, tc.query);
+ } catch (e: any) {
+ results.push({
+ name: tc.name,
+ query: tc.query,
+ passed: false,
+ reason: `Search threw: ${e.message}`,
+ topResults: [],
+ });
+ continue;
+ }
+
+ // If no expected results, just check it didn't crash
+ if (tc.expectAnyOf.length === 0) {
+ results.push({
+ name: tc.name,
+ query: tc.query,
+ passed: true,
+ reason: "No crash (no expected results)",
+ topResults: [],
+ });
+ continue;
+ }
+
+ const topSlice = searchResults.slice(0, topN);
+ const topRoutes = topSlice.map((r) => {
+ const route = (r.pageRoute as string || r.id).replace("hatchet://docs/", "");
+ return route;
+ });
+ const topIds = topSlice.map((r) => r.id.replace("hatchet://docs/", ""));
+
+ // Check if any expected route appears in top results (match on page route or section id)
+ const found = tc.expectAnyOf.some(
+ (expected) =>
+ topRoutes.some((r) => r === expected || r.startsWith(expected + "#")) ||
+ topIds.some((id) => id === expected || id.startsWith(expected + "#") || expected.includes("#") && id === expected),
+ );
+
+ results.push({
+ name: tc.name,
+ query: tc.query,
+ passed: found,
+ reason: found
+ ? undefined
+ : `Expected one of [${tc.expectAnyOf.join(", ")}] in top ${topN}, got: [${topIds.slice(0, topN).join(", ")}]`,
+ topResults: topSlice.map((r) => ({
+ title: r.title as string,
+ route: r.id.replace("hatchet://docs/", ""),
+ score: r.score,
+ })),
+ });
+ }
+
+ return results;
+}
+
+// ---------------------------------------------------------------------------
+// Output formatting
+// ---------------------------------------------------------------------------
+
+function formatResults(results: TestResult[], warnMode: boolean): void {
+ const passed = results.filter((r) => r.passed);
+ const failed = results.filter((r) => !r.passed);
+
+ if (warnMode) {
+ // Compact output: only show failures as warnings
+ if (failed.length === 0) {
+ console.log(
+ ` Search quality: ${passed.length}/${results.length} tests passed`,
+ );
+ } else {
+ console.warn(
+ `\n ⚠ Search quality: ${failed.length}/${results.length} tests FAILED:`,
+ );
+ for (const r of failed) {
+ console.warn(` • ${r.name} (query: ${JSON.stringify(r.query)})`);
+ }
+ console.warn();
+ }
+ return;
+ }
+
+ console.log("╔════════════════════════════════════════════════════════════╗");
+ console.log("║ Search Quality Test Results ║");
+ console.log("╚════════════════════════════════════════════════════════════╝\n");
+
+ if (failed.length > 0) {
+ console.log(`❌ FAILURES (${failed.length}):\n`);
+ for (const r of failed) {
+ console.log(` FAIL: ${r.name}`);
+ console.log(` query: ${JSON.stringify(r.query)}`);
+ console.log(` ${r.reason}`);
+ if (r.topResults.length > 0) {
+ console.log(` actual top results:`);
+ r.topResults.slice(0, 5).forEach((tr, i) => {
+ console.log(` ${i + 1}. [${tr.score.toFixed(1)}] ${tr.title} (${tr.route})`);
+ });
+ }
+ console.log();
+ }
+ }
+
+ if (passed.length > 0) {
+ console.log(`✅ PASSED (${passed.length}):\n`);
+ for (const r of passed) {
+ const note = r.reason ? ` (${r.reason})` : "";
+ console.log(` OK: ${r.name}${note}`);
+ }
+ console.log();
+ }
+
+ console.log("─".repeat(60));
+ console.log(
+ `Total: ${results.length} | Passed: ${passed.length} | Failed: ${failed.length}`,
+ );
+ console.log("─".repeat(60));
+}
+
+// ---------------------------------------------------------------------------
+// Main
+// ---------------------------------------------------------------------------
+
+function main(): void {
+ const warnMode = process.argv.includes("--warn");
+
+ if (!fs.existsSync(INDEX_PATH)) {
+ if (warnMode) {
+ console.warn(" ⚠ Search index not found — skipping search quality tests");
+ process.exit(0);
+ }
+ console.error(
+ "Search index not found. Run 'pnpm run generate-llms' first.",
+ );
+ process.exit(1);
+ }
+
+ if (!warnMode) {
+ console.log("Loading search index...");
+ }
+ const idx = loadIndex();
+
+ if (!warnMode) {
+ console.log(`Running ${TEST_CASES.length} search quality tests...\n`);
+ }
+ const results = runTests(idx);
+
+ formatResults(results, warnMode);
+
+ const failed = results.filter((r) => !r.passed);
+ // In --warn mode, always exit 0 so we don't block the dev server
+ process.exit(warnMode ? 0 : failed.length > 0 ? 1 : 0);
+}
+
+main();
diff --git a/frontend/docs/styles/global.css b/frontend/docs/styles/global.css
index 0b9504522..26369c23a 100644
--- a/frontend/docs/styles/global.css
+++ b/frontend/docs/styles/global.css
@@ -264,3 +264,23 @@ nav {
.dark ._sticky {
box-shadow: none !important;
}
+
+/* MCP / markdown action links in breadcrumb row */
+.page-actions {
+ position: absolute;
+ top: -1.75rem;
+ right: 0;
+ display: flex;
+ gap: 0.75rem;
+ align-items: center;
+ white-space: nowrap;
+}
+
+/* Drop to its own row on narrower viewports */
+@media (max-width: 1100px) {
+ .page-actions {
+ position: static;
+ margin-top: 0.5rem;
+ margin-bottom: 0.25rem;
+ }
+}
diff --git a/frontend/docs/tailwind.config.js b/frontend/docs/tailwind.config.cjs
similarity index 94%
rename from frontend/docs/tailwind.config.js
rename to frontend/docs/tailwind.config.cjs
index ad3f29333..1f1181116 100644
--- a/frontend/docs/tailwind.config.js
+++ b/frontend/docs/tailwind.config.cjs
@@ -1,6 +1,7 @@
-/** @type {import('tailwindcss').Config} */
+import tailwindcssAnimate from "tailwindcss-animate";
-module.exports = {
+/** @type {import('tailwindcss').Config} */
+export default {
darkMode: ["class"],
content: [
"./pages/**/*.{js,jsx,ts,tsx,md,mdx}",
@@ -62,5 +63,5 @@ module.exports = {
},
},
},
- plugins: [require("tailwindcss-animate")],
+ plugins: [tailwindcssAnimate],
};
diff --git a/frontend/docs/theme.config.tsx b/frontend/docs/theme.config.tsx
index 9797ce47a..cb8906917 100644
--- a/frontend/docs/theme.config.tsx
+++ b/frontend/docs/theme.config.tsx
@@ -1,6 +1,75 @@
-import React, { useEffect } from "react";
+import React, { useCallback, useEffect, useState } from "react";
import { useConfig, useTheme } from "nextra-theme-docs";
import { useRouter } from "next/router";
+import posthog from "posthog-js";
+import Search from "@/components/Search";
+
+const DEFAULT_ORIGIN = "https://docs.hatchet.run";
+
+function safeBase64Encode(str: string): string {
+ if (typeof btoa === "function") {
+ return btoa(str);
+ }
+ if (typeof Buffer !== "undefined") {
+ return Buffer.from(str).toString("base64");
+ }
+ return "";
+}
+
+const CursorIcon = () => (
+
+);
+
+const ClaudeIcon = () => (
+
+);
+
+const MarkdownIcon = () => (
+
+);
+
+function CopyClaudeButton({ command }: { command: string }) {
+ const [copied, setCopied] = useState(false);
+
+ const handleClick = useCallback((e: React.MouseEvent) => {
+ e.preventDefault();
+ navigator.clipboard.writeText(command);
+ setCopied(true);
+ posthog.capture("mcp_install_click", {
+ editor: "claude-code",
+ method: "copy_command",
+ page: window.location.pathname,
+ });
+ setTimeout(() => setCopied(false), 1500);
+ }, [command]);
+
+ return (
+
+
+ {copied ? "Copied! Run in terminal" : "Add to Claude"}
+
+ );
+}
+
+const pageLinkStyle: React.CSSProperties = {
+ fontSize: "0.75rem",
+ opacity: 0.5,
+ textDecoration: "none",
+ display: "inline-flex",
+ alignItems: "center",
+ gap: "4px",
+ cursor: "pointer",
+};
const config = {
logo: (
@@ -20,20 +89,30 @@ const config = {
),
head: () => {
const { title } = useConfig();
+ const router = useRouter();
const fallbackTitle = "Hatchet Documentation";
+ // Build the path to the LLM-friendly markdown version of this page
+ const pathname = router.pathname.replace(/^\//, "").replace(/\/$/, "") || "index";
+ const llmsMarkdownHref = `/llms/${pathname}.md`;
+
return (
<>
{title ? `${title} - ${fallbackTitle}` : fallbackTitle}
+
+
>
);
},
main: ({ children }) => {
const router = useRouter();
const { setTheme } = useTheme();
+ const [origin, setOrigin] = useState(() =>
+ typeof window !== "undefined" ? window.location.origin : DEFAULT_ORIGIN
+ );
useEffect(() => {
const themeParam = router.query.theme;
@@ -43,7 +122,35 @@ const config = {
}
}, [router.query.theme, setTheme]);
- return <>{children}>;
+ const pathname =
+ router.pathname.replace(/^\//, "").replace(/\/$/, "") || "index";
+ const llmsMarkdownHref = `/llms/${pathname}.md`;
+
+ const mcpUrl = `${origin}/api/mcp`;
+ const cursorConfig = JSON.stringify({
+ command: "npx",
+ args: ["-y", "mcp-remote", mcpUrl],
+ });
+ const cursorDeeplink = `cursor://anysphere.cursor-deeplink/mcp/install?name=hatchet-docs&config=${safeBase64Encode(cursorConfig)}`;
+
+ const claudeCommand = `claude mcp add --transport http hatchet-docs ${mcpUrl}`;
+
+ return (
+
+ );
},
primaryHue: {
dark: 210,
@@ -72,6 +179,9 @@ const config = {
defaultMenuCollapseLevel: 2,
toggleButton: true,
},
+ search: {
+ component: Search,
+ },
darkMode: true,
nextThemes: {
defaultTheme: "dark",
diff --git a/frontend/snippets/.gitignore b/frontend/snippets/.gitignore
new file mode 100644
index 000000000..2b65aa8ea
--- /dev/null
+++ b/frontend/snippets/.gitignore
@@ -0,0 +1,2 @@
+__pycache__/
+.python-version
diff --git a/frontend/snippets/generate.py b/frontend/snippets/generate.py
index 53dfff192..ab89e50d0 100644
--- a/frontend/snippets/generate.py
+++ b/frontend/snippets/generate.py
@@ -19,6 +19,11 @@ IGNORED_FILE_PATTERNS = [
r"test_.*\.go$",
r"_test\.go$",
r"\.e2e\.ts$",
+ r"test_.*_spec\.rb$",
+ r"spec_helper\.rb$",
+ r"Gemfile",
+ r"\.rspec$",
+ r"README\.md$",
]
@@ -41,6 +46,9 @@ class SDKParsingContext(Enum):
GO = ParsingContext(
example_path="sdks/go/examples", extension=".go", comment_prefix="//"
)
+ RUBY = ParsingContext(
+ example_path="sdks/ruby/examples", extension=".rb", comment_prefix="#"
+ )
@dataclass
diff --git a/go.mod b/go.mod
index a03bd787f..8b2d052c7 100644
--- a/go.mod
+++ b/go.mod
@@ -241,7 +241,7 @@ require (
golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.34.0
google.golang.org/genproto/googleapis/rpc v0.0.0-20260203192932-546029d2fa20 // indirect
- google.golang.org/grpc v1.78.0
+ google.golang.org/grpc v1.79.1
google.golang.org/protobuf v1.36.11
gopkg.in/yaml.v3 v3.0.1
)
diff --git a/go.sum b/go.sum
index 203408b65..da1a44577 100644
--- a/go.sum
+++ b/go.sum
@@ -581,8 +581,8 @@ google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409 h1:
google.golang.org/genproto/googleapis/api v0.0.0-20260128011058-8636f8732409/go.mod h1:fl8J1IvUjCilwZzQowmw2b7HQB2eAuYBabMXzWurF+I=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260203192932-546029d2fa20 h1:Jr5R2J6F6qWyzINc+4AM8t5pfUz6beZpHp678GNrMbE=
google.golang.org/genproto/googleapis/rpc v0.0.0-20260203192932-546029d2fa20/go.mod h1:j9x/tPzZkyxcgEFkiKEEGxfvyumM01BEtsW8xzOahRQ=
-google.golang.org/grpc v1.78.0 h1:K1XZG/yGDJnzMdd/uZHAkVqJE+xIDOcmdSFZkBUicNc=
-google.golang.org/grpc v1.78.0/go.mod h1:I47qjTo4OKbMkjA/aOOwxDIiPSBofUtQUI5EfpWvW7U=
+google.golang.org/grpc v1.79.1 h1:zGhSi45ODB9/p3VAawt9a+O/MULLl9dpizzNNpq7flY=
+google.golang.org/grpc v1.79.1/go.mod h1:KmT0Kjez+0dde/v2j9vzwoAScgEPx/Bw1CYChhHLrHQ=
google.golang.org/protobuf v1.36.11 h1:fV6ZwhNocDyBLK0dj+fg8ektcVegBBuEolpbTQyBNVE=
google.golang.org/protobuf v1.36.11/go.mod h1:HTf+CrKn2C3g5S8VImy6tdcUvCska2kB7j23XfzDpco=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
diff --git a/hack/dev/init-dev-token-and-env.sh b/hack/dev/init-dev-token-and-env.sh
index 66cc7233a..173c1d053 100644
--- a/hack/dev/init-dev-token-and-env.sh
+++ b/hack/dev/init-dev-token-and-env.sh
@@ -1,8 +1,14 @@
-alias get_token='go run ./cmd/hatchet-admin token create --name local --tenant-id 707d0855-80ab-4e1f-a156-f1c4546cbf52'
+#!/bin/bash
-cat > ./examples/simple/.env </dev/null
+}
+
+cat <= 0 {
- limit.Int32 = limits.Limit
- limit.Valid = true
- }
-
- alarm := pgtype.Int4{}
-
- if limits.Alarm >= 0 {
- alarm.Int32 = limits.Alarm
- alarm.Valid = true
- }
-
- window := pgtype.Text{}
-
- if limits.Window != nil {
- window.String = limits.Window.String()
- window.Valid = true
- }
-
- cvm := pgtype.Bool{Bool: false, Valid: true}
-
- if limits.CustomValueMeter {
- cvm.Bool = true
- }
-
- if upsert {
- _, err := t.queries.UpsertTenantResourceLimit(ctx, t.pool, sqlcv1.UpsertTenantResourceLimitParams{
- Tenantid: tenantId,
- Resource: sqlcv1.NullLimitResource{
- LimitResource: limits.Resource,
- Valid: true,
- },
- LimitValue: limit,
- AlarmValue: alarm,
- Window: window,
- CustomValueMeter: cvm,
- })
-
- return err
- }
-
- _, err := t.queries.SelectOrInsertTenantResourceLimit(ctx, t.pool, sqlcv1.SelectOrInsertTenantResourceLimitParams{
- Tenantid: tenantId,
- Resource: sqlcv1.NullLimitResource{
- LimitResource: limits.Resource,
- Valid: true,
- },
- LimitValue: limit,
- AlarmValue: alarm,
- Window: window,
- CustomValueMeter: cvm,
- })
-
- return err
-}
-
func (t *tenantLimitRepository) GetLimits(ctx context.Context, tenantId uuid.UUID) ([]*sqlcv1.TenantResourceLimit, error) {
- if t.enforceLimitsFunc != nil {
- enforce, err := t.enforceLimitsFunc(ctx, tenantId.String())
- if err != nil {
- return nil, err
- }
-
- if !enforce {
- return []*sqlcv1.TenantResourceLimit{}, nil
- }
- } else if !t.enforceLimits {
+ if !t.enforceLimits {
return []*sqlcv1.TenantResourceLimit{}, nil
}
@@ -273,16 +143,7 @@ func (t *tenantLimitRepository) GetLimits(ctx context.Context, tenantId uuid.UUI
}
func (t *tenantLimitRepository) CanCreate(ctx context.Context, resource sqlcv1.LimitResource, tenantId uuid.UUID, numberOfResources int32) (bool, int, error) {
- if t.enforceLimitsFunc != nil {
- enforce, err := t.enforceLimitsFunc(ctx, tenantId.String())
- if err != nil {
- return false, 0, err
- }
-
- if !enforce {
- return true, 0, nil
- }
- } else if !t.enforceLimits {
+ if !t.enforceLimits {
return true, 0, nil
}
@@ -297,7 +158,7 @@ func (t *tenantLimitRepository) CanCreate(ctx context.Context, resource sqlcv1.L
if err != nil && errors.Is(err, pgx.ErrNoRows) {
t.l.Warn().Msgf("no %s tenant limit found, creating default limit", string(resource))
- err = t.SelectOrInsertTenantLimits(ctx, tenantId, nil)
+ err = t.UpdateLimits(ctx, tenantId, t.DefaultLimits())
if err != nil {
return false, 0, err
@@ -318,7 +179,6 @@ func (t *tenantLimitRepository) CanCreate(ctx context.Context, resource sqlcv1.L
if err != nil {
return false, 0, err
}
-
}
// subtract 1 for backwards compatibility
@@ -330,25 +190,12 @@ func (t *tenantLimitRepository) CanCreate(ctx context.Context, resource sqlcv1.L
return true, calcPercent(value+numberOfResources, limit.LimitValue), nil
}
-func (t *tenantLimitRepository) SetOnSuccessMeterCallback(cb func(resource sqlcv1.LimitResource, tenantId uuid.UUID, currentUsage int64)) {
- t.onSuccessMeterCb = cb
-}
-
func calcPercent(value int32, limit int32) int {
return int((float64(value) / float64(limit)) * 100)
}
func (t *tenantLimitRepository) saveMeter(ctx context.Context, resource sqlcv1.LimitResource, tenantId uuid.UUID, numberOfResources int32) (*sqlcv1.TenantResourceLimit, error) {
- if t.enforceLimitsFunc != nil {
- enforce, err := t.enforceLimitsFunc(ctx, tenantId.String())
- if err != nil {
- return nil, err
- }
-
- if !enforce {
- return nil, nil
- }
- } else if !t.enforceLimits {
+ if !t.enforceLimits {
return nil, nil
}
@@ -365,12 +212,6 @@ func (t *tenantLimitRepository) saveMeter(ctx context.Context, resource sqlcv1.L
return nil, err
}
- if t.onSuccessMeterCb != nil {
- go func() { // non-blocking callback
- t.onSuccessMeterCb(resource, tenantId, int64(r.Value))
- }()
- }
-
return r, nil
}
@@ -437,6 +278,43 @@ func (t *tenantLimitRepository) Meter(ctx context.Context, resource sqlcv1.Limit
}
}
+func (t *tenantLimitRepository) UpdateLimits(ctx context.Context, tenantId uuid.UUID, limits []Limit) error {
+ if len(limits) == 0 {
+ return nil
+ }
+
+ resources := make([]string, len(limits))
+ limitValues := make([]int32, len(limits))
+ alarmValues := make([]int32, len(limits))
+ windows := make([]string, len(limits))
+ customValueMeters := make([]bool, len(limits))
+
+ for i, limit := range limits {
+ resources[i] = string(limit.Resource)
+ limitValues[i] = limit.Limit
+ customValueMeters[i] = hasCustomValueMeter(limit.Resource)
+
+ if limit.Alarm != nil {
+ alarmValues[i] = *limit.Alarm
+ } else {
+ alarmValues[i] = int32(float64(limit.Limit) * 0.8) // nolint: gosec
+ }
+
+ if limit.Window != nil {
+ windows[i] = limit.Window.String()
+ }
+ }
+
+ return t.queries.UpsertTenantResourceLimits(ctx, t.pool, sqlcv1.UpsertTenantResourceLimitsParams{
+ Tenantid: tenantId,
+ Resources: resources,
+ Limitvalues: limitValues,
+ Alarmvalues: alarmValues,
+ Windows: windows,
+ Customvaluemeters: customValueMeters,
+ })
+}
+
var ErrResourceExhausted = fmt.Errorf("resource exhausted")
func (t *tenantLimitRepository) Stop() {
diff --git a/pkg/repository/user.go b/pkg/repository/user.go
index e65441faf..7b898a401 100644
--- a/pkg/repository/user.go
+++ b/pkg/repository/user.go
@@ -87,6 +87,10 @@ func BoolPtr(b bool) *bool {
return &b
}
+func Int32Ptr(i int32) *int32 {
+ return &i
+}
+
func VerifyPassword(hashedPW, candidate string) (bool, error) {
err := bcrypt.CompareHashAndPassword([]byte(hashedPW), []byte(candidate))
diff --git a/pkg/repository/worker.go b/pkg/repository/worker.go
index 92f997546..5e27ba520 100644
--- a/pkg/repository/worker.go
+++ b/pkg/repository/worker.go
@@ -376,6 +376,11 @@ func (w *workerRepository) CreateNewWorker(ctx context.Context, tenantId uuid.UU
WorkerSDKS: sqlcv1.WorkerSDKSTYPESCRIPT,
Valid: true,
}
+ case contracts.SDKS_RUBY:
+ createParams.Language = sqlcv1.NullWorkerSDKS{
+ WorkerSDKS: sqlcv1.WorkerSDKSRUBY,
+ Valid: true,
+ }
default:
return nil, fmt.Errorf("invalid sdk: %s", *opts.RuntimeInfo.Language)
}
diff --git a/pkg/v1/client.go b/pkg/v1/client.go
index 5eff7f771..faf2b120d 100644
--- a/pkg/v1/client.go
+++ b/pkg/v1/client.go
@@ -87,6 +87,8 @@ func NewHatchetClient(config ...Config) (HatchetClient, error) {
}, nil
}
+// Deprecated: Metrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Metrics() features.MetricsClient {
if c.metrics == nil {
api := c.V0().API()
@@ -97,25 +99,38 @@ func (c *v1HatchetClientImpl) Metrics() features.MetricsClient {
return c.metrics
}
+// Deprecated: V0 is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// V0 returns the underlying V0 client for backward compatibility.
func (c *v1HatchetClientImpl) V0() v0Client.Client {
return c.v0
}
+// Deprecated: Workflow is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Workflow creates a new workflow declaration with the provided options.
func (c *v1HatchetClientImpl) Workflow(opts create.WorkflowCreateOpts[any]) workflow.WorkflowDeclaration[any, any] {
return workflow.NewWorkflowDeclaration[any, any](opts, c.v0)
}
+// Deprecated: Events is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Events() v0Client.EventClient {
return c.V0().Event()
}
+// Deprecated: Worker is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Worker creates and configures a new worker with the provided options and optional configuration functions.
func (c *v1HatchetClientImpl) Worker(opts worker.WorkerOpts) (worker.Worker, error) {
return worker.NewWorker(c.workers, c.v0, opts)
}
+// Deprecated: RateLimits is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) RateLimits() features.RateLimitsClient {
if c.rateLimits == nil {
api := c.V0().API()
@@ -126,6 +141,8 @@ func (c *v1HatchetClientImpl) RateLimits() features.RateLimitsClient {
return c.rateLimits
}
+// Deprecated: Runs is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Runs() features.RunsClient {
if c.runs == nil {
tenantId := c.V0().TenantId()
@@ -134,6 +151,8 @@ func (c *v1HatchetClientImpl) Runs() features.RunsClient {
return c.runs
}
+// Deprecated: Workers is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Workers() features.WorkersClient {
if c.workers == nil {
api := c.V0().API()
@@ -143,6 +162,8 @@ func (c *v1HatchetClientImpl) Workers() features.WorkersClient {
return c.workers
}
+// Deprecated: Workflows is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Workflows() features.WorkflowsClient {
if c.workflows == nil {
api := c.V0().API()
@@ -152,6 +173,8 @@ func (c *v1HatchetClientImpl) Workflows() features.WorkflowsClient {
return c.workflows
}
+// Deprecated: Crons is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Crons() features.CronsClient {
if c.crons == nil {
api := c.V0().API()
@@ -161,6 +184,8 @@ func (c *v1HatchetClientImpl) Crons() features.CronsClient {
return c.crons
}
+// Deprecated: Schedules is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Schedules() features.SchedulesClient {
if c.schedules == nil {
api := c.V0().API()
@@ -172,6 +197,8 @@ func (c *v1HatchetClientImpl) Schedules() features.SchedulesClient {
return c.schedules
}
+// Deprecated: Filters is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Filters() features.FiltersClient {
if c.filters == nil {
api := c.V0().API()
@@ -181,6 +208,8 @@ func (c *v1HatchetClientImpl) Filters() features.FiltersClient {
return c.filters
}
+// Deprecated: CEL is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) CEL() features.CELClient {
if c.cel == nil {
api := c.V0().API()
@@ -190,6 +219,8 @@ func (c *v1HatchetClientImpl) CEL() features.CELClient {
return c.cel
}
+// Deprecated: Webhooks is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *v1HatchetClientImpl) Webhooks() features.WebhooksClient {
if c.webhooks == nil {
api := c.V0().API()
diff --git a/pkg/v1/config.go b/pkg/v1/config.go
index 88c29b0c8..d5d60af88 100644
--- a/pkg/v1/config.go
+++ b/pkg/v1/config.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package v1
import (
diff --git a/pkg/v1/factory/durable_factory.go b/pkg/v1/factory/durable_factory.go
index aa5cdff52..b8d8b8980 100644
--- a/pkg/v1/factory/durable_factory.go
+++ b/pkg/v1/factory/durable_factory.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package factory
import (
diff --git a/pkg/v1/factory/task_factory.go b/pkg/v1/factory/task_factory.go
index 63bba98b9..5d68462b2 100644
--- a/pkg/v1/factory/task_factory.go
+++ b/pkg/v1/factory/task_factory.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package factory
import (
diff --git a/pkg/v1/features/cel.go b/pkg/v1/features/cel.go
index 06f7c2e5c..93bba0239 100644
--- a/pkg/v1/features/cel.go
+++ b/pkg/v1/features/cel.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -11,8 +13,6 @@ import (
// Deprecated: CELClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// The CEL client is a client for debugging CEL expressions within Hatchet
type CELClient interface {
Debug(ctx context.Context, expression string, input map[string]interface{}, additionalMetadata, filterPayload *map[string]interface{}) (*CELEvaluationResult, error)
}
@@ -44,6 +44,9 @@ type CELEvaluationResult struct {
err *string
}
+// Deprecated: Debug is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Debug a CEL expression with the provided input, filter payload, and optional metadata. Useful for testing and validating CEL expressions and debugging issues in production.
func (c *celClientImpl) Debug(ctx context.Context, expression string, input map[string]interface{}, additionalMetadata, filterPayload *map[string]interface{}) (*CELEvaluationResult, error) {
resp, err := c.api.V1CelDebugWithResponse(
diff --git a/pkg/v1/features/crons.go b/pkg/v1/features/crons.go
index 2bda6c54c..0dec8464e 100644
--- a/pkg/v1/features/crons.go
+++ b/pkg/v1/features/crons.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -11,41 +13,24 @@ import (
// Deprecated: CronsClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// CronsClient provides methods for interacting with cron workflow triggers
-// in the Hatchet platform.
type CronsClient interface {
- // Create creates a new cron workflow trigger.
Create(ctx context.Context, workflowName string, cron CreateCronTrigger) (*rest.CronWorkflows, error)
- // Delete removes a cron workflow trigger.
Delete(ctx context.Context, cronId string) error
- // List retrieves a collection of cron workflow triggers based on the provided parameters.
List(ctx context.Context, opts rest.CronWorkflowListParams) (*rest.CronWorkflowsList, error)
- // Get retrieves a specific cron workflow trigger by its ID.
Get(ctx context.Context, cronId string) (*rest.CronWorkflows, error)
}
// Deprecated: CreateCronTrigger is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// CreateCronTrigger contains the configuration for creating a cron trigger.
type CreateCronTrigger struct {
- // Name is the unique identifier for the cron trigger.
- Name string `json:"name"`
-
- // Expression is the cron expression that defines the schedule.
- Expression string `json:"expression"`
-
- // Input is the optional input data for the workflow.
- Input map[string]interface{} `json:"input,omitempty"`
-
- // AdditionalMetadata is optional metadata to associate with the cron trigger.
+ Name string `json:"name"`
+ Expression string `json:"expression"`
+ Input map[string]interface{} `json:"input,omitempty"`
AdditionalMetadata map[string]interface{} `json:"additionalMetadata,omitempty"`
-
- Priority *int32 `json:"priority,omitempty"`
+ Priority *int32 `json:"priority,omitempty"`
}
// cronsClientImpl implements the CronsClient interface.
@@ -56,8 +41,6 @@ type cronsClientImpl struct {
// Deprecated: NewCronsClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// NewCronsClient creates a new client for interacting with cron workflow triggers.
func NewCronsClient(
api *rest.ClientWithResponses,
tenantId *string,
@@ -72,8 +55,6 @@ func NewCronsClient(
// Deprecated: ValidateCronExpression is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// ValidateCronExpression validates that a string is a valid cron expression.
func ValidateCronExpression(expression string) bool {
parser := cron.NewParser(cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
_, err := parser.Parse(expression)
@@ -81,6 +62,9 @@ func ValidateCronExpression(expression string) bool {
return err == nil
}
+// Deprecated: Create is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Create creates a new cron workflow trigger.
func (c *cronsClientImpl) Create(ctx context.Context, workflowName string, cron CreateCronTrigger) (*rest.CronWorkflows, error) {
// Validate cron expression
@@ -119,6 +103,9 @@ func (c *cronsClientImpl) Create(ctx context.Context, workflowName string, cron
return resp.JSON200, nil
}
+// Deprecated: Delete is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Delete removes a cron workflow trigger.
func (c *cronsClientImpl) Delete(ctx context.Context, cronId string) error {
cronIdUUID, err := uuid.Parse(cronId)
@@ -134,6 +121,9 @@ func (c *cronsClientImpl) Delete(ctx context.Context, cronId string) error {
return err
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// List retrieves a collection of cron workflow triggers based on the provided parameters.
func (c *cronsClientImpl) List(ctx context.Context, opts rest.CronWorkflowListParams) (*rest.CronWorkflowsList, error) {
resp, err := c.api.CronWorkflowListWithResponse(
@@ -148,6 +138,9 @@ func (c *cronsClientImpl) List(ctx context.Context, opts rest.CronWorkflowListPa
return resp.JSON200, nil
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves a specific cron workflow trigger by its ID.
func (c *cronsClientImpl) Get(ctx context.Context, cronId string) (*rest.CronWorkflows, error) {
cronIdUUID, err := uuid.Parse(cronId)
@@ -169,12 +162,12 @@ func (c *cronsClientImpl) Get(ctx context.Context, cronId string) (*rest.CronWor
// Deprecated: InvalidCronExpressionError is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// InvalidCronExpressionError represents an error when an invalid cron expression is provided.
type InvalidCronExpressionError struct {
Expression string
}
+// Deprecated: Error is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (e *InvalidCronExpressionError) Error() string {
return "invalid cron expression: " + e.Expression
}
diff --git a/pkg/v1/features/filters.go b/pkg/v1/features/filters.go
index 234510b62..a7ba0e26e 100644
--- a/pkg/v1/features/filters.go
+++ b/pkg/v1/features/filters.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -39,6 +41,8 @@ func NewFiltersClient(
}
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *filtersClientImpl) List(ctx context.Context, opts *rest.V1FilterListParams) (*rest.V1FilterList, error) {
resp, err := c.api.V1FilterListWithResponse(
ctx,
@@ -53,6 +57,8 @@ func (c *filtersClientImpl) List(ctx context.Context, opts *rest.V1FilterListPar
return resp.JSON200, nil
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *filtersClientImpl) Get(ctx context.Context, filterID string) (*rest.V1Filter, error) {
resp, err := c.api.V1FilterGetWithResponse(
ctx,
@@ -67,6 +73,8 @@ func (c *filtersClientImpl) Get(ctx context.Context, filterID string) (*rest.V1F
return resp.JSON200, nil
}
+// Deprecated: Create is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *filtersClientImpl) Create(ctx context.Context, opts rest.V1CreateFilterRequest) (*rest.V1Filter, error) {
resp, err := c.api.V1FilterCreateWithResponse(
ctx,
@@ -81,6 +89,8 @@ func (c *filtersClientImpl) Create(ctx context.Context, opts rest.V1CreateFilter
return resp.JSON200, nil
}
+// Deprecated: Delete is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *filtersClientImpl) Delete(ctx context.Context, filterID string) (*rest.V1Filter, error) {
resp, err := c.api.V1FilterDeleteWithResponse(
ctx,
@@ -95,6 +105,8 @@ func (c *filtersClientImpl) Delete(ctx context.Context, filterID string) (*rest.
return resp.JSON200, nil
}
+// Deprecated: Update is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *filtersClientImpl) Update(ctx context.Context, filterID string, opts rest.V1FilterUpdateJSONRequestBody) (*rest.V1Filter, error) {
resp, err := c.api.V1FilterUpdateWithResponse(
ctx,
diff --git a/pkg/v1/features/metrics.go b/pkg/v1/features/metrics.go
index 3177bbc6c..447b130ea 100644
--- a/pkg/v1/features/metrics.go
+++ b/pkg/v1/features/metrics.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -9,17 +11,11 @@ import (
// Deprecated: MetricsClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// MetricsClient provides methods for retrieving metrics data
-// in the Hatchet platform.
type MetricsClient interface {
- // GetWorkflowMetrics retrieves metrics for a specific workflow.
GetWorkflowMetrics(ctx context.Context, workflowId string, opts *rest.WorkflowGetMetricsParams) (*rest.WorkflowMetrics, error)
- // GetQueueMetrics retrieves tenant-wide queue metrics.
GetQueueMetrics(ctx context.Context, opts *rest.TenantGetQueueMetricsParams) (*rest.TenantGetQueueMetricsResponse, error)
- // GetTaskQueueMetrics retrieves tenant-wide step run queue metrics.
GetTaskQueueMetrics(ctx context.Context) (*rest.TenantGetStepRunQueueMetricsResponse, error)
}
@@ -32,8 +28,6 @@ type metricsClientImpl struct {
// Deprecated: NewMetricsClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// NewMetricsClient creates a new client for interacting with metrics.
func NewMetricsClient(
api *rest.ClientWithResponses,
tenantId *string,
@@ -48,6 +42,9 @@ func NewMetricsClient(
}
}
+// Deprecated: GetWorkflowMetrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// GetWorkflowMetrics retrieves metrics for a specific workflow.
func (m *metricsClientImpl) GetWorkflowMetrics(ctx context.Context, workflowName string, opts *rest.WorkflowGetMetricsParams) (*rest.WorkflowMetrics, error) {
@@ -70,6 +67,9 @@ func (m *metricsClientImpl) GetWorkflowMetrics(ctx context.Context, workflowName
return resp.JSON200, nil
}
+// Deprecated: GetQueueMetrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// GetQueueMetrics retrieves tenant-wide queue metrics.
func (m *metricsClientImpl) GetQueueMetrics(ctx context.Context, opts *rest.TenantGetQueueMetricsParams) (*rest.TenantGetQueueMetricsResponse, error) {
return m.api.TenantGetQueueMetricsWithResponse(
@@ -79,6 +79,9 @@ func (m *metricsClientImpl) GetQueueMetrics(ctx context.Context, opts *rest.Tena
)
}
+// Deprecated: GetTaskQueueMetrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// GetTaskQueueMetrics retrieves tenant-wide step run queue metrics.
func (m *metricsClientImpl) GetTaskQueueMetrics(ctx context.Context) (*rest.TenantGetStepRunQueueMetricsResponse, error) {
return m.api.TenantGetStepRunQueueMetricsWithResponse(
diff --git a/pkg/v1/features/ratelimits.go b/pkg/v1/features/ratelimits.go
index 8a726be95..1cb9299c1 100644
--- a/pkg/v1/features/ratelimits.go
+++ b/pkg/v1/features/ratelimits.go
@@ -1,4 +1,5 @@
-// package features provides functionality for interacting with hatchet features.
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -12,26 +13,17 @@ import (
// Deprecated: CreateRatelimitOpts is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// createRatelimitOpts contains options for creating or updating a rate limit.
type CreateRatelimitOpts struct {
- // key is the unique identifier for the rate limit
- Key string
- // limit is the maximum number of requests allowed within the duration
- Limit int
- // duration specifies the time period for the rate limit
+ Key string
+ Limit int
Duration types.RateLimitDuration
}
// Deprecated: RateLimitsClient is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
-//
-// rateLimitsClient provides an interface for managing rate limits.
type RateLimitsClient interface {
- // upsert creates or updates a rate limit with the provided options.
Upsert(opts CreateRatelimitOpts) error
- // list retrieves rate limits based on the provided parameters (optional).
List(ctx context.Context, opts *rest.RateLimitListParams) (*rest.RateLimitListResponse, error)
}
@@ -60,7 +52,10 @@ func NewRateLimitsClient(
}
}
-// upsert creates or updates a rate limit with the provided options.
+// Deprecated: Upsert is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
+// Upsert creates or updates a rate limit with the provided options.
func (c *rlClientImpl) Upsert(opts CreateRatelimitOpts) error {
return (*c.admin).PutRateLimit(opts.Key, &types.RateLimitOpts{
Max: opts.Limit,
@@ -68,7 +63,10 @@ func (c *rlClientImpl) Upsert(opts CreateRatelimitOpts) error {
})
}
-// list retrieves rate limits based on the provided parameters (optional).
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
+// List retrieves rate limits based on the provided parameters (optional).
func (c *rlClientImpl) List(ctx context.Context, opts *rest.RateLimitListParams) (*rest.RateLimitListResponse, error) {
return c.api.RateLimitListWithResponse(
ctx,
diff --git a/pkg/v1/features/runs.go b/pkg/v1/features/runs.go
index 8b523e8c8..55ad5471b 100644
--- a/pkg/v1/features/runs.go
+++ b/pkg/v1/features/runs.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -69,6 +71,9 @@ func NewRunsClient(
}
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves a workflow run by its ID.
func (r *runsClientImpl) Get(ctx context.Context, runId string) (*rest.V1WorkflowRunGetResponse, error) {
return r.api.V1WorkflowRunGetWithResponse(
@@ -77,6 +82,9 @@ func (r *runsClientImpl) Get(ctx context.Context, runId string) (*rest.V1Workflo
)
}
+// Deprecated: GetStatus is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// GetStatus retrieves the status of a workflow run by its ID.
func (r *runsClientImpl) GetStatus(ctx context.Context, runId string) (*rest.V1WorkflowRunGetStatusResponse, error) {
return r.api.V1WorkflowRunGetStatusWithResponse(
@@ -85,8 +93,10 @@ func (r *runsClientImpl) GetStatus(ctx context.Context, runId string) (*rest.V1W
)
}
-// GetDetails retrieves detailed information about a workflow run by its ID.
-// Deprecated: Use Get instead.
+// Deprecated: GetDetails is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
+// GetDetails retrieves detailed information about a workflow run by its ID. Use Get instead.
func (r *runsClientImpl) GetDetails(ctx context.Context, runId string) (*rest.V1WorkflowRunGetResponse, error) {
return r.api.V1WorkflowRunGetWithResponse(
ctx,
@@ -94,6 +104,9 @@ func (r *runsClientImpl) GetDetails(ctx context.Context, runId string) (*rest.V1
)
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// List retrieves a collection of workflow runs based on the provided parameters.
func (r *runsClientImpl) List(ctx context.Context, opts rest.V1WorkflowRunListParams) (*rest.V1WorkflowRunListResponse, error) {
return r.api.V1WorkflowRunListWithResponse(
@@ -103,6 +116,9 @@ func (r *runsClientImpl) List(ctx context.Context, opts rest.V1WorkflowRunListPa
)
}
+// Deprecated: Replay is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Replay requests a task to be replayed within a workflow run.
func (r *runsClientImpl) Replay(ctx context.Context, opts rest.V1ReplayTaskRequest) (*rest.V1TaskReplayResponse, error) {
json, err := json.Marshal(opts)
@@ -118,6 +134,9 @@ func (r *runsClientImpl) Replay(ctx context.Context, opts rest.V1ReplayTaskReque
)
}
+// Deprecated: Cancel is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Cancel requests cancellation of a specific task within a workflow run.
func (r *runsClientImpl) Cancel(ctx context.Context, opts rest.V1CancelTaskRequest) (*rest.V1TaskCancelResponse, error) {
json, err := json.Marshal(opts)
@@ -133,6 +152,9 @@ func (r *runsClientImpl) Cancel(ctx context.Context, opts rest.V1CancelTaskReque
)
}
+// Deprecated: SubscribeToStream is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// SubscribeToStream subscribes to streaming events for a specific workflow run.
func (r *runsClientImpl) SubscribeToStream(ctx context.Context, workflowRunId string) (<-chan string, error) {
ch := make(chan string)
diff --git a/pkg/v1/features/schedules.go b/pkg/v1/features/schedules.go
index 8483b64ba..f139ea91c 100644
--- a/pkg/v1/features/schedules.go
+++ b/pkg/v1/features/schedules.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -70,6 +72,9 @@ func NewSchedulesClient(
}
}
+// Deprecated: Create is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Create creates a new scheduled workflow run.
func (s *schedulesClientImpl) Create(ctx context.Context, workflowName string, trigger CreateScheduledRunTrigger) (*rest.ScheduledWorkflows, error) {
workflowName = client.ApplyNamespace(workflowName, s.namespace)
@@ -94,6 +99,9 @@ func (s *schedulesClientImpl) Create(ctx context.Context, workflowName string, t
return resp.JSON200, nil
}
+// Deprecated: Delete is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Delete removes a scheduled workflow run.
func (s *schedulesClientImpl) Delete(ctx context.Context, scheduledRunId string) error {
scheduledRunIdUUID, err := uuid.Parse(scheduledRunId)
@@ -109,6 +117,9 @@ func (s *schedulesClientImpl) Delete(ctx context.Context, scheduledRunId string)
return err
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// List retrieves a collection of scheduled workflow runs based on the provided parameters.
func (s *schedulesClientImpl) List(ctx context.Context, opts rest.WorkflowScheduledListParams) (*rest.ScheduledWorkflowsList, error) {
resp, err := s.api.WorkflowScheduledListWithResponse(
@@ -123,6 +134,9 @@ func (s *schedulesClientImpl) List(ctx context.Context, opts rest.WorkflowSchedu
return resp.JSON200, nil
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves a specific scheduled workflow run by its ID.
func (s *schedulesClientImpl) Get(ctx context.Context, scheduledRunId string) (*rest.ScheduledWorkflows, error) {
scheduledRunIdUUID, err := uuid.Parse(scheduledRunId)
diff --git a/pkg/v1/features/tenant.go b/pkg/v1/features/tenant.go
index 88a62b328..83ac502b2 100644
--- a/pkg/v1/features/tenant.go
+++ b/pkg/v1/features/tenant.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -36,6 +38,8 @@ func NewTenantCliet(
}
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (t *tenantClientImpl) Get(ctx context.Context) (*rest.Tenant, error) {
resp, err := t.api.TenantGetWithResponse(ctx, t.tenantId)
diff --git a/pkg/v1/features/webhooks.go b/pkg/v1/features/webhooks.go
index 02fcebc17..2a026ade1 100644
--- a/pkg/v1/features/webhooks.go
+++ b/pkg/v1/features/webhooks.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -97,7 +99,7 @@ type CreateWebhookOpts struct {
// Deprecated: UpdateWebhookOpts is part of the old generics-based v1 Go SDK.
// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type UpdateWebhookOpts struct {
- EventKeyExpression string
+ EventKeyExpression *string
}
// Deprecated: WebhooksClient is part of the old generics-based v1 Go SDK.
@@ -142,6 +144,8 @@ func NewWebhooksClient(
}
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *webhooksClientImpl) List(ctx context.Context, opts rest.V1WebhookListParams) (*rest.V1WebhookList, error) {
resp, err := c.api.V1WebhookListWithResponse(
ctx,
@@ -155,6 +159,8 @@ func (c *webhooksClientImpl) List(ctx context.Context, opts rest.V1WebhookListPa
return resp.JSON200, nil
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *webhooksClientImpl) Get(ctx context.Context, webhookName string) (*rest.V1Webhook, error) {
resp, err := c.api.V1WebhookGetWithResponse(
ctx,
@@ -168,6 +174,8 @@ func (c *webhooksClientImpl) Get(ctx context.Context, webhookName string) (*rest
return resp.JSON200, nil
}
+// Deprecated: Create is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *webhooksClientImpl) Create(ctx context.Context, opts CreateWebhookOpts) (*rest.V1Webhook, error) {
if opts.Auth == nil {
return nil, fmt.Errorf("auth is required")
@@ -190,6 +198,8 @@ func (c *webhooksClientImpl) Create(ctx context.Context, opts CreateWebhookOpts)
return resp.JSON200, nil
}
+// Deprecated: Update is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *webhooksClientImpl) Update(ctx context.Context, webhookName string, opts UpdateWebhookOpts) (*rest.V1Webhook, error) {
resp, err := c.api.V1WebhookUpdateWithResponse(
ctx,
@@ -206,6 +216,8 @@ func (c *webhooksClientImpl) Update(ctx context.Context, webhookName string, opt
return resp.JSON200, nil
}
+// Deprecated: Delete is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c *webhooksClientImpl) Delete(ctx context.Context, webhookName string) error {
_, err := c.api.V1WebhookDeleteWithResponse(
ctx,
diff --git a/pkg/v1/features/workers.go b/pkg/v1/features/workers.go
index 415d55f98..eb455875a 100644
--- a/pkg/v1/features/workers.go
+++ b/pkg/v1/features/workers.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -51,6 +53,9 @@ func NewWorkersClient(
}
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves a worker by its ID.
func (w *workersClientImpl) Get(ctx context.Context, workerId string) (*rest.Worker, error) {
workerIdUUID, err := uuid.Parse(workerId)
@@ -69,6 +74,9 @@ func (w *workersClientImpl) Get(ctx context.Context, workerId string) (*rest.Wor
return resp.JSON200, nil
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// List retrieves all workers for the tenant.
func (w *workersClientImpl) List(ctx context.Context) (*rest.WorkerList, error) {
resp, err := w.api.WorkerListWithResponse(
@@ -82,6 +90,9 @@ func (w *workersClientImpl) List(ctx context.Context) (*rest.WorkerList, error)
return resp.JSON200, nil
}
+// Deprecated: IsPaused is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// IsPaused checks if a worker is paused.
func (w *workersClientImpl) IsPaused(ctx context.Context, workerId string) (bool, error) {
worker, err := w.Get(ctx, workerId)
@@ -98,6 +109,9 @@ func (w *workersClientImpl) IsPaused(ctx context.Context, workerId string) (bool
return *worker.Status == rest.WorkerStatus("PAUSED"), nil
}
+// Deprecated: Pause is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Pause pauses a worker.
func (w *workersClientImpl) Pause(ctx context.Context, workerId string) (*rest.Worker, error) {
workerIdUUID, err := uuid.Parse(workerId)
@@ -123,6 +137,9 @@ func (w *workersClientImpl) Pause(ctx context.Context, workerId string) (*rest.W
return resp.JSON200, nil
}
+// Deprecated: Unpause is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Unpause unpauses a worker.
func (w *workersClientImpl) Unpause(ctx context.Context, workerId string) (*rest.Worker, error) {
workerIdUUID, err := uuid.Parse(workerId)
diff --git a/pkg/v1/features/workflows.go b/pkg/v1/features/workflows.go
index 9ea998d7d..68c5c3609 100644
--- a/pkg/v1/features/workflows.go
+++ b/pkg/v1/features/workflows.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package features
import (
@@ -65,6 +67,9 @@ func NewWorkflowsClient(
}
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves a workflow by its ID or name.
func (w *workflowsClientImpl) Get(ctx context.Context, workflowName string) (*rest.Workflow, error) {
// Try to get the workflow from cache first
@@ -99,6 +104,9 @@ func (w *workflowsClientImpl) Get(ctx context.Context, workflowName string) (*re
return &workflow, nil
}
+// Deprecated: GetId is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// GetId retrieves a workflow by its name.
func (w *workflowsClientImpl) GetId(ctx context.Context, workflowName string) (uuid.UUID, error) {
workflow, err := w.Get(ctx, workflowName)
@@ -109,6 +117,9 @@ func (w *workflowsClientImpl) GetId(ctx context.Context, workflowName string) (u
return uuid.MustParse(workflow.Metadata.Id), nil
}
+// Deprecated: List is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// List retrieves all workflows for the tenant with optional filtering parameters.
func (w *workflowsClientImpl) List(ctx context.Context, opts *rest.WorkflowListParams) (*rest.WorkflowList, error) {
resp, err := w.api.WorkflowListWithResponse(
@@ -123,6 +134,9 @@ func (w *workflowsClientImpl) List(ctx context.Context, opts *rest.WorkflowListP
return resp.JSON200, nil
}
+// Deprecated: Delete is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Delete removes a workflow by its ID or name.
func (w *workflowsClientImpl) Delete(ctx context.Context, workflowName string) (*rest.WorkflowDeleteResponse, error) {
// FIXME: this is a hack to get the workflow by name
diff --git a/pkg/v1/task/task.go b/pkg/v1/task/task.go
index de2dd5d27..3ad03cdfb 100644
--- a/pkg/v1/task/task.go
+++ b/pkg/v1/task/task.go
@@ -115,6 +115,9 @@ type DurableTaskDeclaration[I any] struct {
Fn interface{}
}
+// Deprecated: OnFailureTaskDeclaration is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// OnFailureTaskDeclaration represents a task that will be executed if
// any tasks in the workflow fail.
type OnFailureTaskDeclaration[I any] struct {
@@ -279,6 +282,9 @@ func makeContractTaskOpts(t *TaskShared, taskDefaults *create.TaskDefaults) *con
return taskOpts
}
+// Deprecated: Dump is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Dump converts the task declaration into a protobuf request.
func (t *TaskDeclaration[I]) Dump(workflowName string, taskDefaults *create.TaskDefaults) *contracts.CreateTaskOpts {
base := makeContractTaskOpts(&t.TaskShared, taskDefaults)
@@ -332,6 +338,8 @@ func durationToSeconds(d time.Duration) string {
return fmt.Sprintf("%ds", int(d.Seconds()))
}
+// Deprecated: Dump is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (t *DurableTaskDeclaration[I]) Dump(workflowName string, taskDefaults *create.TaskDefaults) *contracts.CreateTaskOpts {
base := makeContractTaskOpts(&t.TaskShared, taskDefaults)
base.ReadableId = t.Name
@@ -341,6 +349,9 @@ func (t *DurableTaskDeclaration[I]) Dump(workflowName string, taskDefaults *crea
return base
}
+// Deprecated: Dump is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Dump converts the on failure task declaration into a protobuf request.
func (t *OnFailureTaskDeclaration[I]) Dump(workflowName string, taskDefaults *create.TaskDefaults) *contracts.CreateTaskOpts {
base := makeContractTaskOpts(&t.TaskShared, taskDefaults)
@@ -351,17 +362,20 @@ func (t *OnFailureTaskDeclaration[I]) Dump(workflowName string, taskDefaults *cr
return base
}
-// Implement GetName for TaskDeclaration
+// Deprecated: GetName is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (t *TaskDeclaration[I]) GetName() string {
return t.Name
}
-// Implement GetName for DurableTaskDeclaration
+// Deprecated: GetName is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (t *DurableTaskDeclaration[I]) GetName() string {
return t.Name
}
-// Implement GetName for NamedTask
+// Deprecated: GetName is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (t *NamedTaskImpl) GetName() string {
return t.Name
}
diff --git a/pkg/v1/worker/worker.go b/pkg/v1/worker/worker.go
index 77ffd2296..2d2afb499 100644
--- a/pkg/v1/worker/worker.go
+++ b/pkg/v1/worker/worker.go
@@ -147,6 +147,9 @@ type NamedFunction struct {
Fn workflow.WrappedTaskFn
}
+// Deprecated: RegisterWorkflows is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// RegisterWorkflows registers one or more workflows with the worker.
// it converts the workflows to the format expected by the underlying worker implementation
// and registers both the workflow definitions and their action functions.
@@ -259,6 +262,9 @@ func (w *WorkerImpl) RegisterWorkflows(workflows ...workflow.WorkflowBase) error
return nil
}
+// Deprecated: Start is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Start begins worker execution in a non-blocking manner.
// returns a cleanup function to be called when the worker should be stopped,
// and any error encountered during startup.
@@ -324,6 +330,9 @@ func (w *WorkerImpl) Start() (func() error, error) {
}, nil
}
+// Deprecated: StartBlocking is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// StartBlocking begins worker execution and blocks until the process is interrupted.
// this method handles graceful shutdown via interrupt signals.
// returns any error encountered during startup or shutdown.
@@ -342,6 +351,9 @@ func (w *WorkerImpl) StartBlocking(ctx context.Context) error {
return nil
}
+// Deprecated: IsPaused is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// IsPaused checks if all worker instances are paused
func (w *WorkerImpl) IsPaused(ctx context.Context) (bool, error) {
// Create slice of worker IDs to check
@@ -379,6 +391,9 @@ func (w *WorkerImpl) IsPaused(ctx context.Context) (bool, error) {
return true, nil
}
+// Deprecated: Pause is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Pause pauses all worker instances
func (w *WorkerImpl) Pause(ctx context.Context) error {
// Pause main worker if it exists
@@ -400,6 +415,9 @@ func (w *WorkerImpl) Pause(ctx context.Context) error {
return nil
}
+// Deprecated: Unpause is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Unpause resumes all paused worker instances
func (w *WorkerImpl) Unpause(ctx context.Context) error {
// Unpause main worker if it exists
diff --git a/pkg/v1/workflow/declaration.go b/pkg/v1/workflow/declaration.go
index c9a762b52..ee5cfcd2e 100644
--- a/pkg/v1/workflow/declaration.go
+++ b/pkg/v1/workflow/declaration.go
@@ -115,7 +115,10 @@ type WorkflowDeclaration[I, O any] interface {
QueueMetrics(ctx context.Context, opts ...rest.TenantGetQueueMetricsParams) (*rest.TenantGetQueueMetricsResponse, error)
}
-// Define a TaskDeclaration with specific output type
+// Deprecated: TaskWithSpecificOutput is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
+// TaskWithSpecificOutput defines a TaskDeclaration with specific output type
type TaskWithSpecificOutput[I any, T any] struct {
Name string
Fn func(ctx worker.HatchetContext, input I) (*T, error)
@@ -157,6 +160,9 @@ type workflowDeclarationImpl[I any, O any] struct {
DefaultFilters []types.DefaultFilter
}
+// Deprecated: NewWorkflowDeclaration is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// NewWorkflowDeclaration creates a new workflow declaration with the specified options and client.
// The workflow will have input type I and output type O.
func NewWorkflowDeclaration[I any, O any](opts create.WorkflowCreateOpts[I], v0 v0Client.Client) WorkflowDeclaration[I, O] {
@@ -223,6 +229,9 @@ func NewWorkflowDeclaration[I any, O any](opts create.WorkflowCreateOpts[I], v0
return wf
}
+// Deprecated: Task is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Task registers a standard (non-durable) task with the workflow
func (w *workflowDeclarationImpl[I, O]) Task(opts create.WorkflowTask[I, O], fn func(ctx worker.HatchetContext, input I) (interface{}, error)) *task.TaskDeclaration[I] {
name := opts.Name
@@ -333,6 +342,9 @@ func (w *workflowDeclarationImpl[I, O]) Task(opts create.WorkflowTask[I, O], fn
return taskDecl
}
+// Deprecated: DurableTask is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// DurableTask registers a durable task with the workflow
func (w *workflowDeclarationImpl[I, O]) DurableTask(opts create.WorkflowTask[I, O], fn func(ctx worker.DurableHatchetContext, input I) (interface{}, error)) *task.DurableTaskDeclaration[I] {
name := opts.Name
@@ -439,7 +451,10 @@ func (w *workflowDeclarationImpl[I, O]) DurableTask(opts create.WorkflowTask[I,
return taskDecl
}
-// OnFailureTask registers a task that will be executed if the workflow fails.
+// Deprecated: OnFailure is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
+// OnFailure registers a task that will be executed if the workflow fails.
func (w *workflowDeclarationImpl[I, O]) OnFailure(opts create.WorkflowOnFailureTask[I, O], fn func(ctx worker.HatchetContext, input I) (interface{}, error)) *task.OnFailureTaskDeclaration[I] {
// Use reflection to validate the function type
@@ -510,6 +525,9 @@ func (w *workflowDeclarationImpl[I, O]) OnFailure(opts create.WorkflowOnFailureT
return taskDecl
}
+// Deprecated: RunBulkNoWait is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// RunBulkNoWait executes the workflow with the provided inputs without waiting for them to complete.
// Instead it returns a list of run IDs that can be used to check the status of the workflows.
func (w *workflowDeclarationImpl[I, O]) RunBulkNoWait(ctx context.Context, input []I, opts ...v0Client.RunOptFunc) ([]string, error) {
@@ -532,6 +550,9 @@ func (w *workflowDeclarationImpl[I, O]) RunBulkNoWait(ctx context.Context, input
return run, nil
}
+// Deprecated: RunNoWait is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// RunNoWait executes the workflow with the provided input without waiting for it to complete.
// Instead it returns a run ID that can be used to check the status of the workflow.
func (w *workflowDeclarationImpl[I, O]) RunNoWait(ctx context.Context, input I, opts ...v0Client.RunOptFunc) (*v0Client.Workflow, error) {
@@ -543,6 +564,9 @@ func (w *workflowDeclarationImpl[I, O]) RunNoWait(ctx context.Context, input I,
return run, nil
}
+// Deprecated: RunAsChild is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// RunAsChild executes the workflow as a child workflow with the provided input.
func (w *workflowDeclarationImpl[I, O]) RunAsChild(ctx worker.HatchetContext, input I, opts RunAsChildOpts) (*O, error) {
var additionalMetaOpt *map[string]string
@@ -623,6 +647,9 @@ func (w *workflowDeclarationImpl[I, O]) getOutputFromWorkflowResult(workflowResu
return &output, nil
}
+// Deprecated: Run is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Run executes the workflow with the provided input.
// It triggers a workflow run via the Hatchet client and waits for the result.
// Returns the workflow output and any error encountered during execution.
@@ -665,6 +692,9 @@ func getStructFields(t reflect.Type) map[string]reflect.Type {
return fields
}
+// Deprecated: Cron is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Cron schedules the workflow to run on a regular basis using a cron expression.
func (w *workflowDeclarationImpl[I, O]) Cron(ctx context.Context, name string, cronExpr string, input I, opts ...v0Client.RunOptFunc) (*rest.CronWorkflows, error) {
@@ -706,6 +736,9 @@ func (w *workflowDeclarationImpl[I, O]) Cron(ctx context.Context, name string, c
return cronWorkflow, nil
}
+// Deprecated: Schedule is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Schedule schedules the workflow to run at a specific time.
func (w *workflowDeclarationImpl[I, O]) Schedule(ctx context.Context, triggerAt time.Time, input I, opts ...v0Client.RunOptFunc) (*rest.ScheduledWorkflows, error) {
var inputMap map[string]interface{}
@@ -746,6 +779,9 @@ func (w *workflowDeclarationImpl[I, O]) Schedule(ctx context.Context, triggerAt
return scheduledWorkflow, nil
}
+// Deprecated: Dump is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Dump converts the workflow declaration into a protobuf request and function mappings.
// This is used to serialize the workflow for transmission to the Hatchet server.
// Returns the workflow definition as a protobuf request, the task functions, and the on-failure task function.
@@ -897,6 +933,9 @@ func (w *workflowDeclarationImpl[I, O]) Dump() (*contracts.CreateWorkflowVersion
return req, regularNamedFns, durableNamedFns, onFailureFn
}
+// Deprecated: Get is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Get retrieves the current state of the workflow.
func (w *workflowDeclarationImpl[I, O]) Get(ctx context.Context) (*rest.Workflow, error) {
workflow, err := w.workflows.Get(ctx, w.Name)
@@ -937,6 +976,9 @@ func (w *workflowDeclarationImpl[I, O]) Get(ctx context.Context) (*rest.Workflow
// return nil
// }
+// Deprecated: Metrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// Metrics retrieves metrics for this workflow.
func (w *workflowDeclarationImpl[I, O]) Metrics(ctx context.Context, opts ...rest.WorkflowGetMetricsParams) (*rest.WorkflowMetrics, error) {
var options rest.WorkflowGetMetricsParams
@@ -952,6 +994,9 @@ func (w *workflowDeclarationImpl[I, O]) Metrics(ctx context.Context, opts ...res
return metrics, nil
}
+// Deprecated: QueueMetrics is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// QueueMetrics retrieves queue metrics for this workflow.
func (w *workflowDeclarationImpl[I, O]) QueueMetrics(ctx context.Context, opts ...rest.TenantGetQueueMetricsParams) (*rest.TenantGetQueueMetricsResponse, error) {
var options rest.TenantGetQueueMetricsParams
@@ -984,6 +1029,9 @@ func (w *workflowDeclarationImpl[I, O]) QueueMetrics(ctx context.Context, opts .
return metrics, nil
}
+// Deprecated: RunChildWorkflow is part of the old generics-based v1 Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
+//
// RunChildWorkflow is a helper function to run a child workflow with full type safety
// It takes the parent context, the child workflow declaration, and input
// Returns the typed output of the child workflow
diff --git a/pkg/worker/condition/condition.go b/pkg/worker/condition/condition.go
index d99385eeb..854885a0a 100644
--- a/pkg/worker/condition/condition.go
+++ b/pkg/worker/condition/condition.go
@@ -1,3 +1,5 @@
+// Deprecated: This package is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
package condition
import (
diff --git a/pkg/worker/context.go b/pkg/worker/context.go
index 94ec72cf9..4213c6294 100644
--- a/pkg/worker/context.go
+++ b/pkg/worker/context.go
@@ -99,20 +99,28 @@ type HatchetContext interface {
FilterPayload() map[string]interface{}
}
+// Deprecated: TriggeredBy is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type TriggeredBy string
+// Deprecated: These constants are part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
const (
TriggeredByEvent TriggeredBy = "event"
TriggeredByCron TriggeredBy = "cron"
TriggeredBySchedule TriggeredBy = "schedule"
)
+// Deprecated: JobRunLookupData is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type JobRunLookupData struct {
Input map[string]interface{} `json:"input"`
TriggeredBy TriggeredBy `json:"triggered_by"`
Steps map[string]StepData `json:"steps,omitempty"`
}
+// Deprecated: StepRunData is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type StepRunData struct {
Input map[string]interface{} `json:"input"`
TriggeredBy TriggeredBy `json:"triggered_by"`
@@ -123,6 +131,8 @@ type StepRunData struct {
StepRunErrors map[string]string `json:"step_run_errors,omitempty"`
}
+// Deprecated: StepData is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type StepData map[string]interface{}
type hatchetContext struct {
@@ -242,14 +252,20 @@ func (h *hatchetContext) ParentOutput(parent create.NamedTask, output interface{
return fmt.Errorf("parent %s not found in action payload", stepName)
}
+// Deprecated: TriggeredByEvent is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) TriggeredByEvent() bool {
return h.stepData.TriggeredBy == TriggeredByEvent
}
+// Deprecated: WorkflowInput is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) WorkflowInput(target interface{}) error {
return toTarget(h.stepData.Input, target)
}
+// Deprecated: StepRunErrors is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) StepRunErrors() map[string]string {
errors := h.stepData.StepRunErrors
@@ -260,44 +276,64 @@ func (h *hatchetContext) StepRunErrors() map[string]string {
return errors
}
+// Deprecated: UserData is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) UserData(target interface{}) error {
return toTarget(h.stepData.UserData, target)
}
+// Deprecated: FilterPayload is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) FilterPayload() map[string]interface{} {
payload := h.stepData.Triggers["filter_payload"]
return payload
}
+// Deprecated: AdditionalMetadata is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) AdditionalMetadata() map[string]string {
return h.stepData.AdditionalMetadata
}
+// Deprecated: StepName is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) StepName() string {
return h.a.StepName
}
+// Deprecated: StepRunId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) StepRunId() string {
return h.a.StepRunId
}
+// Deprecated: StepId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) StepId() string {
return h.a.StepId
}
+// Deprecated: WorkflowRunId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) WorkflowRunId() string {
return h.a.WorkflowRunId
}
+// Deprecated: WorkflowId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) WorkflowId() *string {
return h.a.WorkflowId
}
+// Deprecated: WorkflowVersionId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) WorkflowVersionId() *string {
return h.a.WorkflowVersionId
}
+// Deprecated: Log is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) Log(message string) {
infoLevel := "INFO"
@@ -315,6 +351,8 @@ func (h *hatchetContext) Log(message string) {
}
}
+// Deprecated: ReleaseSlot is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) ReleaseSlot() error {
err := h.c.Dispatcher().ReleaseSlot(h, h.a.StepRunId)
@@ -325,6 +363,8 @@ func (h *hatchetContext) ReleaseSlot() error {
return nil
}
+// Deprecated: RefreshTimeout is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) RefreshTimeout(incrementTimeoutBy string) error {
err := h.c.Dispatcher().RefreshTimeout(h, h.a.StepRunId, incrementTimeoutBy)
@@ -335,6 +375,8 @@ func (h *hatchetContext) RefreshTimeout(incrementTimeoutBy string) error {
return nil
}
+// Deprecated: StreamEvent is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) StreamEvent(message []byte) {
h.streamEventIndexMu.Lock()
currentIndex := h.streamEventIndex
@@ -348,14 +390,20 @@ func (h *hatchetContext) StreamEvent(message []byte) {
}
}
+// Deprecated: PutStream is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) PutStream(message string) {
h.StreamEvent([]byte(message))
}
+// Deprecated: RetryCount is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) RetryCount() int {
return int(h.a.RetryCount)
}
+// Deprecated: CurChildIndex is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) CurChildIndex() int {
h.indexMu.Lock()
defer h.indexMu.Unlock()
@@ -363,12 +411,16 @@ func (h *hatchetContext) CurChildIndex() int {
return h.i
}
+// Deprecated: IncChildIndex is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) IncChildIndex() {
h.indexMu.Lock()
h.i++
h.indexMu.Unlock()
}
+// Deprecated: SpawnWorkflowOpts is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type SpawnWorkflowOpts struct {
Key *string
Sticky *bool
@@ -380,6 +432,8 @@ func (h *hatchetContext) saveOrLoadListener() (*client.WorkflowRunsListener, err
return h.client().Subscribe().SubscribeToWorkflowRunEvents(h)
}
+// Deprecated: SpawnWorkflow is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) SpawnWorkflow(workflowName string, input any, opts *SpawnWorkflowOpts) (*client.Workflow, error) {
if opts == nil {
opts = &SpawnWorkflowOpts{}
@@ -431,6 +485,8 @@ func (h *hatchetContext) SpawnWorkflow(workflowName string, input any, opts *Spa
return client.NewWorkflow(workflowRunId, listener), nil
}
+// Deprecated: SpawnWorkflowsOpts is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type SpawnWorkflowsOpts struct {
WorkflowName string
Input any
@@ -439,6 +495,8 @@ type SpawnWorkflowsOpts struct {
AdditionalMetadata *map[string]string
}
+// Deprecated: SpawnWorkflows is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) SpawnWorkflows(childWorkflows []*SpawnWorkflowsOpts) ([]*client.Workflow, error) {
triggerWorkflows := make([]*client.RunChildWorkflowsOpts, len(childWorkflows))
@@ -501,18 +559,26 @@ func (h *hatchetContext) SpawnWorkflows(childWorkflows []*SpawnWorkflowsOpts) ([
return createdWorkflows, nil
}
+// Deprecated: ChildIndex is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) ChildIndex() *int32 {
return h.a.ChildIndex
}
+// Deprecated: ChildKey is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) ChildKey() *string {
return h.a.ChildKey
}
+// Deprecated: ParentWorkflowRunId is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) ParentWorkflowRunId() *string {
return h.a.ParentWorkflowRunId
}
+// Deprecated: Priority is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) Priority() int32 {
return h.a.Priority
}
@@ -577,14 +643,20 @@ func toTarget(data interface{}, target interface{}) error {
return nil
}
+// Deprecated: SetContext is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) SetContext(ctx context.Context) {
wc.Context = ctx
}
+// Deprecated: GetContext is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) GetContext() context.Context {
return wc.Context
}
+// Deprecated: ID is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) ID() string {
if wc.id == nil {
return ""
@@ -593,10 +665,14 @@ func (wc *hatchetWorkerContext) ID() string {
return *wc.id
}
+// Deprecated: GetLabels is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) GetLabels() map[string]interface{} {
return wc.worker.labels
}
+// Deprecated: UpsertLabels is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) UpsertLabels(labels map[string]interface{}) error {
if wc.id == nil {
@@ -613,10 +689,14 @@ func (wc *hatchetWorkerContext) UpsertLabels(labels map[string]interface{}) erro
return nil
}
+// Deprecated: HasWorkflow is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (wc *hatchetWorkerContext) HasWorkflow(workflowName string) bool {
return wc.worker.registered_workflows[workflowName]
}
+// Deprecated: SingleWaitResult is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type SingleWaitResult struct {
*WaitResult
@@ -630,10 +710,14 @@ func newSingleWaitResult(key string, wr *WaitResult) *SingleWaitResult {
}
}
+// Deprecated: Unmarshal is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *SingleWaitResult) Unmarshal(in interface{}) error {
return w.WaitResult.Unmarshal(w.key, in)
}
+// Deprecated: WaitResult is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type WaitResult struct {
allResults map[string]map[string][]map[string]interface{}
}
@@ -652,14 +736,20 @@ func newWaitResult(dataBytes []byte) (*WaitResult, error) {
}, nil
}
+// Deprecated: ErrMarshalKeyNotFound is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type ErrMarshalKeyNotFound struct {
Key string
}
+// Deprecated: Error is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (e ErrMarshalKeyNotFound) Error() string {
return fmt.Sprintf("key %s not found", e.Key)
}
+// Deprecated: Keys is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WaitResult) Keys() []string {
keys := make([]string, 0, len(w.allResults))
@@ -672,6 +762,8 @@ func (w *WaitResult) Keys() []string {
return keys
}
+// Deprecated: Unmarshal is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WaitResult) Unmarshal(key string, in interface{}) error {
eNotFound := ErrMarshalKeyNotFound{
Key: key,
@@ -813,7 +905,8 @@ func (h *durableHatchetContext) saveOrLoadDurableEventListener() (*client.Durabl
return h.client().Subscribe().ListenForDurableEvents(context.Background())
}
-// NewDurableHatchetContext creates a DurableHatchetContext from a HatchetContext.
+// Deprecated: NewDurableHatchetContext is an internal function used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of calling this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func NewDurableHatchetContext(ctx HatchetContext) DurableHatchetContext {
// Try to cast directly if it's already a DurableHatchetContext
if durableCtx, ok := ctx.(DurableHatchetContext); ok {
@@ -840,7 +933,8 @@ func NewDurableHatchetContext(ctx HatchetContext) DurableHatchetContext {
}
}
-// Implementation of RunChild method for the hatchetContext
+// Deprecated: RunChild is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (h *hatchetContext) RunChild(workflowName string, input any, opts *SpawnWorkflowOpts) (*client.WorkflowResult, error) {
// Spawn the child workflow
workflow, err := h.SpawnWorkflow(workflowName, input, opts)
diff --git a/pkg/worker/errors.go b/pkg/worker/errors.go
index 6f4e7e841..bd360a5ab 100644
--- a/pkg/worker/errors.go
+++ b/pkg/worker/errors.go
@@ -2,18 +2,26 @@ package worker
import "errors"
+// Deprecated: NonRetryableError is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type NonRetryableError struct {
e error
}
+// Deprecated: Error is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (e *NonRetryableError) Error() string {
return e.e.Error()
}
+// Deprecated: NewNonRetryableError is an internal function used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of calling this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func NewNonRetryableError(err error) error {
return &NonRetryableError{e: err}
}
+// Deprecated: IsNonRetryableError is an internal function used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of calling this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func IsNonRetryableError(err error) bool {
e := &NonRetryableError{}
return errors.As(err, &e)
diff --git a/pkg/worker/managed-compute.go b/pkg/worker/managed-compute.go
index ec07e04fc..784e99e2d 100644
--- a/pkg/worker/managed-compute.go
+++ b/pkg/worker/managed-compute.go
@@ -11,6 +11,8 @@ import (
"github.com/hatchet-dev/hatchet/pkg/client/cloud/rest"
)
+// Deprecated: ManagedCompute is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type ManagedCompute struct {
ActionRegistry *ActionRegistry
Client client.Client
@@ -20,6 +22,8 @@ type ManagedCompute struct {
Logger *zerolog.Logger
}
+// Deprecated: NewManagedCompute is an internal function used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of calling this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func NewManagedCompute(actionRegistry *ActionRegistry, client client.Client, maxRuns int) *ManagedCompute {
if maxRuns == 0 {
maxRuns = 1
@@ -106,6 +110,8 @@ func getComputeConfigs(actions *ActionRegistry, maxRuns int) []rest.CreateManage
return configs
}
+// Deprecated: CloudRegister is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of calling this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (mc *ManagedCompute) CloudRegister(ctx context.Context) {
if mc.CloudRegisterID != nil {
mc.Logger.Info().Msg("Registering cloud compute plan with ID: " + *mc.CloudRegisterID)
diff --git a/pkg/worker/worker.go b/pkg/worker/worker.go
index dc885c332..e577e12d0 100644
--- a/pkg/worker/worker.go
+++ b/pkg/worker/worker.go
@@ -54,26 +54,38 @@ type actionImpl struct {
compute *compute.Compute
}
+// Deprecated: Name is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) Name() string {
return j.name
}
+// Deprecated: Run is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) Run(args ...interface{}) []interface{} {
return j.run(args...)
}
+// Deprecated: MethodFn is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) MethodFn() any {
return j.method
}
+// Deprecated: ConcurrencyFn is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) ConcurrencyFn() GetWorkflowConcurrencyGroupFn {
return j.runConcurrencyAction
}
+// Deprecated: Service is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) Service() string {
return j.service
}
+// Deprecated: Compute is an internal method used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *actionImpl) Compute() *compute.Compute {
return j.compute
}
diff --git a/pkg/worker/workflow.go b/pkg/worker/workflow.go
index dadb61d67..f3bc3b3dd 100644
--- a/pkg/worker/workflow.go
+++ b/pkg/worker/workflow.go
@@ -24,6 +24,8 @@ func Cron(c string) cron {
return cron(c)
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c cron) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
if wt.Cron == nil {
wt.Cron = []string{}
@@ -40,6 +42,8 @@ func Crons(c ...string) cronArr {
return cronArr(c)
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (c cronArr) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
if wt.Cron == nil {
wt.Cron = []string{}
@@ -56,6 +60,8 @@ func NoTrigger() noTrigger {
return noTrigger{}
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (n noTrigger) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
// do nothing
}
@@ -68,6 +74,8 @@ func At(t ...time.Time) scheduled {
return t
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (s scheduled) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
if wt.Schedules == nil {
wt.Schedules = []time.Time{}
@@ -105,6 +113,8 @@ func Event(e string) event {
return event(e)
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (e event) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
if wt.Events == nil {
wt.Events = []string{}
@@ -126,6 +136,8 @@ func Events(events ...string) eventsArr {
return events
}
+// Deprecated: ToWorkflowTriggers is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (e eventsArr) ToWorkflowTriggers(wt *types.WorkflowTriggers, namespace string) {
if wt.Events == nil {
wt.Events = []string{}
@@ -216,6 +228,8 @@ func (c *WorkflowConcurrency) LimitStrategy(limitStrategy types.WorkflowConcurre
return c
}
+// Deprecated: ToWorkflow is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *WorkflowJob) ToWorkflow(svcName string, namespace string) types.Workflow {
apiJob, err := j.ToWorkflowJob(svcName, namespace)
@@ -272,6 +286,8 @@ func (j *WorkflowJob) ToWorkflow(svcName string, namespace string) types.Workflo
return w
}
+// Deprecated: ToWorkflowJob is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *WorkflowJob) ToWorkflowJob(svcName string, namespace string) (*types.WorkflowJob, error) {
apiJob := &types.WorkflowJob{
Description: j.Description,
@@ -292,17 +308,25 @@ func (j *WorkflowJob) ToWorkflowJob(svcName string, namespace string) (*types.Wo
return apiJob, nil
}
+// Deprecated: ToWorkflowTrigger is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *WorkflowJob) ToWorkflowTrigger() triggerConverter {
return j.On
}
+// Deprecated: ActionWithCompute is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type ActionWithCompute struct {
fn any
compute *compute.Compute
}
+// Deprecated: ActionMap is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type ActionMap map[string]ActionWithCompute
+// Deprecated: ToActionMap is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (j *WorkflowJob) ToActionMap(svcName string) ActionMap {
res := ActionMap{}
@@ -450,10 +474,14 @@ func (w *WorkflowStep) AddParents(parents ...string) *WorkflowStep {
return w
}
+// Deprecated: ToWorkflowTrigger is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) ToWorkflowTrigger() triggerConverter {
return NoTrigger()
}
+// Deprecated: ToWorkflow is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) ToWorkflow(svcName string, namespace string) types.Workflow {
jobName := w.Name
@@ -470,6 +498,8 @@ func (w *WorkflowStep) ToWorkflow(svcName string, namespace string) types.Workfl
return workflowJob.ToWorkflow(svcName, namespace)
}
+// Deprecated: ToActionMap is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) ToActionMap(svcName string) ActionMap {
step := *w
@@ -481,6 +511,8 @@ func (w *WorkflowStep) ToActionMap(svcName string) ActionMap {
}
}
+// Deprecated: Step is an internal type used by the new Go SDK.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead of using this directly. Migration guide: https://docs.hatchet.run/home/migration-guide-go
type Step struct {
Id string
@@ -493,6 +525,8 @@ type Step struct {
APIStep types.WorkflowStep
}
+// Deprecated: ToWorkflowStep is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) ToWorkflowStep(svcName string, index int, namespace string) (*Step, error) {
fnType := reflect.TypeOf(w.Function)
@@ -564,6 +598,8 @@ func (w *WorkflowStep) ToWorkflowStep(svcName string, index int, namespace strin
return res, nil
}
+// Deprecated: GetStepId is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) GetStepId(index int) string {
if w.Name != "" {
return w.Name
@@ -579,6 +615,8 @@ func (w *WorkflowStep) GetStepId(index int) string {
return stepId
}
+// Deprecated: GetActionId is part of the legacy v0 workflow definition system.
+// Use the new Go SDK at github.com/hatchet-dev/hatchet/sdks/go instead. Migration guide: https://docs.hatchet.run/home/migration-guide-go
func (w *WorkflowStep) GetActionId(svcName string, index int) string {
stepId := w.GetStepId(index)
diff --git a/sdks/go/features/webhooks.go b/sdks/go/features/webhooks.go
index bac29b77d..aa7ec874f 100644
--- a/sdks/go/features/webhooks.go
+++ b/sdks/go/features/webhooks.go
@@ -119,7 +119,7 @@ type CreateWebhookOpts struct {
}
type UpdateWebhookOpts struct {
- EventKeyExpression string
+ EventKeyExpression *string
ScopeExpression *string
StaticPayload *map[string]interface{}
}
diff --git a/sdks/python/CHANGELOG.md b/sdks/python/CHANGELOG.md
index 120a0ba5d..00f232a09 100644
--- a/sdks/python/CHANGELOG.md
+++ b/sdks/python/CHANGELOG.md
@@ -5,12 +5,27 @@ All notable changes to Hatchet's Python SDK will be documented in this changelog
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## [1.24.0] - 2026-02-13
+
+### Added
+
+- Webhooks client for managing incoming webhooks: create, list, get, update, and delete methods for webhooks, so external systems (e.g. GitHub, Stripe) can trigger workflows via HTTP.
+
+## [1.23.4] - 2026-02-13
+
+### Changed
+
+- Fixes cases where raising exception classes or exceptions with no message would cause the whole error, including the stack trace, to be converted to an empty string.
+- When an error is raised because a workflow has no tasks, the error message now includes the workflow's name.
+
## [1.23.3] - 2026-02-12
### Added
- Adds type-hinted `Standalone.output_validator` and `Standalone.output_validator_type` properties to support easier type-safety and match the `input_validator` property pattern on `BaseWorkflow`.
- Adds type-hinted `Task.output_validator` and `Task.output_validator_type` properties to support easier type-safety and match the patterns on `BaseWorkflow/Standalone`.
+- Adds parameterized unit tests documenting current retry behavior of the Python SDK’s tenacity retry predicate for REST and gRPC errors.
+
## [1.23.2] - 2026-02-11
diff --git a/sdks/python/examples/quickstart/README.md b/sdks/python/examples/quickstart/README.md
index 6e4fdb8d2..c619f524d 100644
--- a/sdks/python/examples/quickstart/README.md
+++ b/sdks/python/examples/quickstart/README.md
@@ -20,6 +20,8 @@ cd hatchet-python-quickstart
2. Set the required environment variable `HATCHET_CLIENT_TOKEN` created in the [Getting Started Guide](https://docs.hatchet.run/home/hatchet-cloud-quickstart).
+For most users, this token is the only required configuration — no server URL or additional settings are needed.
+
```bash
export HATCHET_CLIENT_TOKEN=
```
diff --git a/sdks/python/examples/return_exceptions/test_return_exceptions.py b/sdks/python/examples/return_exceptions/test_return_exceptions.py
index 2dca8d679..ce61ee7ea 100644
--- a/sdks/python/examples/return_exceptions/test_return_exceptions.py
+++ b/sdks/python/examples/return_exceptions/test_return_exceptions.py
@@ -2,7 +2,13 @@ import asyncio
import pytest
-from examples.return_exceptions.worker import Input, return_exceptions_task
+from examples.return_exceptions.worker import (
+ Input,
+ exception_parsing_workflow,
+ return_exceptions_task,
+)
+from hatchet_sdk.exceptions import FailedTaskRunExceptionGroup, TaskRunError
+from hatchet_sdk.runnables.types import EmptyModel
@pytest.mark.asyncio(loop_scope="session")
@@ -38,3 +44,18 @@ def test_return_exceptions_sync() -> None:
assert f"error in task with index {i}" in str(result)
else:
assert result == {"message": "this is a successful task."}
+
+
+@pytest.mark.asyncio(loop_scope="session")
+async def test_exceptions_parsing() -> None:
+ try:
+ await exception_parsing_workflow.aio_run(
+ EmptyModel(),
+ )
+ pytest.fail("Workflow run should have raised an exception")
+ except Exception as e:
+ assert isinstance(e, FailedTaskRunExceptionGroup)
+ for exception in e.exceptions:
+ # Test that we don't get empty error messages
+ assert exception.serialize(include_metadata=True)
+ assert exception.serialize(include_metadata=False)
diff --git a/sdks/python/examples/return_exceptions/worker.py b/sdks/python/examples/return_exceptions/worker.py
index 10f5db8fa..fdae2f085 100644
--- a/sdks/python/examples/return_exceptions/worker.py
+++ b/sdks/python/examples/return_exceptions/worker.py
@@ -1,5 +1,3 @@
-from pydantic import BaseModel
-
from hatchet_sdk import Context, EmptyModel, Hatchet
hatchet = Hatchet()
@@ -15,3 +13,34 @@ async def return_exceptions_task(input: Input, ctx: Context) -> dict[str, str]:
raise ValueError(f"error in task with index {input.index}")
return {"message": "this is a successful task."}
+
+
+exception_parsing_workflow = hatchet.workflow(name="ExceptionParsingWorkflow")
+
+
+@exception_parsing_workflow.task()
+async def exception_class_no_name_task(input: EmptyModel, ctx: Context) -> None:
+ class CustomNoNamedException(Exception): ...
+
+ CustomNoNamedException.__name__ = ""
+ raise CustomNoNamedException
+
+
+@exception_parsing_workflow.task()
+async def exception_class_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_no_args_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError()
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_falsy_arg_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError("")
+
+
+@exception_parsing_workflow.task()
+async def exception_instance_truthy_arg_task(input: EmptyModel, ctx: Context) -> None:
+ raise ValueError("Oh no!")
diff --git a/sdks/python/examples/webhook_with_scope/test_webhooks_with_scope.py b/sdks/python/examples/webhook_with_scope/test_webhooks_with_scope.py
index 8deee8c5c..f30cc62b5 100644
--- a/sdks/python/examples/webhook_with_scope/test_webhooks_with_scope.py
+++ b/sdks/python/examples/webhook_with_scope/test_webhooks_with_scope.py
@@ -13,13 +13,6 @@ from examples.webhook_with_scope.worker import (
WebhookInputWithStaticPayload,
)
from hatchet_sdk import Hatchet
-from hatchet_sdk.clients.rest.api.webhook_api import WebhookApi
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request import (
- V1CreateWebhookRequest,
-)
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import (
- V1CreateWebhookRequestBasicAuth,
-)
from hatchet_sdk.clients.rest.models.v1_event import V1Event
from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus
from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary
@@ -119,38 +112,27 @@ async def webhook_with_scope_expression(
username: str = TEST_BASIC_USERNAME,
password: str = TEST_BASIC_PASSWORD,
) -> AsyncGenerator[V1Webhook, None]:
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
if event_key_expression is None:
event_key_expression = (
f"'{hatchet.config.apply_namespace('webhook-scope')}:' + input.type"
)
- webhook_request = V1CreateWebhookRequestBasicAuth(
- sourceName=V1WebhookSourceName.GENERIC,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=V1WebhookSourceName.GENERIC,
name=f"test-webhook-scope-{test_run_id}",
- eventKeyExpression=event_key_expression,
- scopeExpression=scope_expression,
- authType="BASIC",
+ event_key_expression=event_key_expression,
+ scope_expression=scope_expression,
auth=V1WebhookBasicAuth(
username=username,
password=password,
),
)
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
- )
-
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
@asynccontextmanager
@@ -162,38 +144,27 @@ async def webhook_with_static_payload(
username: str = TEST_BASIC_USERNAME,
password: str = TEST_BASIC_PASSWORD,
) -> AsyncGenerator[V1Webhook, None]:
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
if event_key_expression is None:
event_key_expression = (
f"'{hatchet.config.apply_namespace('webhook-static')}:' + input.type"
)
- webhook_request = V1CreateWebhookRequestBasicAuth(
- sourceName=V1WebhookSourceName.GENERIC,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=V1WebhookSourceName.GENERIC,
name=f"test-webhook-static-{test_run_id}",
- eventKeyExpression=event_key_expression,
- staticPayload=static_payload,
- authType="BASIC",
+ event_key_expression=event_key_expression,
+ static_payload=static_payload,
auth=V1WebhookBasicAuth(
username=username,
password=password,
),
)
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
- )
-
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
@asynccontextmanager
@@ -206,39 +177,28 @@ async def webhook_with_scope_and_static(
username: str = TEST_BASIC_USERNAME,
password: str = TEST_BASIC_PASSWORD,
) -> AsyncGenerator[V1Webhook, None]:
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
if event_key_expression is None:
event_key_expression = (
f"'{hatchet.config.apply_namespace('webhook-scope')}:' + input.type"
)
- webhook_request = V1CreateWebhookRequestBasicAuth(
- sourceName=V1WebhookSourceName.GENERIC,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=V1WebhookSourceName.GENERIC,
name=f"test-webhook-both-{test_run_id}",
- eventKeyExpression=event_key_expression,
- scopeExpression=scope_expression,
- staticPayload=static_payload,
- authType="BASIC",
+ event_key_expression=event_key_expression,
+ scope_expression=scope_expression,
+ static_payload=static_payload,
auth=V1WebhookBasicAuth(
username=username,
password=password,
),
)
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
- )
-
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
def url(tenant_id: str, webhook_name: str) -> str:
@@ -483,3 +443,39 @@ async def test_scope_expression_uses_static_payload_values(
assert triggered_event is not None
assert triggered_event.scope == "cust-123"
+
+
+@pytest.mark.asyncio(loop_scope="session")
+async def test_webhook_update_scope_expression(
+ hatchet: Hatchet,
+ test_run_id: str,
+ test_start: datetime,
+ webhook_body_with_scope: WebhookInputWithScope,
+) -> None:
+ """Update webhook scope_expression without passing event_key_expression (SDK fetches current)."""
+ async with webhook_with_scope_expression(
+ hatchet,
+ test_run_id,
+ scope_expression="input.type",
+ ) as incoming_webhook:
+
+ updated = hatchet.webhooks.update(
+ incoming_webhook.name,
+ scope_expression="input.scope",
+ )
+ assert updated.scope_expression == "input.scope"
+ assert updated.event_key_expression == incoming_webhook.event_key_expression
+
+ async with await send_webhook_request(
+ url(hatchet.tenant_id, incoming_webhook.name),
+ webhook_body_with_scope.model_dump(),
+ ) as response:
+ assert response.status == 200
+ data = await response.json()
+ assert data == {"message": "ok"}
+
+ triggered_event = await wait_for_event(
+ hatchet, incoming_webhook.name, test_start
+ )
+ assert triggered_event is not None
+ assert triggered_event.scope == webhook_body_with_scope.scope
diff --git a/sdks/python/examples/webhooks/test_webhooks.py b/sdks/python/examples/webhooks/test_webhooks.py
index 317ce0657..c5ffcd96d 100644
--- a/sdks/python/examples/webhooks/test_webhooks.py
+++ b/sdks/python/examples/webhooks/test_webhooks.py
@@ -14,19 +14,6 @@ import pytest
from examples.webhooks.worker import WebhookInput
from hatchet_sdk import Hatchet
-from hatchet_sdk.clients.rest.api.webhook_api import WebhookApi
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request import (
- V1CreateWebhookRequest,
-)
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request_api_key import (
- V1CreateWebhookRequestAPIKey,
-)
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import (
- V1CreateWebhookRequestBasicAuth,
-)
-from hatchet_sdk.clients.rest.models.v1_create_webhook_request_hmac import (
- V1CreateWebhookRequestHMAC,
-)
from hatchet_sdk.clients.rest.models.v1_event import V1Event
from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus
from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary
@@ -50,6 +37,9 @@ TEST_HMAC_SIGNATURE_HEADER = "X-Signature"
TEST_HMAC_SECRET = "test_hmac_secret"
+hatchet = Hatchet(debug=True)
+
+
@pytest.fixture
def webhook_body() -> WebhookInput:
return WebhookInput(type="test", message="Hello, world!")
@@ -169,33 +159,18 @@ async def basic_auth_webhook(
password: str = TEST_BASIC_PASSWORD,
source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,
) -> AsyncGenerator[V1Webhook, None]:
- ## Hack to get the API client
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
- webhook_request = V1CreateWebhookRequestBasicAuth(
- sourceName=source_name,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=source_name,
name=f"test-webhook-basic-{test_run_id}",
- eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
- authType="BASIC",
- auth=V1WebhookBasicAuth(
- username=username,
- password=password,
- ),
- )
-
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
+ event_key_expression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
+ auth=V1WebhookBasicAuth(username=username, password=password),
)
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
@asynccontextmanager
@@ -206,32 +181,21 @@ async def api_key_webhook(
api_key: str = TEST_API_KEY_VALUE,
source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,
) -> AsyncGenerator[V1Webhook, None]:
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
- webhook_request = V1CreateWebhookRequestAPIKey(
- sourceName=source_name,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=source_name,
name=f"test-webhook-apikey-{test_run_id}",
- eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
- authType="API_KEY",
+ event_key_expression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
auth=V1WebhookAPIKeyAuth(
headerName=header_name,
apiKey=api_key,
),
)
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
- )
-
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
@asynccontextmanager
@@ -244,14 +208,11 @@ async def hmac_webhook(
encoding: V1WebhookHMACEncoding = V1WebhookHMACEncoding.HEX,
source_name: V1WebhookSourceName = V1WebhookSourceName.GENERIC,
) -> AsyncGenerator[V1Webhook, None]:
- client = hatchet.metrics.client()
- webhook_api = WebhookApi(client)
- webhook_request = V1CreateWebhookRequestHMAC(
- sourceName=source_name,
+ incoming_webhook = hatchet.webhooks.create(
+ source_name=source_name,
name=f"test-webhook-hmac-{test_run_id}",
- eventKeyExpression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
- authType="HMAC",
+ event_key_expression=f"'{hatchet.config.apply_namespace('webhook')}:' + input.type",
auth=V1WebhookHMACAuth(
algorithm=algorithm,
encoding=encoding,
@@ -260,18 +221,10 @@ async def hmac_webhook(
),
)
- incoming_webhook = webhook_api.v1_webhook_create(
- tenant=hatchet.tenant_id,
- v1_create_webhook_request=V1CreateWebhookRequest(webhook_request),
- )
-
try:
yield incoming_webhook
finally:
- webhook_api.v1_webhook_delete(
- tenant=hatchet.tenant_id,
- v1_webhook=incoming_webhook.name,
- )
+ hatchet.webhooks.delete(incoming_webhook.name)
def url(tenant_id: str, webhook_name: str) -> str:
diff --git a/sdks/python/examples/worker.py b/sdks/python/examples/worker.py
index 1a2cd8e9a..6265e26f1 100644
--- a/sdks/python/examples/worker.py
+++ b/sdks/python/examples/worker.py
@@ -39,7 +39,10 @@ from examples.lifespans.simple import lifespan, lifespan_task
from examples.logger.workflow import logging_workflow
from examples.non_retryable.worker import non_retryable_workflow
from examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details
-from examples.return_exceptions.worker import return_exceptions_task
+from examples.return_exceptions.worker import (
+ exception_parsing_workflow,
+ return_exceptions_task,
+)
from examples.run_details.worker import run_detail_test_workflow
from examples.serde.worker import serde_workflow
from examples.simple.worker import simple, simple_durable
@@ -98,6 +101,7 @@ def main() -> None:
webhook_with_scope,
webhook_with_static_payload,
return_exceptions_task,
+ exception_parsing_workflow,
wait_for_sleep_twice,
async_task_with_dependencies,
sync_task_with_dependencies,
diff --git a/sdks/python/hatchet_sdk/__init__.py b/sdks/python/hatchet_sdk/__init__.py
index 43b001b40..32475544d 100644
--- a/sdks/python/hatchet_sdk/__init__.py
+++ b/sdks/python/hatchet_sdk/__init__.py
@@ -106,6 +106,13 @@ from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import (
)
from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic
from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus
+from hatchet_sdk.clients.rest.models.v1_webhook_hmac_algorithm import (
+ V1WebhookHMACAlgorithm,
+)
+from hatchet_sdk.clients.rest.models.v1_webhook_hmac_encoding import (
+ V1WebhookHMACEncoding,
+)
+from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName
from hatchet_sdk.clients.rest.models.worker_list import WorkerList
from hatchet_sdk.clients.rest.models.workflow import Workflow
from hatchet_sdk.clients.rest.models.workflow_deployment_config import (
@@ -273,6 +280,9 @@ __all__ = [
"UserTenantMembershipsList",
"UserTenantPublic",
"V1TaskStatus",
+ "V1WebhookHMACAlgorithm",
+ "V1WebhookHMACEncoding",
+ "V1WebhookSourceName",
"Worker",
"Worker",
"WorkerContext",
diff --git a/sdks/python/hatchet_sdk/client.py b/sdks/python/hatchet_sdk/client.py
index 74b083fa0..a82fca6f1 100644
--- a/sdks/python/hatchet_sdk/client.py
+++ b/sdks/python/hatchet_sdk/client.py
@@ -13,6 +13,7 @@ from hatchet_sdk.features.rate_limits import RateLimitsClient
from hatchet_sdk.features.runs import RunsClient
from hatchet_sdk.features.scheduled import ScheduledClient
from hatchet_sdk.features.tenant import TenantClient
+from hatchet_sdk.features.webhooks import WebhooksClient
from hatchet_sdk.features.workers import WorkersClient
from hatchet_sdk.features.workflows import WorkflowsClient
@@ -55,5 +56,6 @@ class Client:
)
self.scheduled = ScheduledClient(self.config)
self.tenant = TenantClient(self.config)
+ self.webhooks = WebhooksClient(self.config)
self.workers = WorkersClient(self.config)
self.workflows = WorkflowsClient(self.config)
diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_update_webhook_request.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_update_webhook_request.py
index 13d9cff46..88707a626 100644
--- a/sdks/python/hatchet_sdk/clients/rest/models/v1_update_webhook_request.py
+++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_update_webhook_request.py
@@ -27,7 +27,8 @@ class V1UpdateWebhookRequest(BaseModel):
V1UpdateWebhookRequest
""" # noqa: E501
- event_key_expression: StrictStr = Field(
+ event_key_expression: Optional[StrictStr] = Field(
+ default=None,
description="The CEL expression to use for the event key. This is used to create the event key from the webhook payload.",
alias="eventKeyExpression",
)
diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_webhook_source_name.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_webhook_source_name.py
index b94342fe9..141c004fd 100644
--- a/sdks/python/hatchet_sdk/clients/rest/models/v1_webhook_source_name.py
+++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_webhook_source_name.py
@@ -30,6 +30,7 @@ class V1WebhookSourceName(str, Enum):
STRIPE = "STRIPE"
SLACK = "SLACK"
LINEAR = "LINEAR"
+ SVIX = "SVIX"
@classmethod
def from_json(cls, json_str: str) -> Self:
diff --git a/sdks/python/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py b/sdks/python/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py
index 45857be1a..1ff659a9e 100644
--- a/sdks/python/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py
+++ b/sdks/python/hatchet_sdk/clients/rest/models/worker_runtime_sdks.py
@@ -28,6 +28,7 @@ class WorkerRuntimeSDKs(str, Enum):
GOLANG = "GOLANG"
PYTHON = "PYTHON"
TYPESCRIPT = "TYPESCRIPT"
+ RUBY = "RUBY"
@classmethod
def from_json(cls, json_str: str) -> Self:
diff --git a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py
index 4dbd5d84e..07e478445 100644
--- a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py
+++ b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.py
@@ -25,7 +25,7 @@ _sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"Z\n\x0cWorkerLabels\x12\x16\n\tstr_value\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tint_value\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0c\n\n_str_valueB\x0c\n\n_int_value\"\xcc\x01\n\x0bRuntimeInfo\x12\x18\n\x0bsdk_version\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x08language\x18\x02 \x01(\x0e\x32\x05.SDKSH\x01\x88\x01\x01\x12\x1d\n\x10language_version\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02os\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x12\n\x05\x65xtra\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_sdk_versionB\x0b\n\t_languageB\x13\n\x11_language_versionB\x05\n\x03_osB\x08\n\x06_extra\"\xc1\x02\n\x15WorkerRegisterRequest\x12\x13\n\x0bworker_name\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x12\n\x05slots\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x17\n\nwebhook_id\x18\x06 \x01(\tH\x01\x88\x01\x01\x12\'\n\x0cruntime_info\x18\x07 \x01(\x0b\x32\x0c.RuntimeInfoH\x02\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\x08\n\x06_slotsB\r\n\x0b_webhook_idB\x0f\n\r_runtime_info\"S\n\x16WorkerRegisterResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\x12\x13\n\x0bworker_name\x18\x03 \x01(\t\"\xa4\x01\n\x19UpsertWorkerLabelsRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"B\n\x1aUpsertWorkerLabelsResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\x98\x05\n\x0e\x41ssignedAction\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 
\x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x04 \x01(\t\x12\x10\n\x08job_name\x18\x05 \x01(\t\x12\x12\n\njob_run_id\x18\x06 \x01(\t\x12\x0f\n\x07task_id\x18\x07 \x01(\t\x12\x1c\n\x14task_run_external_id\x18\x08 \x01(\t\x12\x11\n\taction_id\x18\t \x01(\t\x12 \n\x0b\x61\x63tion_type\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x16\n\x0e\x61\x63tion_payload\x18\x0b \x01(\t\x12\x11\n\ttask_name\x18\x0c \x01(\t\x12\x13\n\x0bretry_count\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x12\x10\n\x08priority\x18\x12 \x01(\x05\x12\x18\n\x0bworkflow_id\x18\x13 \x01(\tH\x04\x88\x01\x01\x12 \n\x13workflow_version_id\x18\x14 \x01(\tH\x05\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_idB\x0e\n\x0c_workflow_idB\x16\n\x14_workflow_version_id\"(\n\x13WorkerListenRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"-\n\x18WorkerUnsubscribeRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"A\n\x19WorkerUnsubscribeResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xec\x01\n\x13GroupKeyActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 \x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x11\n\taction_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\nevent_type\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x15\n\revent_payload\x18\x07 \x01(\t\"\xde\x02\n\x0fStepActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x0f\n\x07task_id\x18\x04 \x01(\t\x12\x1c\n\x14task_run_external_id\x18\x05 
\x01(\t\x12\x11\n\taction_id\x18\x06 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\nevent_type\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x15\n\revent_payload\x18\t \x01(\t\x12\x18\n\x0bretry_count\x18\n \x01(\x05H\x00\x88\x01\x01\x12\x1d\n\x10should_not_retry\x18\x0b \x01(\x08H\x01\x88\x01\x01\x42\x0e\n\x0c_retry_countB\x13\n\x11_should_not_retry\";\n\x13\x41\x63tionEventResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xcc\x01\n SubscribeToWorkflowEventsRequest\x12\x1c\n\x0fworkflow_run_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_meta_key\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\"\n\x15\x61\x64\x64itional_meta_value\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x12\n\x10_workflow_run_idB\x16\n\x14_additional_meta_keyB\x18\n\x16_additional_meta_value\"9\n\x1eSubscribeToWorkflowRunsRequest\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"\xe7\x02\n\rWorkflowEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12$\n\rresource_type\x18\x02 \x01(\x0e\x32\r.ResourceType\x12&\n\nevent_type\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x13\n\x0bresource_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x15\n\revent_payload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x19\n\x0ctask_retries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x18\n\x0bretry_count\x18\t \x01(\x05H\x01\x88\x01\x01\x12\x18\n\x0b\x65vent_index\x18\n \x01(\x03H\x02\x88\x01\x01\x42\x0f\n\r_task_retriesB\x0e\n\x0c_retry_countB\x0e\n\x0c_event_index\"\xac\x01\n\x10WorkflowRunEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12)\n\nevent_type\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x33\n\x0f\x65vent_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x92\x01\n\rStepRunResult\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x11\n\ttask_name\x18\x02 
\x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"c\n\rOverridesData\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x17\n\x0f\x63\x61ller_filename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"W\n\x10HeartbeatRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x30\n\x0cheartbeat_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"S\n\x15RefreshTimeoutRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x1c\n\x14increment_timeout_by\x18\x02 \x01(\t\"H\n\x16RefreshTimeoutResponse\x12.\n\ntimeout_at\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"2\n\x12ReleaseSlotRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*7\n\x04SDKS\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02GO\x10\x01\x12\n\n\x06PYTHON\x10\x02\x12\x0e\n\nTYPESCRIPT\x10\x03*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 
\n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3')
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x64ispatcher.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"Z\n\x0cWorkerLabels\x12\x16\n\tstr_value\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x16\n\tint_value\x18\x02 \x01(\x05H\x01\x88\x01\x01\x42\x0c\n\n_str_valueB\x0c\n\n_int_value\"\xcc\x01\n\x0bRuntimeInfo\x12\x18\n\x0bsdk_version\x18\x01 \x01(\tH\x00\x88\x01\x01\x12\x1c\n\x08language\x18\x02 \x01(\x0e\x32\x05.SDKSH\x01\x88\x01\x01\x12\x1d\n\x10language_version\x18\x03 \x01(\tH\x02\x88\x01\x01\x12\x0f\n\x02os\x18\x04 \x01(\tH\x03\x88\x01\x01\x12\x12\n\x05\x65xtra\x18\x05 \x01(\tH\x04\x88\x01\x01\x42\x0e\n\x0c_sdk_versionB\x0b\n\t_languageB\x13\n\x11_language_versionB\x05\n\x03_osB\x08\n\x06_extra\"\xc1\x02\n\x15WorkerRegisterRequest\x12\x13\n\x0bworker_name\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x63tions\x18\x02 \x03(\t\x12\x10\n\x08services\x18\x03 \x03(\t\x12\x12\n\x05slots\x18\x04 \x01(\x05H\x00\x88\x01\x01\x12\x32\n\x06labels\x18\x05 \x03(\x0b\x32\".WorkerRegisterRequest.LabelsEntry\x12\x17\n\nwebhook_id\x18\x06 \x01(\tH\x01\x88\x01\x01\x12\'\n\x0cruntime_info\x18\x07 \x01(\x0b\x32\x0c.RuntimeInfoH\x02\x88\x01\x01\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\x42\x08\n\x06_slotsB\r\n\x0b_webhook_idB\x0f\n\r_runtime_info\"S\n\x16WorkerRegisterResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\x12\x13\n\x0bworker_name\x18\x03 \x01(\t\"\xa4\x01\n\x19UpsertWorkerLabelsRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x36\n\x06labels\x18\x02 \x03(\x0b\x32&.UpsertWorkerLabelsRequest.LabelsEntry\x1a<\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1c\n\x05value\x18\x02 \x01(\x0b\x32\r.WorkerLabels:\x02\x38\x01\"B\n\x1aUpsertWorkerLabelsResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\x98\x05\n\x0e\x41ssignedAction\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 
\x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x0e\n\x06job_id\x18\x04 \x01(\t\x12\x10\n\x08job_name\x18\x05 \x01(\t\x12\x12\n\njob_run_id\x18\x06 \x01(\t\x12\x0f\n\x07task_id\x18\x07 \x01(\t\x12\x1c\n\x14task_run_external_id\x18\x08 \x01(\t\x12\x11\n\taction_id\x18\t \x01(\t\x12 \n\x0b\x61\x63tion_type\x18\n \x01(\x0e\x32\x0b.ActionType\x12\x16\n\x0e\x61\x63tion_payload\x18\x0b \x01(\t\x12\x11\n\ttask_name\x18\x0c \x01(\t\x12\x13\n\x0bretry_count\x18\r \x01(\x05\x12 \n\x13\x61\x64\x64itional_metadata\x18\x0e \x01(\tH\x00\x88\x01\x01\x12!\n\x14\x63hild_workflow_index\x18\x0f \x01(\x05H\x01\x88\x01\x01\x12\x1f\n\x12\x63hild_workflow_key\x18\x10 \x01(\tH\x02\x88\x01\x01\x12#\n\x16parent_workflow_run_id\x18\x11 \x01(\tH\x03\x88\x01\x01\x12\x10\n\x08priority\x18\x12 \x01(\x05\x12\x18\n\x0bworkflow_id\x18\x13 \x01(\tH\x04\x88\x01\x01\x12 \n\x13workflow_version_id\x18\x14 \x01(\tH\x05\x88\x01\x01\x42\x16\n\x14_additional_metadataB\x17\n\x15_child_workflow_indexB\x15\n\x13_child_workflow_keyB\x19\n\x17_parent_workflow_run_idB\x0e\n\x0c_workflow_idB\x16\n\x14_workflow_version_id\"(\n\x13WorkerListenRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"-\n\x18WorkerUnsubscribeRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\"A\n\x19WorkerUnsubscribeResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xec\x01\n\x13GroupKeyActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x17\n\x0fworkflow_run_id\x18\x02 \x01(\t\x12\x1c\n\x14get_group_key_run_id\x18\x03 \x01(\t\x12\x11\n\taction_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\nevent_type\x18\x06 \x01(\x0e\x32\x18.GroupKeyActionEventType\x12\x15\n\revent_payload\x18\x07 \x01(\t\"\xde\x02\n\x0fStepActionEvent\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x0e\n\x06job_id\x18\x02 \x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x0f\n\x07task_id\x18\x04 \x01(\t\x12\x1c\n\x14task_run_external_id\x18\x05 
\x01(\t\x12\x11\n\taction_id\x18\x06 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12(\n\nevent_type\x18\x08 \x01(\x0e\x32\x14.StepActionEventType\x12\x15\n\revent_payload\x18\t \x01(\t\x12\x18\n\x0bretry_count\x18\n \x01(\x05H\x00\x88\x01\x01\x12\x1d\n\x10should_not_retry\x18\x0b \x01(\x08H\x01\x88\x01\x01\x42\x0e\n\x0c_retry_countB\x13\n\x11_should_not_retry\";\n\x13\x41\x63tionEventResponse\x12\x11\n\ttenant_id\x18\x01 \x01(\t\x12\x11\n\tworker_id\x18\x02 \x01(\t\"\xcc\x01\n SubscribeToWorkflowEventsRequest\x12\x1c\n\x0fworkflow_run_id\x18\x01 \x01(\tH\x00\x88\x01\x01\x12 \n\x13\x61\x64\x64itional_meta_key\x18\x02 \x01(\tH\x01\x88\x01\x01\x12\"\n\x15\x61\x64\x64itional_meta_value\x18\x03 \x01(\tH\x02\x88\x01\x01\x42\x12\n\x10_workflow_run_idB\x16\n\x14_additional_meta_keyB\x18\n\x16_additional_meta_value\"9\n\x1eSubscribeToWorkflowRunsRequest\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\"\xe7\x02\n\rWorkflowEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12$\n\rresource_type\x18\x02 \x01(\x0e\x32\r.ResourceType\x12&\n\nevent_type\x18\x03 \x01(\x0e\x32\x12.ResourceEventType\x12\x13\n\x0bresource_id\x18\x04 \x01(\t\x12\x33\n\x0f\x65vent_timestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x15\n\revent_payload\x18\x06 \x01(\t\x12\x0e\n\x06hangup\x18\x07 \x01(\x08\x12\x19\n\x0ctask_retries\x18\x08 \x01(\x05H\x00\x88\x01\x01\x12\x18\n\x0bretry_count\x18\t \x01(\x05H\x01\x88\x01\x01\x12\x18\n\x0b\x65vent_index\x18\n \x01(\x03H\x02\x88\x01\x01\x42\x0f\n\r_task_retriesB\x0e\n\x0c_retry_countB\x0e\n\x0c_event_index\"\xac\x01\n\x10WorkflowRunEvent\x12\x17\n\x0fworkflow_run_id\x18\x01 \x01(\t\x12)\n\nevent_type\x18\x02 \x01(\x0e\x32\x15.WorkflowRunEventType\x12\x33\n\x0f\x65vent_timestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x07results\x18\x04 \x03(\x0b\x32\x0e.StepRunResult\"\x92\x01\n\rStepRunResult\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x11\n\ttask_name\x18\x02 
\x01(\t\x12\x12\n\njob_run_id\x18\x03 \x01(\t\x12\x12\n\x05\x65rror\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x13\n\x06output\x18\x05 \x01(\tH\x01\x88\x01\x01\x42\x08\n\x06_errorB\t\n\x07_output\"c\n\rOverridesData\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\x12\x17\n\x0f\x63\x61ller_filename\x18\x04 \x01(\t\"\x17\n\x15OverridesDataResponse\"W\n\x10HeartbeatRequest\x12\x11\n\tworker_id\x18\x01 \x01(\t\x12\x30\n\x0cheartbeat_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x13\n\x11HeartbeatResponse\"S\n\x15RefreshTimeoutRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\x12\x1c\n\x14increment_timeout_by\x18\x02 \x01(\t\"H\n\x16RefreshTimeoutResponse\x12.\n\ntimeout_at\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"2\n\x12ReleaseSlotRequest\x12\x1c\n\x14task_run_external_id\x18\x01 \x01(\t\"\x15\n\x13ReleaseSlotResponse*A\n\x04SDKS\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02GO\x10\x01\x12\n\n\x06PYTHON\x10\x02\x12\x0e\n\nTYPESCRIPT\x10\x03\x12\x08\n\x04RUBY\x10\x04*N\n\nActionType\x12\x12\n\x0eSTART_STEP_RUN\x10\x00\x12\x13\n\x0f\x43\x41NCEL_STEP_RUN\x10\x01\x12\x17\n\x13START_GET_GROUP_KEY\x10\x02*\xa2\x01\n\x17GroupKeyActionEventType\x12 \n\x1cGROUP_KEY_EVENT_TYPE_UNKNOWN\x10\x00\x12 \n\x1cGROUP_KEY_EVENT_TYPE_STARTED\x10\x01\x12\"\n\x1eGROUP_KEY_EVENT_TYPE_COMPLETED\x10\x02\x12\x1f\n\x1bGROUP_KEY_EVENT_TYPE_FAILED\x10\x03*\xac\x01\n\x13StepActionEventType\x12\x1b\n\x17STEP_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1b\n\x17STEP_EVENT_TYPE_STARTED\x10\x01\x12\x1d\n\x19STEP_EVENT_TYPE_COMPLETED\x10\x02\x12\x1a\n\x16STEP_EVENT_TYPE_FAILED\x10\x03\x12 
\n\x1cSTEP_EVENT_TYPE_ACKNOWLEDGED\x10\x04*e\n\x0cResourceType\x12\x19\n\x15RESOURCE_TYPE_UNKNOWN\x10\x00\x12\x1a\n\x16RESOURCE_TYPE_STEP_RUN\x10\x01\x12\x1e\n\x1aRESOURCE_TYPE_WORKFLOW_RUN\x10\x02*\xfe\x01\n\x11ResourceEventType\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_UNKNOWN\x10\x00\x12\x1f\n\x1bRESOURCE_EVENT_TYPE_STARTED\x10\x01\x12!\n\x1dRESOURCE_EVENT_TYPE_COMPLETED\x10\x02\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_FAILED\x10\x03\x12!\n\x1dRESOURCE_EVENT_TYPE_CANCELLED\x10\x04\x12!\n\x1dRESOURCE_EVENT_TYPE_TIMED_OUT\x10\x05\x12\x1e\n\x1aRESOURCE_EVENT_TYPE_STREAM\x10\x06*<\n\x14WorkflowRunEventType\x12$\n WORKFLOW_RUN_EVENT_TYPE_FINISHED\x10\x00\x32\xf8\x06\n\nDispatcher\x12=\n\x08Register\x12\x16.WorkerRegisterRequest\x1a\x17.WorkerRegisterResponse\"\x00\x12\x33\n\x06Listen\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x35\n\x08ListenV2\x12\x14.WorkerListenRequest\x1a\x0f.AssignedAction\"\x00\x30\x01\x12\x34\n\tHeartbeat\x12\x11.HeartbeatRequest\x1a\x12.HeartbeatResponse\"\x00\x12R\n\x19SubscribeToWorkflowEvents\x12!.SubscribeToWorkflowEventsRequest\x1a\x0e.WorkflowEvent\"\x00\x30\x01\x12S\n\x17SubscribeToWorkflowRuns\x12\x1f.SubscribeToWorkflowRunsRequest\x1a\x11.WorkflowRunEvent\"\x00(\x01\x30\x01\x12?\n\x13SendStepActionEvent\x12\x10.StepActionEvent\x1a\x14.ActionEventResponse\"\x00\x12G\n\x17SendGroupKeyActionEvent\x12\x14.GroupKeyActionEvent\x1a\x14.ActionEventResponse\"\x00\x12<\n\x10PutOverridesData\x12\x0e.OverridesData\x1a\x16.OverridesDataResponse\"\x00\x12\x46\n\x0bUnsubscribe\x12\x19.WorkerUnsubscribeRequest\x1a\x1a.WorkerUnsubscribeResponse\"\x00\x12\x43\n\x0eRefreshTimeout\x12\x16.RefreshTimeoutRequest\x1a\x17.RefreshTimeoutResponse\"\x00\x12:\n\x0bReleaseSlot\x12\x13.ReleaseSlotRequest\x1a\x14.ReleaseSlotResponse\"\x00\x12O\n\x12UpsertWorkerLabels\x12\x1a.UpsertWorkerLabelsRequest\x1a\x1b.UpsertWorkerLabelsResponse\"\x00\x42GZEgithub.com/hatchet-dev/hatchet/internal/services/dispatcher/contractsb\x06proto3')
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -38,19 +38,19 @@ if not _descriptor._USE_C_DESCRIPTORS:
_globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._loaded_options = None
_globals['_UPSERTWORKERLABELSREQUEST_LABELSENTRY']._serialized_options = b'8\001'
_globals['_SDKS']._serialized_start=3894
- _globals['_SDKS']._serialized_end=3949
- _globals['_ACTIONTYPE']._serialized_start=3951
- _globals['_ACTIONTYPE']._serialized_end=4029
- _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=4032
- _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=4194
- _globals['_STEPACTIONEVENTTYPE']._serialized_start=4197
- _globals['_STEPACTIONEVENTTYPE']._serialized_end=4369
- _globals['_RESOURCETYPE']._serialized_start=4371
- _globals['_RESOURCETYPE']._serialized_end=4472
- _globals['_RESOURCEEVENTTYPE']._serialized_start=4475
- _globals['_RESOURCEEVENTTYPE']._serialized_end=4729
- _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4731
- _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4791
+ _globals['_SDKS']._serialized_end=3959
+ _globals['_ACTIONTYPE']._serialized_start=3961
+ _globals['_ACTIONTYPE']._serialized_end=4039
+ _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_start=4042
+ _globals['_GROUPKEYACTIONEVENTTYPE']._serialized_end=4204
+ _globals['_STEPACTIONEVENTTYPE']._serialized_start=4207
+ _globals['_STEPACTIONEVENTTYPE']._serialized_end=4379
+ _globals['_RESOURCETYPE']._serialized_start=4381
+ _globals['_RESOURCETYPE']._serialized_end=4482
+ _globals['_RESOURCEEVENTTYPE']._serialized_start=4485
+ _globals['_RESOURCEEVENTTYPE']._serialized_end=4739
+ _globals['_WORKFLOWRUNEVENTTYPE']._serialized_start=4741
+ _globals['_WORKFLOWRUNEVENTTYPE']._serialized_end=4801
_globals['_WORKERLABELS']._serialized_start=53
_globals['_WORKERLABELS']._serialized_end=143
_globals['_RUNTIMEINFO']._serialized_start=146
@@ -107,6 +107,6 @@ if not _descriptor._USE_C_DESCRIPTORS:
_globals['_RELEASESLOTREQUEST']._serialized_end=3869
_globals['_RELEASESLOTRESPONSE']._serialized_start=3871
_globals['_RELEASESLOTRESPONSE']._serialized_end=3892
- _globals['_DISPATCHER']._serialized_start=4794
- _globals['_DISPATCHER']._serialized_end=5682
+ _globals['_DISPATCHER']._serialized_start=4804
+ _globals['_DISPATCHER']._serialized_end=5692
# @@protoc_insertion_point(module_scope)
diff --git a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi
index 1e872592d..74d27a165 100644
--- a/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi
+++ b/sdks/python/hatchet_sdk/contracts/dispatcher_pb2.pyi
@@ -16,6 +16,7 @@ class SDKS(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
GO: _ClassVar[SDKS]
PYTHON: _ClassVar[SDKS]
TYPESCRIPT: _ClassVar[SDKS]
+ RUBY: _ClassVar[SDKS]
class ActionType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper):
__slots__ = ()
@@ -61,6 +62,7 @@ UNKNOWN: SDKS
GO: SDKS
PYTHON: SDKS
TYPESCRIPT: SDKS
+RUBY: SDKS
START_STEP_RUN: ActionType
CANCEL_STEP_RUN: ActionType
START_GET_GROUP_KEY: ActionType
diff --git a/sdks/python/hatchet_sdk/exceptions.py b/sdks/python/hatchet_sdk/exceptions.py
index b7898e6bd..3ecc0c3e6 100644
--- a/sdks/python/hatchet_sdk/exceptions.py
+++ b/sdks/python/hatchet_sdk/exceptions.py
@@ -38,27 +38,26 @@ class TaskRunError(Exception):
return str(self)
def serialize(self, include_metadata: bool) -> str:
- if not self.exc_type or not self.exc:
- return ""
-
- metadata = json.dumps(
- {
- TASK_RUN_ERROR_METADATA_KEY: {
- "task_run_external_id": self.task_run_external_id,
- }
- },
- indent=None,
- )
-
+ exc_type = self.exc_type.replace(": ", ":::")
+ exc = self.exc.replace("\n", "\\\n")
+ header = f"{exc_type}: {exc}" if exc_type and exc else f"{exc_type}{exc}"
result = (
- self.exc_type.replace(": ", ":::")
- + ": "
- + self.exc.replace("\n", "\\\n")
- + "\n"
- + self.trace
+ f"{header}\n{self.trace}"
+ if header and self.trace
+ else f"{header}{self.trace}"
)
+ if result == "":
+ return result
if include_metadata:
+ metadata = json.dumps(
+ {
+ TASK_RUN_ERROR_METADATA_KEY: {
+ "task_run_external_id": self.task_run_external_id,
+ }
+ },
+ indent=None,
+ )
return result + "\n\n" + metadata
return result
diff --git a/sdks/python/hatchet_sdk/features/webhooks.py b/sdks/python/hatchet_sdk/features/webhooks.py
new file mode 100644
index 000000000..baf93e35a
--- /dev/null
+++ b/sdks/python/hatchet_sdk/features/webhooks.py
@@ -0,0 +1,202 @@
+from __future__ import annotations
+
+import asyncio
+from typing import Any
+
+from hatchet_sdk.clients.rest.api.webhook_api import WebhookApi
+from hatchet_sdk.clients.rest.api_client import ApiClient
+from hatchet_sdk.clients.rest.models.v1_create_webhook_request import (
+ V1CreateWebhookRequest,
+)
+from hatchet_sdk.clients.rest.models.v1_create_webhook_request_api_key import (
+ V1CreateWebhookRequestAPIKey,
+)
+from hatchet_sdk.clients.rest.models.v1_create_webhook_request_base import (
+ V1CreateWebhookRequestBase,
+)
+from hatchet_sdk.clients.rest.models.v1_create_webhook_request_basic_auth import (
+ V1CreateWebhookRequestBasicAuth,
+)
+from hatchet_sdk.clients.rest.models.v1_create_webhook_request_hmac import (
+ V1CreateWebhookRequestHMAC,
+)
+from hatchet_sdk.clients.rest.models.v1_update_webhook_request import (
+ V1UpdateWebhookRequest,
+)
+from hatchet_sdk.clients.rest.models.v1_webhook import V1Webhook
+from hatchet_sdk.clients.rest.models.v1_webhook_api_key_auth import V1WebhookAPIKeyAuth
+from hatchet_sdk.clients.rest.models.v1_webhook_auth_type import V1WebhookAuthType
+from hatchet_sdk.clients.rest.models.v1_webhook_basic_auth import V1WebhookBasicAuth
+from hatchet_sdk.clients.rest.models.v1_webhook_hmac_auth import V1WebhookHMACAuth
+from hatchet_sdk.clients.rest.models.v1_webhook_list import V1WebhookList
+from hatchet_sdk.clients.rest.models.v1_webhook_source_name import V1WebhookSourceName
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry
+from hatchet_sdk.clients.v1.api_client import BaseRestClient
+
+
+class CreateWebhookRequest(V1CreateWebhookRequestBase):
+ auth: V1WebhookBasicAuth | V1WebhookAPIKeyAuth | V1WebhookHMACAuth
+
+ def _to_api_payload(self) -> V1CreateWebhookRequest:
+ payload = self.model_dump(by_alias=True, exclude_none=True)
+ payload["auth"] = self.auth.model_dump(by_alias=True)
+ request_payload: (
+ V1CreateWebhookRequestBasicAuth
+ | V1CreateWebhookRequestAPIKey
+ | V1CreateWebhookRequestHMAC
+ | None
+ ) = None
+ if isinstance(self.auth, V1WebhookBasicAuth):
+ payload["authType"] = V1WebhookAuthType.BASIC
+ request_payload = V1CreateWebhookRequestBasicAuth.from_dict(payload)
+ elif isinstance(self.auth, V1WebhookAPIKeyAuth):
+ payload["authType"] = V1WebhookAuthType.API_KEY
+ request_payload = V1CreateWebhookRequestAPIKey.from_dict(payload)
+ else:
+ payload["authType"] = V1WebhookAuthType.HMAC
+ request_payload = V1CreateWebhookRequestHMAC.from_dict(payload)
+ if request_payload is None:
+ raise ValueError("failed to build create webhook request from payload")
+ return V1CreateWebhookRequest(request_payload)
+
+
+class WebhooksClient(BaseRestClient):
+ """
+ The webhooks client provides methods for managing incoming webhooks in Hatchet.
+
+ Webhooks allow external systems to trigger Hatchet workflows by sending HTTP
+ requests to dedicated endpoints. This enables real-time integration with
+ third-party services like GitHub, Stripe, Slack, or any system that can send
+ webhook events.
+ """
+
+ def _wa(self, client: ApiClient) -> WebhookApi:
+ return WebhookApi(client)
+
+ async def aio_list(
+ self,
+ limit: int | None = None,
+ offset: int | None = None,
+ webhook_names: list[str] | None = None,
+ source_names: list[V1WebhookSourceName] | None = None,
+ ) -> V1WebhookList:
+ return await asyncio.to_thread(
+ self.list, limit, offset, webhook_names, source_names
+ )
+
+ def list(
+ self,
+ limit: int | None = None,
+ offset: int | None = None,
+ webhook_names: list[str] | None = None,
+ source_names: list[V1WebhookSourceName] | None = None,
+ ) -> V1WebhookList:
+ with self.client() as client:
+ v1_webhook_list = tenacity_retry(
+ self._wa(client).v1_webhook_list, self.client_config.tenacity
+ )
+ return v1_webhook_list(
+ tenant=self.tenant_id,
+ limit=limit,
+ offset=offset,
+ webhook_names=webhook_names,
+ source_names=source_names,
+ )
+
+ def get(self, webhook_name: str) -> V1Webhook:
+ with self.client() as client:
+ v1_webhook_get = tenacity_retry(
+ self._wa(client).v1_webhook_get, self.client_config.tenacity
+ )
+ return v1_webhook_get(
+ tenant=self.tenant_id,
+ v1_webhook=webhook_name,
+ )
+
+ async def aio_get(self, webhook_name: str) -> V1Webhook:
+ return await asyncio.to_thread(self.get, webhook_name)
+
+ def create(
+ self,
+ source_name: V1WebhookSourceName,
+ name: str,
+ event_key_expression: str,
+ auth: V1WebhookBasicAuth | V1WebhookAPIKeyAuth | V1WebhookHMACAuth,
+ scope_expression: str | None = None,
+ static_payload: dict[str, Any] | None = None,
+ ) -> V1Webhook:
+ validated_payload = CreateWebhookRequest(
+ sourceName=source_name,
+ name=name,
+ eventKeyExpression=event_key_expression,
+ scopeExpression=scope_expression,
+ staticPayload=static_payload,
+ auth=auth,
+ )
+ with self.client() as client:
+ return self._wa(client).v1_webhook_create(
+ tenant=self.tenant_id,
+ v1_create_webhook_request=validated_payload._to_api_payload(),
+ )
+
+ async def aio_create(
+ self,
+ source_name: V1WebhookSourceName,
+ name: str,
+ event_key_expression: str,
+ auth: V1WebhookBasicAuth | V1WebhookAPIKeyAuth | V1WebhookHMACAuth,
+ scope_expression: str | None = None,
+ static_payload: dict[str, Any] | None = None,
+ ) -> V1Webhook:
+ return await asyncio.to_thread(
+ self.create,
+ source_name,
+ name,
+ event_key_expression,
+ auth,
+ scope_expression,
+ static_payload,
+ )
+
+ def update(
+ self,
+ webhook_name: str,
+ event_key_expression: str | None = None,
+ scope_expression: str | None = None,
+ static_payload: dict[str, Any] | None = None,
+ ) -> V1Webhook:
+ with self.client() as client:
+ return self._wa(client).v1_webhook_update(
+ tenant=self.tenant_id,
+ v1_webhook=webhook_name,
+ v1_update_webhook_request=V1UpdateWebhookRequest(
+ eventKeyExpression=event_key_expression,
+ scopeExpression=scope_expression,
+ staticPayload=static_payload,
+ ),
+ )
+
+ async def aio_update(
+ self,
+ webhook_name: str,
+ event_key_expression: str | None = None,
+ scope_expression: str | None = None,
+ static_payload: dict[str, Any] | None = None,
+ ) -> V1Webhook:
+ return await asyncio.to_thread(
+ self.update,
+ webhook_name,
+ event_key_expression,
+ scope_expression,
+ static_payload,
+ )
+
+ def delete(self, webhook_name: str) -> V1Webhook:
+ with self.client() as client:
+ return self._wa(client).v1_webhook_delete(
+ tenant=self.tenant_id,
+ v1_webhook=webhook_name,
+ )
+
+ async def aio_delete(self, webhook_name: str) -> V1Webhook:
+ return await asyncio.to_thread(self.delete, webhook_name)
diff --git a/sdks/python/hatchet_sdk/hatchet.py b/sdks/python/hatchet_sdk/hatchet.py
index 65c24db68..1d76fac4c 100644
--- a/sdks/python/hatchet_sdk/hatchet.py
+++ b/sdks/python/hatchet_sdk/hatchet.py
@@ -21,6 +21,7 @@ from hatchet_sdk.features.rate_limits import RateLimitsClient
from hatchet_sdk.features.runs import RunsClient
from hatchet_sdk.features.scheduled import ScheduledClient
from hatchet_sdk.features.stubs import StubsClient
+from hatchet_sdk.features.webhooks import WebhooksClient
from hatchet_sdk.features.workers import WorkersClient
from hatchet_sdk.features.workflows import WorkflowsClient
from hatchet_sdk.labels import DesiredWorkerLabel
@@ -122,6 +123,13 @@ class Hatchet:
"""
return self._client.scheduled
+ @property
+ def webhooks(self) -> WebhooksClient:
+ """
+ The webhooks client provides methods for managing webhook endpoints in Hatchet.
+ """
+ return self._client.webhooks
+
@property
def workers(self) -> WorkersClient:
"""
diff --git a/sdks/python/hatchet_sdk/worker/worker.py b/sdks/python/hatchet_sdk/worker/worker.py
index a207b2917..331af7504 100644
--- a/sdks/python/hatchet_sdk/worker/worker.py
+++ b/sdks/python/hatchet_sdk/worker/worker.py
@@ -129,9 +129,8 @@ class Worker:
def register_workflow(self, workflow: BaseWorkflow[Any]) -> None:
if not workflow.tasks:
- raise ValueError(
- "workflow must have at least one task registered before registering"
- )
+ msg = f"failed to register workflow: {workflow.name}. Workflows must have at least one task registered before registering"
+ raise ValueError(msg)
try:
self.client.admin.put_workflow(workflow.to_proto())
diff --git a/sdks/python/pyproject.toml b/sdks/python/pyproject.toml
index 8585ce7b0..89c49d4ef 100644
--- a/sdks/python/pyproject.toml
+++ b/sdks/python/pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "hatchet-sdk"
-version = "1.23.3"
+version = "1.24.0"
description = "This is the official Python SDK for Hatchet, a distributed, fault-tolerant task queue. The SDK allows you to easily integrate Hatchet's task scheduling and workflow orchestration capabilities into your Python applications."
authors = [
"Alexander Belanger ",
diff --git a/sdks/python/tests/test_tenacity_utils.py b/sdks/python/tests/test_tenacity_utils.py
new file mode 100644
index 000000000..4b741c031
--- /dev/null
+++ b/sdks/python/tests/test_tenacity_utils.py
@@ -0,0 +1,221 @@
+"""Unit tests for the tenacity retry predicate.
+
+These tests verify which exceptions trigger retries and which do not.
+The retry predicate is used by the SDK to determine whether to retry
+failed API calls.
+
+Current retry behavior (as of this PR):
+- REST: ServiceException (5xx) and NotFoundException (404) are retried
+- REST: Transport errors (RestTimeoutError, etc.) are not retried
+- REST: Other 4xx errors are not retried
+- gRPC: Most errors are retried except specific status codes
+"""
+
+import grpc
+import pytest
+
+from hatchet_sdk.clients.rest.exceptions import (
+ BadRequestException,
+ ForbiddenException,
+ NotFoundException,
+ RestConnectionError,
+ RestProtocolError,
+ RestTimeoutError,
+ RestTLSError,
+ RestTransportError,
+ ServiceException,
+ UnauthorizedException,
+)
+from hatchet_sdk.clients.rest.tenacity_utils import tenacity_should_retry
+
+# --- REST exception retry predicate tests ---
+
+
+@pytest.mark.parametrize(
+ ("exc", "expected"),
+ [
+ pytest.param(
+ ServiceException(status=500, reason="Internal Server Error"),
+ True,
+ id="ServiceException (HTTP 5xx) should be retried",
+ ),
+ pytest.param(
+ NotFoundException(status=404, reason="Not Found"),
+ True,
+ id="NotFoundException (HTTP 404) should be retried",
+ ),
+ pytest.param(
+ BadRequestException(status=400, reason="Bad Request"),
+ False,
+ id="BadRequestException (HTTP 400) should not be retried",
+ ),
+ pytest.param(
+ UnauthorizedException(status=401, reason="Unauthorized"),
+ False,
+ id="UnauthorizedException (HTTP 401) should not be retried",
+ ),
+ pytest.param(
+ ForbiddenException(status=403, reason="Forbidden"),
+ False,
+ id="ForbiddenException (HTTP 403) should not be retried",
+ ),
+ ],
+)
+def test_rest__exception_retry_behavior(exc: BaseException, expected: bool) -> None:
+ """Test that REST exceptions have the expected retry behavior."""
+ assert tenacity_should_retry(exc) is expected
+
+
+# --- REST transport error retry predicate tests ---
+
+
+@pytest.mark.parametrize(
+ ("exc", "expected"),
+ [
+ pytest.param(
+ RestTransportError(status=0, reason="Transport error"),
+ False,
+ id="RestTransportError (base class) should not be retried",
+ ),
+ pytest.param(
+ RestTimeoutError(status=0, reason="Connection timed out"),
+ False,
+ id="RestTimeoutError should not be retried",
+ ),
+ pytest.param(
+ RestConnectionError(status=0, reason="Connection refused"),
+ False,
+ id="RestConnectionError should not be retried",
+ ),
+ pytest.param(
+ RestTLSError(status=0, reason="SSL certificate verify failed"),
+ False,
+ id="RestTLSError should not be retried",
+ ),
+ pytest.param(
+ RestProtocolError(status=0, reason="Connection aborted"),
+ False,
+ id="RestProtocolError should not be retried",
+ ),
+ ],
+)
+def test_transport__error_retry_behavior(exc: BaseException, expected: bool) -> None:
+ """Test that REST transport errors have the expected retry behavior."""
+ assert tenacity_should_retry(exc) is expected
+
+
+# --- Generic exception retry predicate tests ---
+
+
+@pytest.mark.parametrize(
+ ("exc", "expected"),
+ [
+ pytest.param(
+ RuntimeError("Something went wrong"),
+ False,
+ id="RuntimeError should not be retried",
+ ),
+ pytest.param(
+ ValueError("Invalid value"),
+ False,
+ id="ValueError should not be retried",
+ ),
+ pytest.param(
+ Exception("Generic error"),
+ False,
+ id="Generic Exception should not be retried",
+ ),
+ ],
+)
+def test_generic__exception_retry_behavior(exc: BaseException, expected: bool) -> None:
+ """Test that generic exceptions have the expected retry behavior."""
+ assert tenacity_should_retry(exc) is expected
+
+
+# --- gRPC exception retry predicate tests ---
+
+
+class FakeRpcError(grpc.RpcError):
+ """A fake gRPC RpcError for testing without real gRPC infrastructure."""
+
+ def __init__(self, code: grpc.StatusCode) -> None:
+ self._code = code
+ super().__init__()
+
+ def code(self) -> grpc.StatusCode:
+ return self._code
+
+
+@pytest.mark.parametrize(
+ ("status_code", "expected"),
+ [
+ # Status codes that should be retried (transient/server errors)
+ pytest.param(
+ grpc.StatusCode.UNAVAILABLE,
+ True,
+ id="UNAVAILABLE should be retried (transient error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.DEADLINE_EXCEEDED,
+ True,
+ id="DEADLINE_EXCEEDED should be retried (transient error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.INTERNAL,
+ True,
+ id="INTERNAL should be retried (server error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.RESOURCE_EXHAUSTED,
+ True,
+ id="RESOURCE_EXHAUSTED should be retried",
+ ),
+ pytest.param(
+ grpc.StatusCode.ABORTED,
+ True,
+ id="ABORTED should be retried",
+ ),
+ pytest.param(
+ grpc.StatusCode.UNKNOWN,
+ True,
+ id="UNKNOWN should be retried",
+ ),
+ # Status codes that should not be retried (permanent/client errors)
+ pytest.param(
+ grpc.StatusCode.UNIMPLEMENTED,
+ False,
+ id="UNIMPLEMENTED should not be retried (permanent error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.NOT_FOUND,
+ False,
+ id="NOT_FOUND should not be retried (permanent error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.INVALID_ARGUMENT,
+ False,
+ id="INVALID_ARGUMENT should not be retried (client error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.ALREADY_EXISTS,
+ False,
+ id="ALREADY_EXISTS should not be retried (permanent error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.UNAUTHENTICATED,
+ False,
+ id="UNAUTHENTICATED should not be retried (auth error)",
+ ),
+ pytest.param(
+ grpc.StatusCode.PERMISSION_DENIED,
+ False,
+ id="PERMISSION_DENIED should not be retried (auth error)",
+ ),
+ ],
+)
+def test_grpc__status_code_retry_behavior(
+ status_code: grpc.StatusCode, expected: bool
+) -> None:
+ """Test that gRPC status codes have the expected retry behavior."""
+ exc = FakeRpcError(status_code)
+ assert tenacity_should_retry(exc) is expected
diff --git a/sdks/ruby/examples/.rspec b/sdks/ruby/examples/.rspec
new file mode 100644
index 000000000..79d564947
--- /dev/null
+++ b/sdks/ruby/examples/.rspec
@@ -0,0 +1 @@
+--pattern */*_spec.rb
diff --git a/sdks/ruby/examples/Gemfile b/sdks/ruby/examples/Gemfile
index df0605186..8f4173a76 100644
--- a/sdks/ruby/examples/Gemfile
+++ b/sdks/ruby/examples/Gemfile
@@ -1,3 +1,11 @@
-source 'https://rubygems.org'
+# frozen_string_literal: true
-gem 'hatchet-sdk', path: '../src'
+source "https://rubygems.org"
+
+gem "hatchet-sdk", path: "../src"
+
+# Testing
+gem "rspec", "~> 3.0"
+
+# HTTP for health checks in worker fixture
+gem "net-http"
diff --git a/sdks/ruby/examples/Gemfile.lock b/sdks/ruby/examples/Gemfile.lock
index bbd72999a..b52344e8d 100644
--- a/sdks/ruby/examples/Gemfile.lock
+++ b/sdks/ruby/examples/Gemfile.lock
@@ -1,15 +1,21 @@
PATH
remote: ../src
specs:
- hatchet-sdk (0.1.0.pre.alpha)
+ hatchet-sdk (0.1.0)
+ concurrent-ruby (>= 1.1)
faraday (~> 2.0)
faraday-multipart
+ google-protobuf (~> 4.0)
+ grpc (~> 1.60)
json (~> 2.0)
marcel
GEM
remote: https://rubygems.org/
specs:
+ bigdecimal (4.0.1)
+ concurrent-ruby (1.3.6)
+ diff-lcs (1.6.2)
faraday (2.13.4)
faraday-net_http (>= 2.0, < 3.5)
json
@@ -18,20 +24,57 @@ GEM
multipart-post (~> 2.0)
faraday-net_http (3.4.1)
net-http (>= 0.5.0)
+ google-protobuf (4.33.5)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.33.5-arm64-darwin)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.33.5-x86_64-linux-gnu)
+ bigdecimal
+ rake (>= 13)
+ googleapis-common-protos-types (1.22.0)
+ google-protobuf (~> 4.26)
+ grpc (1.78.0)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.78.0-arm64-darwin)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.78.0-x86_64-linux-gnu)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
json (2.13.2)
logger (1.7.0)
marcel (1.0.4)
multipart-post (2.4.1)
net-http (0.6.0)
uri
+ rake (13.3.1)
+ rspec (3.13.2)
+ rspec-core (~> 3.13.0)
+ rspec-expectations (~> 3.13.0)
+ rspec-mocks (~> 3.13.0)
+ rspec-core (3.13.6)
+ rspec-support (~> 3.13.0)
+ rspec-expectations (3.13.5)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.13.0)
+ rspec-mocks (3.13.7)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.13.0)
+ rspec-support (3.13.7)
uri (1.1.1)
PLATFORMS
arm64-darwin-24
ruby
+ x86_64-linux
DEPENDENCIES
hatchet-sdk!
+ net-http
+ rspec (~> 3.0)
BUNDLED WITH
2.6.9
diff --git a/sdks/ruby/examples/affinity_workers/worker.rb b/sdks/ruby/examples/affinity_workers/worker.rb
new file mode 100644
index 000000000..3777ba699
--- /dev/null
+++ b/sdks/ruby/examples/affinity_workers/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > AffinityWorkflow
+AFFINITY_WORKER_WORKFLOW = HATCHET.workflow(name: "AffinityWorkflow")
+
+# !!
+
+# > AffinityTask
+AFFINITY_WORKER_WORKFLOW.task(
+ :step,
+ desired_worker_labels: {
+ "model" => Hatchet::DesiredWorkerLabel.new(value: "fancy-ai-model-v2", weight: 10),
+ "memory" => Hatchet::DesiredWorkerLabel.new(
+ value: 256,
+ required: true,
+ comparator: :less_than
+ )
+ }
+) do |input, ctx|
+ if ctx.worker.labels["model"] != "fancy-ai-model-v2"
+ ctx.worker.upsert_labels("model" => "unset")
+ # DO WORK TO EVICT OLD MODEL / LOAD NEW MODEL
+ ctx.worker.upsert_labels("model" => "fancy-ai-model-v2")
+ end
+
+ { "worker" => ctx.worker.id }
+end
+
+# !!
+
+# > AffinityWorker
+def main
+ worker = HATCHET.worker(
+ "affinity-worker",
+ slots: 10,
+ labels: {
+ "model" => "fancy-ai-model-v2",
+ "memory" => 512
+ },
+ workflows: [AFFINITY_WORKER_WORKFLOW]
+ )
+ worker.start
+end
+
+# !!
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/api/worker.rb b/sdks/ruby/examples/api/worker.rb
new file mode 100644
index 000000000..5d6336310
--- /dev/null
+++ b/sdks/ruby/examples/api/worker.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+def main
+ workflow_list = HATCHET.workflows.list
+ rows = workflow_list.rows || []
+
+ rows.each do |workflow|
+ puts workflow.name
+ puts workflow.metadata.id
+ puts workflow.metadata.created_at
+ puts workflow.metadata.updated_at
+ end
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/bulk_fanout/test_bulk_fanout_spec.rb b/sdks/ruby/examples/bulk_fanout/test_bulk_fanout_spec.rb
new file mode 100644
index 000000000..48940a306
--- /dev/null
+++ b/sdks/ruby/examples/bulk_fanout/test_bulk_fanout_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "BulkFanoutParent" do
+ it "bulk fans out to child workflows" do
+ result = BULK_PARENT_WF.run({ "n" => 10 })
+
+ results = result["spawn"]["results"]
+ expect(results.length).to eq(10)
+ end
+end
diff --git a/sdks/ruby/examples/bulk_fanout/worker.rb b/sdks/ruby/examples/bulk_fanout/worker.rb
new file mode 100644
index 000000000..c6469e089
--- /dev/null
+++ b/sdks/ruby/examples/bulk_fanout/worker.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > BulkFanoutParent
+BULK_PARENT_WF = HATCHET.workflow(name: "BulkFanoutParent")
+BULK_CHILD_WF = HATCHET.workflow(name: "BulkFanoutChild")
+
+BULK_PARENT_WF.task(:spawn, execution_timeout: 300) do |input, ctx|
+ n = input["n"] || 100
+
+ # Create each workflow run to spawn
+ child_workflow_runs = n.times.map do |i|
+ BULK_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s },
+ key: "child#{i}",
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "hello" => "earth" }
+ )
+ )
+ end
+
+ # Run workflows in bulk to improve performance
+ spawn_results = BULK_CHILD_WF.run_many(child_workflow_runs)
+
+ { "results" => spawn_results }
+end
+
+BULK_CHILD_WF.task(:process) do |input, ctx|
+ puts "child process #{input['a']}"
+ { "status" => "success #{input['a']}" }
+end
+
+BULK_CHILD_WF.task(:process2) do |input, ctx|
+ puts "child process2"
+ { "status2" => "success" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "fanout-worker", slots: 40, workflows: [BULK_PARENT_WF, BULK_CHILD_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/bulk_operations/cancel.rb b/sdks/ruby/examples/bulk_operations/cancel.rb
new file mode 100644
index 000000000..23f35fba4
--- /dev/null
+++ b/sdks/ruby/examples/bulk_operations/cancel.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+# > Setup
+hatchet = Hatchet::Client.new
+
+workflows = hatchet.workflows.list
+
+workflow = workflows.rows.first
+# !!
+
+# > List runs
+workflow_runs = hatchet.runs.list(workflow_ids: [workflow.metadata.id])
+# !!
+
+# > Cancel by run ids
+workflow_run_ids = workflow_runs.rows.map { |run| run.metadata.id }
+
+hatchet.runs.bulk_cancel(ids: workflow_run_ids)
+# !!
+
+# > Cancel by filters
+hatchet.runs.bulk_cancel(
+ since: Time.now - 86_400,
+ until_time: Time.now,
+ statuses: ["RUNNING"],
+ workflow_ids: [workflow.metadata.id],
+ additional_metadata: { "key" => "value" }
+)
+# !!
diff --git a/sdks/ruby/examples/bulk_operations/test_bulk_replay_spec.rb b/sdks/ruby/examples/bulk_operations/test_bulk_replay_spec.rb
new file mode 100644
index 000000000..f095909a2
--- /dev/null
+++ b/sdks/ruby/examples/bulk_operations/test_bulk_replay_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "BulkReplay" do
+ it "replays failed runs in bulk" do
+ test_run_id = SecureRandom.uuid
+ n = 100
+
+ # First batch -- all will fail on first attempt
+ expect do
+ BULK_REPLAY_TEST_1.run_many(
+ (n + 1).times.map do
+ BULK_REPLAY_TEST_1.create_bulk_run_item(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id }
+ )
+ )
+ end
+ )
+ end.to raise_error
+
+ expect do
+ BULK_REPLAY_TEST_2.run_many(
+ ((n / 2) - 1).times.map do
+ BULK_REPLAY_TEST_2.create_bulk_run_item(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id }
+ )
+ )
+ end
+ )
+ end.to raise_error
+
+ expect do
+ BULK_REPLAY_TEST_3.run_many(
+ ((n / 2) - 2).times.map do
+ BULK_REPLAY_TEST_3.create_bulk_run_item(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id }
+ )
+ )
+ end
+ )
+ end.to raise_error
+
+ workflow_ids = [BULK_REPLAY_TEST_1.id, BULK_REPLAY_TEST_2.id, BULK_REPLAY_TEST_3.id]
+
+ # Should result in two batches of replays
+ HATCHET.runs.bulk_replay(
+ filters: {
+ workflow_ids: workflow_ids,
+ additional_metadata: { "test_run_id" => test_run_id }
+ }
+ )
+
+ total_expected = (n + 1) + (n / 2 - 1) + (n / 2 - 2)
+
+ # Poll until all runs are completed instead of a fixed sleep
+ 30.times do
+ runs = HATCHET.runs.list(
+ workflow_ids: workflow_ids,
+ additional_metadata: { "test_run_id" => test_run_id },
+ limit: 1000
+ )
+
+ all_completed = runs.rows.length == total_expected && runs.rows.all? { |r| r.status == "COMPLETED" }
+ if all_completed
+ expect(runs.rows.length).to eq(total_expected)
+ runs.rows.each { |run| expect(run.status).to eq("COMPLETED") }
+ break
+ end
+
+ sleep 1
+ end
+ end
+end
diff --git a/sdks/ruby/examples/bulk_operations/worker.rb b/sdks/ruby/examples/bulk_operations/worker.rb
new file mode 100644
index 000000000..1459f41d6
--- /dev/null
+++ b/sdks/ruby/examples/bulk_operations/worker.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+BULK_REPLAY_TEST_1 = HATCHET.task(name: "bulk_replay_test_1") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+BULK_REPLAY_TEST_2 = HATCHET.task(name: "bulk_replay_test_2") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+BULK_REPLAY_TEST_3 = HATCHET.task(name: "bulk_replay_test_3") do |input, ctx|
+ puts "retrying bulk replay test task #{ctx.retry_count}"
+ raise "This is a test error to trigger a retry." if ctx.retry_count == 0
+end
+
+def main
+ worker = HATCHET.worker(
+ "bulk-replay-test-worker",
+ workflows: [BULK_REPLAY_TEST_1, BULK_REPLAY_TEST_2, BULK_REPLAY_TEST_3]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/cancellation/test_cancellation_spec.rb b/sdks/ruby/examples/cancellation/test_cancellation_spec.rb
new file mode 100644
index 000000000..de3bb161b
--- /dev/null
+++ b/sdks/ruby/examples/cancellation/test_cancellation_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "CancelWorkflow" do
+ it "cancels a workflow run" do
+ ref = CANCELLATION_WORKFLOW.run_no_wait
+
+ # Wait for the cancellation to happen
+ sleep 10
+
+ # Poll until the run reaches a terminal state
+ run = HATCHET.runs.poll(ref.workflow_run_id, interval: 1.0, timeout: 60)
+
+ expect(run.status).to eq("CANCELLED")
+ end
+end
diff --git a/sdks/ruby/examples/cancellation/worker.rb b/sdks/ruby/examples/cancellation/worker.rb
new file mode 100644
index 000000000..22c8a4c0c
--- /dev/null
+++ b/sdks/ruby/examples/cancellation/worker.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CANCELLATION_WORKFLOW = HATCHET.workflow(name: "CancelWorkflow")
+
+# > Self-cancelling task
+CANCELLATION_WORKFLOW.task(:self_cancel) do |input, ctx|
+ sleep 2
+
+ ## Cancel the task
+ ctx.cancel
+
+ sleep 10
+
+ { "error" => "Task should have been cancelled" }
+end
+
+# !!
+
+# > Checking exit flag
+CANCELLATION_WORKFLOW.task(:check_flag) do |input, ctx|
+ 3.times do
+ sleep 1
+
+ # Note: Checking the status of the exit flag is mostly useful for cancelling
+ # sync tasks without needing to forcibly kill the thread they're running on.
+ if ctx.cancelled?
+ puts "Task has been cancelled"
+ raise "Task has been cancelled"
+ end
+ end
+
+ { "error" => "Task should have been cancelled" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("cancellation-worker", workflows: [CANCELLATION_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/child/bulk.rb b/sdks/ruby/examples/child/bulk.rb
new file mode 100644
index 000000000..b764b7cb3
--- /dev/null
+++ b/sdks/ruby/examples/child/bulk.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Bulk run a task
+greetings = ["Hello, World!", "Hello, Moon!", "Hello, Mars!"]
+
+results = CHILD_TASK_WF.run_many(
+ greetings.map do |greeting|
+ CHILD_TASK_WF.create_bulk_run_item(
+ input: { "message" => greeting }
+ )
+ end
+)
+
+puts results
+# !!
diff --git a/sdks/ruby/examples/child/simple_fanout.rb b/sdks/ruby/examples/child/simple_fanout.rb
new file mode 100644
index 000000000..e1762cc6d
--- /dev/null
+++ b/sdks/ruby/examples/child/simple_fanout.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require_relative "worker"
+
+hatchet = Hatchet::Client.new
+
+# > Running a task from within a task
+SPAWN_TASK = hatchet.task(name: "SpawnTask") do |input, ctx|
+ result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+ { "results" => result }
+end
+# !!
diff --git a/sdks/ruby/examples/child/trigger.rb b/sdks/ruby/examples/child/trigger.rb
new file mode 100644
index 000000000..c0b9c6d7c
--- /dev/null
+++ b/sdks/ruby/examples/child/trigger.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Running a task
+result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+# !!
+
+# > Running a task aio
+# In Ruby, run is synchronous
+result = CHILD_TASK_WF.run({ "message" => "Hello, World!" })
+# !!
+
+# > Running multiple tasks
+results = CHILD_TASK_WF.run_many(
+ [
+ CHILD_TASK_WF.create_bulk_run_item(input: { "message" => "Hello, World!" }),
+ CHILD_TASK_WF.create_bulk_run_item(input: { "message" => "Hello, Moon!" })
+ ]
+)
+puts results
+# !!
diff --git a/sdks/ruby/examples/child/worker.rb b/sdks/ruby/examples/child/worker.rb
new file mode 100644
index 000000000..2d5dedbef
--- /dev/null
+++ b/sdks/ruby/examples/child/worker.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+# > Simple
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CHILD_TASK_WF = HATCHET.workflow(name: "SimpleWorkflow")
+
+CHILD_TASK_WF.task(:step1) do |input, ctx|
+ puts "executed step1: #{input['message']}"
+ { "transformed_message" => input["message"].upcase }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("test-worker", slots: 1, workflows: [CHILD_TASK_WF])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress_spec.rb b/sdks/ruby/examples/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress_spec.rb
new file mode 100644
index 000000000..d1a0c3462
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_cancel_in_progress/test_concurrency_cancel_in_progress_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ConcurrencyCancelInProgress" do
+ # TODO-RUBY: this test hangs indefinitely
+ xit "cancels in-progress runs when concurrency limit exceeded" do
+ test_run_id = SecureRandom.uuid
+ refs = []
+
+ 10.times do |i|
+ ref = CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW.run_no_wait(
+ { "group" => "A" },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id, "i" => i.to_s }
+ )
+ )
+ refs << ref
+ sleep 1
+ end
+
+ refs.each do |ref|
+ puts "Waiting for run #{ref.workflow_run_id} to complete"
+ ref.result rescue nil
+ end
+
+ # Poll until the OLAP repo has caught up (replaces fixed sleep 5)
+ all_rows = nil
+ 30.times do
+ all_rows = HATCHET.runs.list(additional_metadata: { "test_run_id" => test_run_id }, limit: 100).rows
+ break if all_rows.length >= 10
+
+ sleep 0.5
+ end
+ # Filter to workflow-level runs only (exclude individual task runs)
+ runs = all_rows.reject { |r| r.respond_to?(:type) && r.type == "TASK" }
+ runs.sort_by! { |r| (r.additional_metadata || {})["i"].to_i }
+
+ expect(runs.length).to eq(10)
+ expect((runs.last.additional_metadata || {})["i"]).to eq("9")
+ expect(runs.last.status).to eq("COMPLETED")
+ expect(runs[0..-2].all? { |r| r.status == "CANCELLED" }).to be true
+ end
+end
diff --git a/sdks/ruby/examples/concurrency_cancel_in_progress/worker.rb b/sdks/ruby/examples/concurrency_cancel_in_progress/worker.rb
new file mode 100644
index 000000000..e66f5f30f
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_cancel_in_progress/worker.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyCancelInProgress",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :cancel_in_progress
+ )
+)
+
+STEP1_CIP = CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW.task(:step1) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
+
+CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW.task(:step2, parents: [STEP1_CIP]) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
diff --git a/sdks/ruby/examples/concurrency_cancel_newest/test_concurrency_cancel_newest_spec.rb b/sdks/ruby/examples/concurrency_cancel_newest/test_concurrency_cancel_newest_spec.rb
new file mode 100644
index 000000000..9dc876709
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_cancel_newest/test_concurrency_cancel_newest_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ConcurrencyCancelNewest" do
+ # TODO-RUBY: fix this test
+ xit "cancels newest runs when concurrency limit exceeded" do
+ test_run_id = SecureRandom.uuid
+
+ to_run = CONCURRENCY_CANCEL_NEWEST_WORKFLOW.run_no_wait(
+ { "group" => "A" },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id }
+ )
+ )
+
+ sleep 1
+
+ to_cancel = CONCURRENCY_CANCEL_NEWEST_WORKFLOW.run_many_no_wait(
+ 10.times.map do
+ CONCURRENCY_CANCEL_NEWEST_WORKFLOW.create_bulk_run_item(
+ input: { "group" => "A" },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "test_run_id" => test_run_id }
+ )
+ )
+ end
+ )
+
+ to_run.result
+ to_cancel.each { |ref| ref.result rescue nil }
+
+ # Poll until the OLAP repo has caught up (replaces fixed sleep 5)
+ all_runs = nil
+ 30.times do
+ all_runs = HATCHET.runs.list(
+ additional_metadata: { "test_run_id" => test_run_id },
+ limit: 100
+ ).rows
+ break if all_runs.length >= 11
+
+ sleep 0.5
+ end
+
+ successful_run = HATCHET.runs.get(to_run.workflow_run_id)
+ expect(successful_run.status).to eq("COMPLETED")
+
+ # Filter to workflow-level runs only
+ workflow_runs = all_runs.reject { |r| r.respond_to?(:type) && r.type == "TASK" }
+
+ other_runs = workflow_runs.reject { |r| r.metadata.id == to_run.workflow_run_id }
+ expect(other_runs.all? { |r| r.status == "CANCELLED" }).to be true
+ end
+end
diff --git a/sdks/ruby/examples/concurrency_cancel_newest/worker.rb b/sdks/ruby/examples/concurrency_cancel_newest/worker.rb
new file mode 100644
index 000000000..daa1749b4
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_cancel_newest/worker.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+CONCURRENCY_CANCEL_NEWEST_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyCancelNewest",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :cancel_newest
+ )
+)
+
+STEP1_CN = CONCURRENCY_CANCEL_NEWEST_WORKFLOW.task(:step1) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
+
+CONCURRENCY_CANCEL_NEWEST_WORKFLOW.task(:step2, parents: [STEP1_CN]) do |input, ctx|
+ 50.times { sleep 0.10 }
+end
diff --git a/sdks/ruby/examples/concurrency_limit/worker.rb b/sdks/ruby/examples/concurrency_limit/worker.rb
new file mode 100644
index 000000000..20ea95c59
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_limit/worker.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Workflow
+CONCURRENCY_LIMIT_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyDemoWorkflow",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group_key",
+ max_runs: 5,
+ limit_strategy: :cancel_in_progress
+ )
+)
+
+CONCURRENCY_LIMIT_WORKFLOW.task(:step1) do |input, ctx|
+ sleep 3
+ puts "executed step1"
+ { "run" => input["run"] }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-demo-worker", slots: 10, workflows: [CONCURRENCY_LIMIT_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/concurrency_limit_rr/test_concurrency_limit_rr_spec.rb b/sdks/ruby/examples/concurrency_limit_rr/test_concurrency_limit_rr_spec.rb
new file mode 100644
index 000000000..2f8eb0b92
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_limit_rr/test_concurrency_limit_rr_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ConcurrencyDemoWorkflowRR" do
+ # The timing for this test is not reliable
+ xit "runs with round-robin concurrency" do
+ num_groups = 2
+ runs = []
+
+ num_groups.times do
+ runs << CONCURRENCY_LIMIT_RR_WORKFLOW.run_no_wait
+ runs << CONCURRENCY_LIMIT_RR_WORKFLOW.run_no_wait
+ end
+
+ successful_runs = []
+ cancelled_runs = []
+
+ start_time = Time.now
+
+ runs.each_with_index do |run, i|
+ result = run.result
+ successful_runs << [i + 1, result]
+ rescue => e
+ if e.message.include?("CANCELLED_BY_CONCURRENCY_LIMIT")
+ cancelled_runs << [i + 1, e.message]
+ else
+ raise
+ end
+ end
+
+ total_time = Time.now - start_time
+
+ expect(successful_runs.length).to eq(4)
+ expect(cancelled_runs.length).to eq(0)
+ expect(total_time).to be_between(3.8, 7)
+ end
+end
diff --git a/sdks/ruby/examples/concurrency_limit_rr/worker.rb b/sdks/ruby/examples/concurrency_limit_rr/worker.rb
new file mode 100644
index 000000000..a914eab52
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_limit_rr/worker.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Concurrency Strategy With Key
+CONCURRENCY_LIMIT_RR_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyDemoWorkflowRR",
+ concurrency: Hatchet::ConcurrencyExpression.new(
+ expression: "input.group",
+ max_runs: 1,
+ limit_strategy: :group_round_robin
+ )
+)
+
+CONCURRENCY_LIMIT_RR_WORKFLOW.task(:step1) do |input, ctx|
+ puts "starting step1"
+ sleep 2
+ puts "finished step1"
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-demo-worker-rr",
+ slots: 10,
+ workflows: [CONCURRENCY_LIMIT_RR_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/concurrency_multiple_keys/test_multiple_concurrency_keys_spec.rb b/sdks/ruby/examples/concurrency_multiple_keys/test_multiple_concurrency_keys_spec.rb
new file mode 100644
index 000000000..3f022b24b
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_multiple_keys/test_multiple_concurrency_keys_spec.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ConcurrencyWorkflowManyKeys" do
+ CHARACTERS = %w[Anna Vronsky Stiva Dolly Levin Karenin].freeze
+ DIGITS = (0..5).map(&:to_s).freeze
+
+  def are_overlapping?(x, y)
+    (x[:started_at] < y[:finished_at] && x[:finished_at] > y[:started_at]) ||
+      (x[:finished_at] > y[:started_at] && x[:started_at] < y[:finished_at])
+  end
+
+ def valid_group?(group)
+ digits = Hash.new(0)
+ names = Hash.new(0)
+
+ group.each do |task|
+ digits[task[:digit]] += 1
+ names[task[:name]] += 1
+ end
+
+ return false if digits.values.any? { |v| v > DIGIT_MAX_RUNS }
+ return false if names.values.any? { |v| v > NAME_MAX_RUNS }
+
+ true
+ end
+
+ # TODO-RUBY: fix this test
+ xit "respects multiple concurrency keys" do
+ test_run_id = SecureRandom.uuid
+
+ run_refs = CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.run_many_no_wait(
+ 100.times.map do
+ name = CHARACTERS.sample
+ digit = DIGITS.sample
+
+ CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.create_bulk_run_item(
+ input: { "name" => name, "digit" => digit },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: {
+ "test_run_id" => test_run_id,
+ "key" => "#{name}-#{digit}",
+ "name" => name,
+ "digit" => digit
+ }
+ )
+ )
+ end
+ )
+
+ run_refs.each(&:result)
+
+ workflows = HATCHET.workflows.list(
+ workflow_name: CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.name,
+ limit: 1000
+ ).rows
+
+ expect(workflows).not_to be_empty
+
+    workflow = workflows.find { |w| w.name == HATCHET.config.apply_namespace(CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.name) }
+ expect(workflow).not_to be_nil
+
+ runs = HATCHET.runs.list(
+ workflow_ids: [workflow.metadata.id],
+ additional_metadata: { "test_run_id" => test_run_id },
+ limit: 1000
+ )
+
+ sorted_runs = runs.rows.map do |r|
+ {
+ key: (r.additional_metadata || {})["key"],
+ name: (r.additional_metadata || {})["name"],
+ digit: (r.additional_metadata || {})["digit"],
+ started_at: r.started_at,
+ finished_at: r.finished_at
+ }
+ end.select { |r| r[:started_at] && r[:finished_at] }
+ .sort_by { |r| r[:started_at] }
+
+ overlapping_groups = {}
+
+ sorted_runs.each do |run|
+ has_group_membership = false
+
+ if overlapping_groups.empty?
+ overlapping_groups[1] = [run]
+ next
+ end
+
+ overlapping_groups.each do |id, group|
+ if group.all? { |task| are_overlapping?(run, task) }
+ overlapping_groups[id] << run
+ has_group_membership = true
+ break
+ end
+ end
+
+ unless has_group_membership
+ overlapping_groups[overlapping_groups.size + 1] = [run]
+ end
+ end
+
+ overlapping_groups.each do |id, group|
+ expect(valid_group?(group)).to be(true), "Group #{id} is not valid"
+ end
+ end
+end
diff --git a/sdks/ruby/examples/concurrency_multiple_keys/worker.rb b/sdks/ruby/examples/concurrency_multiple_keys/worker.rb
new file mode 100644
index 000000000..390e01071
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_multiple_keys/worker.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SLEEP_TIME_MK = 2
+DIGIT_MAX_RUNS = 8
+NAME_MAX_RUNS = 3
+
+# > Concurrency Strategy With Key
+CONCURRENCY_MULTIPLE_KEYS_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyWorkflowManyKeys"
+)
+
+CONCURRENCY_MULTIPLE_KEYS_WORKFLOW.task(
+ :concurrency_task,
+ concurrency: [
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.digit",
+ max_runs: DIGIT_MAX_RUNS,
+ limit_strategy: :group_round_robin
+ ),
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.name",
+ max_runs: NAME_MAX_RUNS,
+ limit_strategy: :group_round_robin
+ )
+ ]
+) do |input, ctx|
+ sleep SLEEP_TIME_MK
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-worker-multiple-keys",
+ slots: 10,
+ workflows: [CONCURRENCY_MULTIPLE_KEYS_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/concurrency_workflow_level/test_workflow_level_concurrency_spec.rb b/sdks/ruby/examples/concurrency_workflow_level/test_workflow_level_concurrency_spec.rb
new file mode 100644
index 000000000..f0eaa265e
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_workflow_level/test_workflow_level_concurrency_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ConcurrencyWorkflowLevel" do
+ CHARACTERS_WL = %w[Anna Vronsky Stiva Dolly Levin Karenin].freeze
+ DIGITS_WL = (0..5).map(&:to_s).freeze
+
+ def are_overlapping?(x, y)
+ (x[:started_at] < y[:finished_at] && x[:finished_at] > y[:started_at]) ||
+ (x[:finished_at] > y[:started_at] && x[:started_at] < y[:finished_at])
+ end
+
+ def valid_group?(group)
+ digits = Hash.new(0)
+ names = Hash.new(0)
+
+ group.each do |task|
+ digits[task[:digit]] += 1
+ names[task[:name]] += 1
+ end
+
+ return false if digits.values.any? { |v| v > DIGIT_MAX_RUNS_WL }
+ return false if names.values.any? { |v| v > NAME_MAX_RUNS_WL }
+
+ true
+ end
+
+ xit "respects workflow-level concurrency" do
+ test_run_id = SecureRandom.uuid
+
+ run_refs = CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.run_many_no_wait(
+ # TODO-RUBY: only enqueues an arbitrary number of runs, not 100
+ 2.times.map do
+ name = CHARACTERS_WL.sample
+ digit = DIGITS_WL.sample
+
+ CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.create_bulk_run_item(
+ input: { "name" => name, "digit" => digit },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: {
+ "test_run_id" => test_run_id,
+ "key" => "#{name}-#{digit}",
+ "name" => name,
+ "digit" => digit
+ }
+ )
+ )
+ end
+ )
+
+ puts "len(run_refs): #{run_refs.length}"
+
+    # TODO-RUBY: fix this test; we don't seem to enqueue all requested runs
+ run_refs.each(&:result)
+
+ workflows = HATCHET.workflows.list(
+ workflow_name: CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.name,
+ limit: 1000
+ ).rows
+
+ expect(workflows).not_to be_empty
+
+    workflow = workflows.find { |w| w.name == HATCHET.config.apply_namespace(CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.name) }
+ expect(workflow).not_to be_nil
+
+ runs = HATCHET.runs.list(
+ workflow_ids: [workflow.metadata.id],
+ additional_metadata: { "test_run_id" => test_run_id },
+ limit: 1000
+ )
+
+ sorted_runs = runs.rows.map do |r|
+ {
+ key: (r.additional_metadata || {})["key"],
+ name: (r.additional_metadata || {})["name"],
+ digit: (r.additional_metadata || {})["digit"],
+ started_at: r.started_at,
+ finished_at: r.finished_at
+ }
+ end.select { |r| r[:started_at] && r[:finished_at] }
+ .sort_by { |r| r[:started_at] }
+
+ overlapping_groups = {}
+
+ sorted_runs.each do |run|
+ has_group_membership = false
+
+ if overlapping_groups.empty?
+ overlapping_groups[1] = [run]
+ next
+ end
+
+ overlapping_groups.each do |id, group|
+ if group.all? { |task| are_overlapping?(run, task) }
+ overlapping_groups[id] << run
+ has_group_membership = true
+ break
+ end
+ end
+
+ unless has_group_membership
+ overlapping_groups[overlapping_groups.size + 1] = [run]
+ end
+ end
+
+ overlapping_groups.each do |id, group|
+ expect(valid_group?(group)).to be(true), "Group #{id} is not valid"
+ end
+ end
+end
diff --git a/sdks/ruby/examples/concurrency_workflow_level/worker.rb b/sdks/ruby/examples/concurrency_workflow_level/worker.rb
new file mode 100644
index 000000000..01e86e496
--- /dev/null
+++ b/sdks/ruby/examples/concurrency_workflow_level/worker.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SLEEP_TIME_WL = 2
+DIGIT_MAX_RUNS_WL = 8
+NAME_MAX_RUNS_WL = 3
+
+# > Multiple Concurrency Keys
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW = HATCHET.workflow(
+ name: "ConcurrencyWorkflowLevel",
+ concurrency: [
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.digit",
+ max_runs: DIGIT_MAX_RUNS_WL,
+ limit_strategy: :group_round_robin
+ ),
+ Hatchet::ConcurrencyExpression.new(
+ expression: "input.name",
+ max_runs: NAME_MAX_RUNS_WL,
+ limit_strategy: :group_round_robin
+ )
+ ]
+)
+
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.task(:task_1) do |input, ctx|
+ sleep SLEEP_TIME_WL
+end
+
+CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW.task(:task_2) do |input, ctx|
+ sleep SLEEP_TIME_WL
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "concurrency-worker-workflow-level",
+ slots: 10,
+ workflows: [CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/conditions/test_conditions_spec.rb b/sdks/ruby/examples/conditions/test_conditions_spec.rb
new file mode 100644
index 000000000..33e0e272f
--- /dev/null
+++ b/sdks/ruby/examples/conditions/test_conditions_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "TaskConditionWorkflow" do
+ it "runs the condition workflow with event triggers" do
+ ref = TASK_CONDITION_WORKFLOW.run_no_wait
+
+ # Wait for the sleep conditions, then push events
+ sleep 2
+
+ HATCHET.events.create(key: "wait_for_event:start", data: {})
+
+ result = ref.result
+ expect(result["sum"]["sum"]).to be_a(Integer)
+ end
+end
diff --git a/sdks/ruby/examples/conditions/worker.rb b/sdks/ruby/examples/conditions/worker.rb
new file mode 100644
index 000000000..52da73146
--- /dev/null
+++ b/sdks/ruby/examples/conditions/worker.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+# > Create a workflow
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+TASK_CONDITION_WORKFLOW = HATCHET.workflow(name: "TaskConditionWorkflow")
+
+# !!
+
+# > Add base task
+COND_START = TASK_CONDITION_WORKFLOW.task(:start) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add wait for sleep
+WAIT_FOR_SLEEP = TASK_CONDITION_WORKFLOW.task(
+ :wait_for_sleep,
+ parents: [COND_START],
+ wait_for: [Hatchet::SleepCondition.new(10)]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add skip condition override
+TASK_CONDITION_WORKFLOW.task(
+ :skip_with_multiple_parents,
+ parents: [COND_START, WAIT_FOR_SLEEP],
+ skip_if: [Hatchet::ParentCondition.new(parent: COND_START, expression: "output.random_number > 0")]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add skip on event
+SKIP_ON_EVENT = TASK_CONDITION_WORKFLOW.task(
+ :skip_on_event,
+ parents: [COND_START],
+ wait_for: [Hatchet::SleepCondition.new(30)],
+ skip_if: [Hatchet::UserEventCondition.new(event_key: "skip_on_event:skip")]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add branching
+LEFT_BRANCH = TASK_CONDITION_WORKFLOW.task(
+ :left_branch,
+ parents: [WAIT_FOR_SLEEP],
+ skip_if: [
+ Hatchet::ParentCondition.new(
+ parent: WAIT_FOR_SLEEP,
+ expression: "output.random_number > 50"
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+RIGHT_BRANCH = TASK_CONDITION_WORKFLOW.task(
+ :right_branch,
+ parents: [WAIT_FOR_SLEEP],
+ skip_if: [
+ Hatchet::ParentCondition.new(
+ parent: WAIT_FOR_SLEEP,
+ expression: "output.random_number <= 50"
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add wait for event
+WAIT_FOR_EVENT = TASK_CONDITION_WORKFLOW.task(
+ :wait_for_event,
+ parents: [COND_START],
+ wait_for: [
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(60),
+ Hatchet::UserEventCondition.new(event_key: "wait_for_event:start")
+ )
+ ]
+) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Add sum
+TASK_CONDITION_WORKFLOW.task(
+ :sum,
+ parents: [COND_START, WAIT_FOR_SLEEP, WAIT_FOR_EVENT, SKIP_ON_EVENT, LEFT_BRANCH, RIGHT_BRANCH]
+) do |input, ctx|
+ one = ctx.task_output(COND_START)["random_number"]
+ two = ctx.task_output(WAIT_FOR_EVENT)["random_number"]
+ three = ctx.task_output(WAIT_FOR_SLEEP)["random_number"]
+ four = ctx.was_skipped?(SKIP_ON_EVENT) ? 0 : ctx.task_output(SKIP_ON_EVENT)["random_number"]
+ five = ctx.was_skipped?(LEFT_BRANCH) ? 0 : ctx.task_output(LEFT_BRANCH)["random_number"]
+ six = ctx.was_skipped?(RIGHT_BRANCH) ? 0 : ctx.task_output(RIGHT_BRANCH)["random_number"]
+
+ { "sum" => one + two + three + four + five + six }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("dag-worker", workflows: [TASK_CONDITION_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/cron/programatic_sync.rb b/sdks/ruby/examples/cron/programatic_sync.rb
new file mode 100644
index 000000000..5e67582af
--- /dev/null
+++ b/sdks/ruby/examples/cron/programatic_sync.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+dynamic_cron_workflow = hatchet.workflow(name: "DynamicCronWorkflow")
+
+# > Create
+cron_trigger = dynamic_cron_workflow.create_cron(
+ "customer-a-daily-report",
+ "0 12 * * *",
+ input: { "name" => "John Doe" }
+)
+
+id = cron_trigger.metadata.id
+# !!
+
+# > List
+cron_triggers = hatchet.cron.list
+# !!
+
+# > Delete
+hatchet.cron.delete(cron_trigger.metadata.id)
+# !!
diff --git a/sdks/ruby/examples/cron/worker.rb b/sdks/ruby/examples/cron/worker.rb
new file mode 100644
index 000000000..21b00761f
--- /dev/null
+++ b/sdks/ruby/examples/cron/worker.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Cron Workflow Definition
+CRON_WORKFLOW = HATCHET.workflow(
+ name: "CronWorkflow",
+ on_crons: ["*/5 * * * *"]
+)
+
+CRON_WORKFLOW.task(:cron_task) do |input, ctx|
+ puts "Cron task executed at #{Time.now}"
+ { "status" => "success" }
+end
+
+# !!
+
+# > Programmatic Cron Creation
+def create_cron
+ HATCHET.cron.create(
+ workflow_name: "CronWorkflow",
+ cron_name: "my-programmatic-cron",
+ expression: "*/10 * * * *",
+ input: { "message" => "hello from cron" }
+ )
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("cron-worker", workflows: [CRON_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/dag/test_dag_spec.rb b/sdks/ruby/examples/dag/test_dag_spec.rb
new file mode 100644
index 000000000..8f096ac6c
--- /dev/null
+++ b/sdks/ruby/examples/dag/test_dag_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "DAGWorkflow" do
+ it "runs the DAG workflow" do
+ result = DAG_WORKFLOW.run
+
+ one = result["step1"]["random_number"]
+ two = result["step2"]["random_number"]
+ expect(result["step3"]["sum"]).to eq(one + two)
+ expect(result["step4"]["step4"]).to eq("step4")
+ end
+end
diff --git a/sdks/ruby/examples/dag/trigger.rb b/sdks/ruby/examples/dag/trigger.rb
new file mode 100644
index 000000000..ae305a933
--- /dev/null
+++ b/sdks/ruby/examples/dag/trigger.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Trigger the DAG
+result = DAG_WORKFLOW.run
+puts result
+# !!
diff --git a/sdks/ruby/examples/dag/worker.rb b/sdks/ruby/examples/dag/worker.rb
new file mode 100644
index 000000000..02822e0f9
--- /dev/null
+++ b/sdks/ruby/examples/dag/worker.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Define a DAG
+DAG_WORKFLOW = HATCHET.workflow(name: "DAGWorkflow")
+
+# !!
+
+# > First task
+STEP1 = DAG_WORKFLOW.task(:step1, execution_timeout: 5) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+STEP2 = DAG_WORKFLOW.task(:step2, execution_timeout: 5) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+# !!
+
+# > Task with parents
+DAG_WORKFLOW.task(:step3, parents: [STEP1, STEP2]) do |input, ctx|
+ one = ctx.task_output(STEP1)["random_number"]
+ two = ctx.task_output(STEP2)["random_number"]
+
+ { "sum" => one + two }
+end
+
+DAG_WORKFLOW.task(:step4, parents: [STEP1, :step3]) do |input, ctx|
+ puts(
+ "executed step4",
+ Time.now.strftime("%H:%M:%S"),
+ input.inspect,
+ ctx.task_output(STEP1).inspect,
+ ctx.task_output(:step3).inspect
+ )
+
+ { "step4" => "step4" }
+end
+
+# !!
+
+# > Declare a worker
+def main
+ worker = HATCHET.worker("dag-worker", workflows: [DAG_WORKFLOW])
+ worker.start
+end
+
+# !!
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/dataclasses/worker.rb b/sdks/ruby/examples/dataclasses/worker.rb
new file mode 100644
index 000000000..75924cc8c
--- /dev/null
+++ b/sdks/ruby/examples/dataclasses/worker.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Task using Struct-based input
+# Ruby equivalent of Python dataclass -- use plain hashes
+SAY_HELLO = HATCHET.task(name: "say_hello") do |input, ctx|
+ { "message" => "Hello, #{input['name']}!" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SAY_HELLO])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/dedupe/worker.rb b/sdks/ruby/examples/dedupe/worker.rb
new file mode 100644
index 000000000..c02e73f9d
--- /dev/null
+++ b/sdks/ruby/examples/dedupe/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+DEDUPE_PARENT_WF = HATCHET.workflow(name: "DedupeParent")
+DEDUPE_CHILD_WF = HATCHET.workflow(name: "DedupeChild")
+
+DEDUPE_PARENT_WF.task(:spawn, execution_timeout: 60) do |input, ctx|
+ puts "spawning child"
+
+ results = []
+
+ 2.times do |i|
+ begin
+ results << DEDUPE_CHILD_WF.run(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "dedupe" => "test" },
+ key: "child#{i}"
+ )
+ )
+ rescue Hatchet::DedupeViolationError => e
+ puts "dedupe violation #{e}"
+ next
+ end
+ end
+
+ puts "results #{results}"
+ { "results" => results }
+end
+
+DEDUPE_CHILD_WF.task(:process) do |input, ctx|
+ sleep 3
+ puts "child process"
+ { "status" => "success" }
+end
+
+DEDUPE_CHILD_WF.task(:process2) do |input, ctx|
+ puts "child process2"
+ { "status2" => "success" }
+end
+
+def main
+ worker = HATCHET.worker(
+ "fanout-worker", slots: 100, workflows: [DEDUPE_PARENT_WF, DEDUPE_CHILD_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/delayed/test_delayed_spec.rb b/sdks/ruby/examples/delayed/test_delayed_spec.rb
new file mode 100644
index 000000000..bb3f6f43b
--- /dev/null
+++ b/sdks/ruby/examples/delayed/test_delayed_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "DelayedWorkflow" do
+ # TODO: Implement delayed workflow test
+ it "schedules a workflow for future execution" do
+ skip "Not yet implemented"
+ end
+end
diff --git a/sdks/ruby/examples/delayed/worker.rb b/sdks/ruby/examples/delayed/worker.rb
new file mode 100644
index 000000000..48e465a46
--- /dev/null
+++ b/sdks/ruby/examples/delayed/worker.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+PRINT_SCHEDULE_WF = HATCHET.workflow(name: "PrintScheduleWorkflow")
+PRINT_PRINTER_WF = HATCHET.workflow(name: "PrintPrinterWorkflow")
+
+PRINT_SCHEDULE_WF.task(:schedule) do |input, ctx|
+ now = Time.now.utc
+ puts "the time is \t #{now.strftime('%H:%M:%S')}"
+ future_time = now + 15
+ puts "scheduling for \t #{future_time.strftime('%H:%M:%S')}"
+
+ PRINT_PRINTER_WF.schedule(future_time, input: input)
+end
+
+PRINT_PRINTER_WF.task(:step1) do |input, ctx|
+ now = Time.now.utc
+ puts "printed at \t #{now.strftime('%H:%M:%S')}"
+ puts "message \t #{input['message']}"
+end
+
+def main
+ worker = HATCHET.worker(
+ "delayed-worker", slots: 4, workflows: [PRINT_SCHEDULE_WF, PRINT_PRINTER_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/dependency_injection/test_dependency_injection_spec.rb b/sdks/ruby/examples/dependency_injection/test_dependency_injection_spec.rb
new file mode 100644
index 000000000..2e4cc16be
--- /dev/null
+++ b/sdks/ruby/examples/dependency_injection/test_dependency_injection_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "DependencyInjection" do
+ let(:expected_output) do
+ {
+ "sync_dep" => SYNC_DEPENDENCY_VALUE,
+ "async_dep" => ASYNC_DEPENDENCY_VALUE,
+ "async_cm_dep" => "#{ASYNC_CM_DEPENDENCY_VALUE}_#{ASYNC_DEPENDENCY_VALUE}",
+ "sync_cm_dep" => "#{SYNC_CM_DEPENDENCY_VALUE}_#{SYNC_DEPENDENCY_VALUE}",
+ "chained_dep" => "chained_#{CHAINED_CM_VALUE}",
+ "chained_async_dep" => "chained_#{CHAINED_ASYNC_CM_VALUE}"
+ }
+ end
+
+ [
+ ["async_task_with_dependencies", :ASYNC_TASK_WITH_DEPS],
+ ["sync_task_with_dependencies", :SYNC_TASK_WITH_DEPS],
+ ["durable_async_task_with_dependencies", :DURABLE_ASYNC_TASK_WITH_DEPS],
+ ["durable_sync_task_with_dependencies", :DURABLE_SYNC_TASK_WITH_DEPS]
+ ].each do |name, const|
+ it "resolves dependencies for #{name}" do
+ task = Object.const_get(const)
+ result = task.run
+
+ expect(result).to eq(expected_output)
+ end
+ end
+end
diff --git a/sdks/ruby/examples/dependency_injection/worker.rb b/sdks/ruby/examples/dependency_injection/worker.rb
new file mode 100644
index 000000000..5be5d9988
--- /dev/null
+++ b/sdks/ruby/examples/dependency_injection/worker.rb
@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+SYNC_DEPENDENCY_VALUE = "sync_dependency_value"
+ASYNC_DEPENDENCY_VALUE = "async_dependency_value"
+SYNC_CM_DEPENDENCY_VALUE = "sync_cm_dependency_value"
+ASYNC_CM_DEPENDENCY_VALUE = "async_cm_dependency_value"
+CHAINED_CM_VALUE = "chained_cm_value"
+CHAINED_ASYNC_CM_VALUE = "chained_async_cm_value"
+
+# > Declare dependencies (Ruby uses callable objects instead of Python's Depends)
+sync_dep = ->(_input, _ctx) { SYNC_DEPENDENCY_VALUE }
+async_dep = ->(_input, _ctx) { ASYNC_DEPENDENCY_VALUE }
+
+sync_cm_dep = lambda { |_input, _ctx, deps|
+ "#{SYNC_CM_DEPENDENCY_VALUE}_#{deps[:sync_dep]}"
+}
+
+async_cm_dep = lambda { |_input, _ctx, deps|
+ "#{ASYNC_CM_DEPENDENCY_VALUE}_#{deps[:async_dep]}"
+}
+
+chained_dep = ->(_input, _ctx, deps) { "chained_#{CHAINED_CM_VALUE}" }
+chained_async_dep = ->(_input, _ctx, deps) { "chained_#{CHAINED_ASYNC_CM_VALUE}" }
+
+# !!
+
+# > Inject dependencies
+ASYNC_TASK_WITH_DEPS = HATCHET.task(
+ name: "async_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+SYNC_TASK_WITH_DEPS = HATCHET.task(
+ name: "sync_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DURABLE_ASYNC_TASK_WITH_DEPS = HATCHET.durable_task(
+ name: "durable_async_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DURABLE_SYNC_TASK_WITH_DEPS = HATCHET.durable_task(
+ name: "durable_sync_task_with_dependencies",
+ deps: {
+ sync_dep: sync_dep,
+ async_dep: async_dep,
+ sync_cm_dep: sync_cm_dep,
+ async_cm_dep: async_cm_dep,
+ chained_dep: chained_dep,
+ chained_async_dep: chained_async_dep
+ }
+) do |input, ctx|
+ {
+ "sync_dep" => ctx.deps[:sync_dep],
+ "async_dep" => ctx.deps[:async_dep],
+ "async_cm_dep" => ctx.deps[:async_cm_dep],
+ "sync_cm_dep" => ctx.deps[:sync_cm_dep],
+ "chained_dep" => ctx.deps[:chained_dep],
+ "chained_async_dep" => ctx.deps[:chained_async_dep]
+ }
+end
+
+DI_WORKFLOW = HATCHET.workflow(name: "dependency-injection-workflow")
+
+# Workflow tasks with dependencies follow the same pattern
+DI_WORKFLOW.task(:wf_task_with_dependencies) do |input, ctx|
+ {
+ "sync_dep" => SYNC_DEPENDENCY_VALUE,
+ "async_dep" => ASYNC_DEPENDENCY_VALUE
+ }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "dependency-injection-worker",
+ workflows: [
+ ASYNC_TASK_WITH_DEPS,
+ SYNC_TASK_WITH_DEPS,
+ DURABLE_ASYNC_TASK_WITH_DEPS,
+ DURABLE_SYNC_TASK_WITH_DEPS,
+ DI_WORKFLOW
+ ]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/durable/test_durable_spec.rb b/sdks/ruby/examples/durable/test_durable_spec.rb
new file mode 100644
index 000000000..f2d1ab5f1
--- /dev/null
+++ b/sdks/ruby/examples/durable/test_durable_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "DurableWorkflow" do
+ it "completes a durable sleep then waits for event" do
+ ref = DURABLE_WORKFLOW.run_no_wait
+
+ # Wait for the sleep to complete
+ sleep(DURABLE_SLEEP_TIME + 2)
+
+ # Push the event to unblock the durable task
+ HATCHET.events.create(key: DURABLE_EVENT_KEY, data: { "test" => true })
+
+ result = ref.result
+ expect(result["durable_task"]["status"]).to eq("success")
+ end
+
+ it "handles multi-sleep in durable tasks" do
+ result = WAIT_FOR_SLEEP_TWICE.run
+
+ expect(result["runtime"]).to be >= DURABLE_SLEEP_TIME
+ end
+end
diff --git a/sdks/ruby/examples/durable/worker.rb b/sdks/ruby/examples/durable/worker.rb
new file mode 100644
index 000000000..a068ab305
--- /dev/null
+++ b/sdks/ruby/examples/durable/worker.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require "securerandom"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Create a durable workflow
+DURABLE_WORKFLOW = HATCHET.workflow(name: "DurableWorkflow")
+EPHEMERAL_WORKFLOW = HATCHET.workflow(name: "EphemeralWorkflow")
+
+# !!
+
+# > Add durable task
+DURABLE_EVENT_KEY = "durable-example:event"
+DURABLE_SLEEP_TIME = 5
+
+DURABLE_WORKFLOW.task(:ephemeral_task) do |input, ctx|
+ puts "Running non-durable task"
+end
+
+DURABLE_WORKFLOW.durable_task(:durable_task, execution_timeout: 60) do |input, ctx|
+ puts "Waiting for sleep"
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+ puts "Sleep finished"
+
+ puts "Waiting for event"
+ ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY, expression: "true")
+ )
+ puts "Event received"
+
+ { "status" => "success" }
+end
+
+# !!
+
+# > Add durable tasks that wait for or groups
+DURABLE_WORKFLOW.durable_task(:wait_for_or_group_1, execution_timeout: 60) do |input, ctx|
+ start = Time.now
+ wait_result = ctx.wait_for(
+ SecureRandom.hex(16),
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(DURABLE_SLEEP_TIME),
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY)
+ )
+ )
+
+ key = wait_result.keys.first
+ event_id = wait_result[key].keys.first
+
+ {
+ "runtime" => (Time.now - start).to_i,
+ "key" => key,
+ "event_id" => event_id
+ }
+end
+
+DURABLE_WORKFLOW.durable_task(:wait_for_or_group_2, execution_timeout: 120) do |input, ctx|
+ start = Time.now
+ wait_result = ctx.wait_for(
+ SecureRandom.hex(16),
+ Hatchet.or_(
+ Hatchet::SleepCondition.new(6 * DURABLE_SLEEP_TIME),
+ Hatchet::UserEventCondition.new(event_key: DURABLE_EVENT_KEY)
+ )
+ )
+
+ key = wait_result.keys.first
+ event_id = wait_result[key].keys.first
+
+ {
+ "runtime" => (Time.now - start).to_i,
+ "key" => key,
+ "event_id" => event_id
+ }
+end
+
+DURABLE_WORKFLOW.durable_task(:wait_for_multi_sleep, execution_timeout: 120) do |input, ctx|
+ start = Time.now
+
+ 3.times do
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+ end
+
+ { "runtime" => (Time.now - start).to_i }
+end
+
+EPHEMERAL_WORKFLOW.task(:ephemeral_task_2) do |input, ctx|
+ puts "Running non-durable task"
+end
+
+WAIT_FOR_SLEEP_TWICE = HATCHET.durable_task(name: "wait_for_sleep_twice", execution_timeout: 60) do |input, ctx|
+ start = Time.now
+
+ ctx.sleep_for(duration: DURABLE_SLEEP_TIME)
+
+ { "runtime" => (Time.now - start).to_i }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "durable-worker",
+ workflows: [DURABLE_WORKFLOW, EPHEMERAL_WORKFLOW, WAIT_FOR_SLEEP_TWICE]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/durable_event/worker.rb b/sdks/ruby/examples/durable_event/worker.rb
new file mode 100644
index 000000000..38404351d
--- /dev/null
+++ b/sdks/ruby/examples/durable_event/worker.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+DURABLE_EVENT_TASK_KEY = "user:update"
+
+# > Durable Event
+DURABLE_EVENT_TASK = HATCHET.durable_task(name: "DurableEventTask") do |input, ctx|
+ res = ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(event_key: "user:update")
+ )
+
+ puts "got event #{res}"
+end
+
+DURABLE_EVENT_TASK_WITH_FILTER = HATCHET.durable_task(name: "DurableEventWithFilterTask") do |input, ctx|
+
+# !!
+ # > Durable Event With Filter
+ res = ctx.wait_for(
+ "event",
+ Hatchet::UserEventCondition.new(
+ event_key: "user:update",
+ expression: "input.user_id == '1234'"
+ )
+ )
+
+ puts "got event #{res}"
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "durable-event-worker",
+ workflows: [DURABLE_EVENT_TASK, DURABLE_EVENT_TASK_WITH_FILTER]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/durable_sleep/worker.rb b/sdks/ruby/examples/durable_sleep/worker.rb
new file mode 100644
index 000000000..167d67d33
--- /dev/null
+++ b/sdks/ruby/examples/durable_sleep/worker.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Durable Sleep
+DURABLE_SLEEP_TASK = HATCHET.durable_task(name: "DurableSleepTask") do |input, ctx|
+ res = ctx.sleep_for(duration: 5)
+
+ puts "got result #{res}"
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("durable-sleep-worker", workflows: [DURABLE_SLEEP_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/events/event.rb b/sdks/ruby/examples/events/event.rb
new file mode 100644
index 000000000..3727b3ea0
--- /dev/null
+++ b/sdks/ruby/examples/events/event.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Event trigger
+HATCHET.event.push("user:create", { "should_skip" => false })
+# !!
+
+# > Event trigger with metadata
+HATCHET.event.push(
+ "user:create",
+ { "userId" => "1234", "should_skip" => false },
+ additional_metadata: { "source" => "api" }
+)
+# !!
diff --git a/sdks/ruby/examples/events/filter.rb b/sdks/ruby/examples/events/filter.rb
new file mode 100644
index 000000000..5b8fdf097
--- /dev/null
+++ b/sdks/ruby/examples/events/filter.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require_relative "worker"
+
+HATCHET_CLIENT = Hatchet::Client.new
+
+# > Create a filter
+HATCHET_CLIENT.filters.create(
+ workflow_id: EVENT_WORKFLOW.id,
+ expression: "input.should_skip == false",
+ scope: "foobarbaz",
+ payload: {
+ "main_character" => "Anna",
+ "supporting_character" => "Stiva",
+ "location" => "Moscow"
+ }
+)
+# !!
+
+# > Skip a run
+HATCHET_CLIENT.event.push(
+ EVENT_KEY,
+ { "should_skip" => true },
+ scope: "foobarbaz"
+)
+# !!
+
+# > Trigger a run
+HATCHET_CLIENT.event.push(
+ EVENT_KEY,
+ { "should_skip" => false },
+ scope: "foobarbaz"
+)
+# !!
diff --git a/sdks/ruby/examples/events/test_event_spec.rb b/sdks/ruby/examples/events/test_event_spec.rb
new file mode 100644
index 000000000..fb1228cf7
--- /dev/null
+++ b/sdks/ruby/examples/events/test_event_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "EventWorkflow" do
+ it "pushes an event" do
+ e = HATCHET.events.create(key: EVENT_KEY, data: { "should_skip" => false })
+ expect(e).not_to be_nil
+ end
+
+ it "bulk pushes events" do
+ events = [
+ { key: "event1", payload: { "message" => "Event 1", "should_skip" => false },
+ additional_metadata: { "source" => "test", "user_id" => "user123" } },
+ { key: "event2", payload: { "message" => "Event 2", "should_skip" => false },
+ additional_metadata: { "source" => "test", "user_id" => "user456" } },
+ { key: "event3", payload: { "message" => "Event 3", "should_skip" => false },
+ additional_metadata: { "source" => "test", "user_id" => "user789" } }
+ ]
+
+ result = HATCHET.events.bulk_push(events)
+ expect(result.events.length).to eq(3)
+ end
+end
diff --git a/sdks/ruby/examples/events/worker.rb b/sdks/ruby/examples/events/worker.rb
new file mode 100644
index 000000000..a798f0652
--- /dev/null
+++ b/sdks/ruby/examples/events/worker.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Event trigger
+EVENT_KEY = "user:create"
+SECONDARY_KEY = "foobarbaz"
+WILDCARD_KEY = "subscription:*"
+
+EVENT_WORKFLOW = HATCHET.workflow(
+ name: "EventWorkflow",
+ on_events: [EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY]
+)
+
+# !!
+
+# > Event trigger with filter
+EVENT_WORKFLOW_WITH_FILTER = HATCHET.workflow(
+ name: "EventWorkflow",
+ on_events: [EVENT_KEY, SECONDARY_KEY, WILDCARD_KEY],
+ default_filters: [
+ Hatchet::DefaultFilter.new(
+ expression: "true",
+ scope: "example-scope",
+ payload: {
+ "main_character" => "Anna",
+ "supporting_character" => "Stiva",
+ "location" => "Moscow"
+ }
+ )
+ ]
+)
+
+EVENT_WORKFLOW.task(:task) do |input, ctx|
+ puts "event received"
+ ctx.filter_payload
+end
+
+# !!
+
+# > Accessing the filter payload
+EVENT_WORKFLOW_WITH_FILTER.task(:filtered_task) do |input, ctx|
+ puts ctx.filter_payload.inspect
+end
+
+# !!
+
+def main
+  worker = HATCHET.worker("event-worker", workflows: [EVENT_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/fanout/example_child_spawn.rb b/sdks/ruby/examples/fanout/example_child_spawn.rb
new file mode 100644
index 000000000..86336f826
--- /dev/null
+++ b/sdks/ruby/examples/fanout/example_child_spawn.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Child spawn
+FANOUT_CHILD_WF.run({ "a" => "b" })
+# !!
+
+# > Error handling
+begin
+ FANOUT_CHILD_WF.run({ "a" => "b" })
+rescue StandardError => e
+ puts "Child workflow failed: #{e.message}"
+end
+# !!
diff --git a/sdks/ruby/examples/fanout/test_fanout_spec.rb b/sdks/ruby/examples/fanout/test_fanout_spec.rb
new file mode 100644
index 000000000..87089a4d2
--- /dev/null
+++ b/sdks/ruby/examples/fanout/test_fanout_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "FanoutParent" do
+ it "fans out to child workflows" do
+ result = FANOUT_PARENT_WF.run({ "n" => 5 })
+
+ results = result["spawn"]["results"]
+ expect(results.length).to eq(5)
+ end
+end
diff --git a/sdks/ruby/examples/fanout/trigger.rb b/sdks/ruby/examples/fanout/trigger.rb
new file mode 100644
index 000000000..8dfcb039e
--- /dev/null
+++ b/sdks/ruby/examples/fanout/trigger.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Bulk run children
+def run_child_workflows(n)
+ FANOUT_CHILD_WF.run_many(
+ n.times.map do |i|
+ FANOUT_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s }
+ )
+ end
+ )
+end
+# !!
diff --git a/sdks/ruby/examples/fanout/worker.rb b/sdks/ruby/examples/fanout/worker.rb
new file mode 100644
index 000000000..2376b4b63
--- /dev/null
+++ b/sdks/ruby/examples/fanout/worker.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > FanoutParent
+FANOUT_PARENT_WF = HATCHET.workflow(name: "FanoutParent")
+FANOUT_CHILD_WF = HATCHET.workflow(name: "FanoutChild")
+
+FANOUT_PARENT_WF.task(:spawn, execution_timeout: 300) do |input, ctx|
+ puts "spawning child"
+ n = input["n"] || 100
+
+ result = FANOUT_CHILD_WF.run_many(
+ n.times.map do |i|
+ FANOUT_CHILD_WF.create_bulk_run_item(
+ input: { "a" => i.to_s },
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "hello" => "earth" },
+ key: "child#{i}"
+ )
+ )
+ end
+ )
+
+ puts "results #{result}"
+ { "results" => result }
+end
+
+# !!
+
+# > FanoutChild
+FANOUT_CHILD_PROCESS = FANOUT_CHILD_WF.task(:process) do |input, ctx|
+ puts "child process #{input['a']}"
+ { "status" => input["a"] }
+end
+
+FANOUT_CHILD_WF.task(:process2, parents: [FANOUT_CHILD_PROCESS]) do |input, ctx|
+ process_output = ctx.task_output(FANOUT_CHILD_PROCESS)
+ a = process_output["status"]
+ { "status2" => "#{a}2" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("fanout-worker", slots: 40, workflows: [FANOUT_PARENT_WF, FANOUT_CHILD_WF])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/hatchet_client.rb b/sdks/ruby/examples/hatchet_client.rb
index 83177f1aa..49eea6aab 100644
--- a/sdks/ruby/examples/hatchet_client.rb
+++ b/sdks/ruby/examples/hatchet_client.rb
@@ -4,9 +4,9 @@ require 'hatchet-sdk'
# require_relative '../src/lib/hatchet-sdk'
# Initialize the Hatchet client
-hatchet = Hatchet::Client.new()
+HATCHET = Hatchet::Client.new() unless defined?(HATCHET)
-result = hatchet.events.create(
+result = HATCHET.events.create(
key: "test-event",
data: {
message: "test"
@@ -15,7 +15,7 @@ result = hatchet.events.create(
puts "Event created: #{result.inspect}"
-run = hatchet.runs.create(
+run = HATCHET.runs.create(
name: "simple",
input: {
Message: "test workflow run"
@@ -24,7 +24,7 @@ run = hatchet.runs.create(
puts "TriggeredRun ID: #{run.metadata.id}"
-result = hatchet.runs.poll(run.metadata.id)
+result = HATCHET.runs.poll(run.metadata.id)
puts "Runs client initialized: #{result.inspect}"
-puts "Run status: #{result.run.status}"
+puts "Run status: #{result.status}"
diff --git a/sdks/ruby/examples/logger/test_logger_spec.rb b/sdks/ruby/examples/logger/test_logger_spec.rb
new file mode 100644
index 000000000..3baabc851
--- /dev/null
+++ b/sdks/ruby/examples/logger/test_logger_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "LoggingWorkflow" do
+ it "runs the logging workflow" do
+ result = LOGGING_WORKFLOW.run
+
+ expect(result["root_logger"]["status"]).to eq("success")
+ end
+end
diff --git a/sdks/ruby/examples/logger/worker.rb b/sdks/ruby/examples/logger/worker.rb
new file mode 100644
index 000000000..75f0a9904
--- /dev/null
+++ b/sdks/ruby/examples/logger/worker.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+# > LoggingWorkflow
+
+require "hatchet-sdk"
+require "logger"
+
+logger = Logger.new($stdout)
+logger.level = Logger::INFO
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+LOGGING_WORKFLOW = HATCHET.workflow(name: "LoggingWorkflow")
+
+LOGGING_WORKFLOW.task(:root_logger) do |input, ctx|
+ 12.times do |i|
+ logger.info("executed step1 - #{i}")
+ logger.info({ "step1" => "step1" }.inspect)
+
+ sleep 0.1
+ end
+
+ { "status" => "success" }
+end
+
+# !!
+
+# > ContextLogger
+LOGGING_WORKFLOW.task(:context_logger) do |input, ctx|
+ 12.times do |i|
+ ctx.log("executed step1 - #{i}")
+ ctx.log({ "step1" => "step1" }.inspect)
+
+ sleep 0.1
+ end
+
+ { "status" => "success" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("logger-worker", slots: 5, workflows: [LOGGING_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/manual_slot_release/worker.rb b/sdks/ruby/examples/manual_slot_release/worker.rb
new file mode 100644
index 000000000..9c16a587e
--- /dev/null
+++ b/sdks/ruby/examples/manual_slot_release/worker.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > SlotRelease
+SLOT_RELEASE_WORKFLOW = HATCHET.workflow(name: "SlotReleaseWorkflow")
+
+SLOT_RELEASE_WORKFLOW.task(:step1) do |input, ctx|
+ puts "RESOURCE INTENSIVE PROCESS"
+ sleep 10
+
+ # Release the slot after the resource-intensive process, so that other steps can run
+ ctx.release_slot
+
+ puts "NON RESOURCE INTENSIVE PROCESS"
+ { "status" => "success" }
+end
+
+# !!
diff --git a/sdks/ruby/examples/non_retryable/test_no_retry_spec.rb b/sdks/ruby/examples/non_retryable/test_no_retry_spec.rb
new file mode 100644
index 000000000..f47917550
--- /dev/null
+++ b/sdks/ruby/examples/non_retryable/test_no_retry_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "NonRetryableWorkflow" do
+ it "does not retry non-retryable exceptions" do
+ ref = NON_RETRYABLE_WORKFLOW.run_no_wait
+
+ expect { ref.result }.to raise_error(Hatchet::FailedRunError)
+
+ # Poll until all task events have been recorded (replaces fixed sleep 3)
+ run_details = nil
+ 30.times do
+ run_details = HATCHET.runs.get_details(ref.workflow_run_id)
+ failed_events = run_details.task_events.select { |e| e.event_type == "FAILED" }
+ break if failed_events.length >= 3
+
+ sleep 0.5
+ end
+
+ # Only the task with the wrong exception type should have retrying events
+ retrying_events = run_details.task_events.select { |e| e.event_type == "RETRYING" }
+ expect(retrying_events.length).to eq(1)
+
+ # Three failed events: two failing initial runs + one retry failure
+ failed_events = run_details.task_events.select { |e| e.event_type == "FAILED" }
+ expect(failed_events.length).to eq(3)
+ end
+end
diff --git a/sdks/ruby/examples/non_retryable/worker.rb b/sdks/ruby/examples/non_retryable/worker.rb
new file mode 100644
index 000000000..9accea0ab
--- /dev/null
+++ b/sdks/ruby/examples/non_retryable/worker.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+NON_RETRYABLE_WORKFLOW = HATCHET.workflow(name: "NonRetryableWorkflow")
+
+# > Non-retryable task
+NON_RETRYABLE_WORKFLOW.task(:should_not_retry, retries: 1) do |input, ctx|
+ raise Hatchet::NonRetryableError, "This task should not retry"
+end
+
+NON_RETRYABLE_WORKFLOW.task(:should_retry_wrong_exception_type, retries: 1) do |input, ctx|
+ raise TypeError, "This task should retry because it's not a NonRetryableError"
+end
+
+NON_RETRYABLE_WORKFLOW.task(:should_not_retry_successful_task, retries: 1) do |input, ctx|
+ # no-op
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("non-retry-worker", workflows: [NON_RETRYABLE_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/on_failure/test_on_failure_spec.rb b/sdks/ruby/examples/on_failure/test_on_failure_spec.rb
new file mode 100644
index 000000000..daaf92503
--- /dev/null
+++ b/sdks/ruby/examples/on_failure/test_on_failure_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "OnFailureWorkflow" do
+ it "runs the on_failure task after workflow failure" do
+ ref = ON_FAILURE_WF.run_no_wait
+
+ expect { ref.result }.to raise_error(/step1 failed/)
+
+ # Poll until both tasks are in a terminal state (replaces fixed sleep 5)
+ details = nil
+ 30.times do
+ details = HATCHET.runs.get_details(ref.workflow_run_id)
+ break if details.tasks.length >= 2 && details.tasks.all? { |t| %w[COMPLETED FAILED].include?(t.status) }
+
+ sleep 0.5
+ end
+
+ expect(details.tasks.length).to eq(2)
+
+ completed_count = details.tasks.count { |t| t.status == "COMPLETED" }
+ failed_count = details.tasks.count { |t| t.status == "FAILED" }
+
+ expect(completed_count).to eq(1)
+ expect(failed_count).to eq(1)
+
+ completed_task = details.tasks.find { |t| t.status == "COMPLETED" }
+ failed_task = details.tasks.find { |t| t.status == "FAILED" }
+
+ expect(completed_task.display_name).to include("on_failure")
+ expect(failed_task.display_name).to include("step1")
+ end
+end
diff --git a/sdks/ruby/examples/on_failure/worker.rb b/sdks/ruby/examples/on_failure/worker.rb
new file mode 100644
index 000000000..2912dc533
--- /dev/null
+++ b/sdks/ruby/examples/on_failure/worker.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+ERROR_TEXT = "step1 failed"
+
+# > OnFailure Step
+# This workflow will fail because the step will throw an error
+# we define an onFailure step to handle this case
+
+ON_FAILURE_WF = HATCHET.workflow(name: "OnFailureWorkflow")
+
+ON_FAILURE_WF.task(:step1, execution_timeout: 1) do |input, ctx|
+ # This step will always raise an exception
+ raise ERROR_TEXT
+end
+
+# After the workflow fails, this special step will run
+ON_FAILURE_WF.on_failure_task do |input, ctx|
+ # We can do things like perform cleanup logic
+ # or notify a user here
+
+ # Fetch the errors from upstream step runs from the context
+ puts ctx.task_run_errors.inspect
+
+ { "status" => "success" }
+end
+
+# !!
+
+# > OnFailure With Details
+# We can access the failure details in the onFailure step
+# via the context method
+
+ON_FAILURE_WF_WITH_DETAILS = HATCHET.workflow(name: "OnFailureWorkflowWithDetails")
+
+DETAILS_STEP1 = ON_FAILURE_WF_WITH_DETAILS.task(:details_step1, execution_timeout: 1) do |input, ctx|
+ raise ERROR_TEXT
+end
+
+# After the workflow fails, this special step will run
+ON_FAILURE_WF_WITH_DETAILS.on_failure_task do |input, ctx|
+ error = ctx.get_task_run_error(DETAILS_STEP1)
+
+ unless error
+ next { "status" => "unexpected success" }
+ end
+
+ # We can access the failure details here
+ raise "Expected Hatchet::TaskRunError" unless error.is_a?(Hatchet::TaskRunError)
+
+ if error.message.include?("step1 failed")
+ next {
+ "status" => "success",
+ "failed_run_external_id" => error.task_run_external_id
+ }
+ end
+
+ raise "unexpected failure"
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "on-failure-worker",
+ slots: 4,
+ workflows: [ON_FAILURE_WF, ON_FAILURE_WF_WITH_DETAILS]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/on_success/worker.rb b/sdks/ruby/examples/on_success/worker.rb
new file mode 100644
index 000000000..3daa05ae7
--- /dev/null
+++ b/sdks/ruby/examples/on_success/worker.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+ON_SUCCESS_WORKFLOW = HATCHET.workflow(name: "OnSuccessWorkflow")
+
+FIRST_TASK = ON_SUCCESS_WORKFLOW.task(:first_task) do |input, ctx|
+ puts "First task completed successfully"
+end
+
+SECOND_TASK = ON_SUCCESS_WORKFLOW.task(:second_task, parents: [FIRST_TASK]) do |input, ctx|
+ puts "Second task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.task(:third_task, parents: [FIRST_TASK, SECOND_TASK]) do |input, ctx|
+ puts "Third task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.task(:fourth_task) do |input, ctx|
+ puts "Fourth task completed successfully"
+end
+
+ON_SUCCESS_WORKFLOW.on_success_task do |input, ctx|
+ puts "On success task completed successfully"
+end
+
+def main
+ worker = HATCHET.worker("on-success-worker", workflows: [ON_SUCCESS_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/priority/test_priority_spec.rb b/sdks/ruby/examples/priority/test_priority_spec.rb
new file mode 100644
index 000000000..2d3e59592
--- /dev/null
+++ b/sdks/ruby/examples/priority/test_priority_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "PriorityWorkflow" do
+ def priority_to_int(priority)
+ case priority
+ when "high" then 3
+ when "medium" then 2
+ when "low" then 1
+ when "default" then DEFAULT_PRIORITY
+ else raise "Invalid priority: #{priority}"
+ end
+ end
+
+ it "executes runs in priority order" do
+ test_run_id = SecureRandom.uuid
+ choices = %w[low medium high default]
+ n = 30
+
+ run_refs = PRIORITY_WORKFLOW.run_many_no_wait(
+ n.times.map do |ix|
+ priority = choices.sample
+ PRIORITY_WORKFLOW.create_bulk_run_item(
+ options: Hatchet::TriggerWorkflowOptions.new(
+ priority: priority_to_int(priority),
+ additional_metadata: {
+ "priority" => priority,
+ "key" => ix,
+ "test_run_id" => test_run_id
+ }
+ )
+ )
+ end
+ )
+
+ # Wait for all runs to complete
+ run_refs.each(&:result)
+
+ workflows = HATCHET.workflows.list(workflow_name: PRIORITY_WORKFLOW.name)
+ expect(workflows.rows).not_to be_empty
+
+    workflow = workflows.rows.find { |w| w.name == HATCHET.config.apply_namespace(PRIORITY_WORKFLOW.name) }
+ expect(workflow).not_to be_nil
+
+ runs = HATCHET.runs.list(
+ workflow_ids: [workflow.metadata.id],
+ additional_metadata: { "test_run_id" => test_run_id },
+ limit: 1000
+ )
+
+ sorted_runs = runs.rows.select(&:started_at).sort_by(&:started_at)
+ expect(sorted_runs.length).to eq(n)
+
+ sorted_runs.each_cons(2) do |curr, nxt|
+ curr_priority = (curr.additional_metadata || {})["priority"] || "low"
+ nxt_priority = (nxt.additional_metadata || {})["priority"] || "low"
+
+ # Run start times should be in order of priority
+ expect(priority_to_int(curr_priority)).to be >= priority_to_int(nxt_priority)
+ end
+ end
+end
diff --git a/sdks/ruby/examples/priority/trigger.rb b/sdks/ruby/examples/priority/trigger.rb
new file mode 100644
index 000000000..90bacf20f
--- /dev/null
+++ b/sdks/ruby/examples/priority/trigger.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Runtime priority
+low_prio = PRIORITY_WORKFLOW.run_no_wait(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ priority: 1,
+ additional_metadata: { "priority" => "low", "key" => 1 }
+ )
+)
+
+high_prio = PRIORITY_WORKFLOW.run_no_wait(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ priority: 3,
+ additional_metadata: { "priority" => "high", "key" => 1 }
+ )
+)
+# !!
+
+# > Scheduled priority
+schedule = PRIORITY_WORKFLOW.schedule(
+ Time.now + 60,
+ options: Hatchet::TriggerWorkflowOptions.new(priority: 3)
+)
+
+cron = PRIORITY_WORKFLOW.create_cron(
+ "my-scheduled-cron",
+ "0 * * * *",
+ input: {},
+)
+# !!
diff --git a/sdks/ruby/examples/priority/worker.rb b/sdks/ruby/examples/priority/worker.rb
new file mode 100644
index 000000000..20c44f19a
--- /dev/null
+++ b/sdks/ruby/examples/priority/worker.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Default priority
+DEFAULT_PRIORITY = 1
+SLEEP_TIME = 0.25
+
+PRIORITY_WORKFLOW = HATCHET.workflow(
+ name: "PriorityWorkflow",
+ default_priority: DEFAULT_PRIORITY
+)
+
+PRIORITY_WORKFLOW.task(:priority_task) do |input, ctx|
+ puts "Priority: #{ctx.priority}"
+ sleep SLEEP_TIME
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "priority-worker",
+ slots: 1,
+ workflows: [PRIORITY_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/quickstart/run.rb b/sdks/ruby/examples/quickstart/run.rb
new file mode 100644
index 000000000..8b72cb8d3
--- /dev/null
+++ b/sdks/ruby/examples/quickstart/run.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require_relative "workflows/first_task"
+
+# > Run a task
+result = FIRST_TASK.run({ "message" => "Hello World!" })
+puts "Finished running task: #{result['transformed_message']}"
+# !!
diff --git a/sdks/ruby/examples/quickstart/workflows/first_task.rb b/sdks/ruby/examples/quickstart/workflows/first_task.rb
new file mode 100644
index 000000000..15c8a8444
--- /dev/null
+++ b/sdks/ruby/examples/quickstart/workflows/first_task.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Simple task
+FIRST_TASK = HATCHET.task(name: "first-task") do |input, ctx|
+ puts "first-task called"
+ { "transformed_message" => input["message"].downcase }
+end
+# !!
diff --git a/sdks/ruby/examples/rate_limit/worker.rb b/sdks/ruby/examples/rate_limit/worker.rb
new file mode 100644
index 000000000..62932b0a2
--- /dev/null
+++ b/sdks/ruby/examples/rate_limit/worker.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > Workflow
+RATE_LIMIT_WORKFLOW = HATCHET.workflow(name: "RateLimitWorkflow")
+
+# !!
+
+# > Static
+RATE_LIMIT_KEY = "test-limit"
+
+RATE_LIMIT_WORKFLOW.task(
+ :step_1,
+ rate_limits: [Hatchet::RateLimit.new(static_key: RATE_LIMIT_KEY, units: 1)]
+) do |input, ctx|
+ puts "executed step_1"
+end
+
+# !!
+
+# > Dynamic
+RATE_LIMIT_WORKFLOW.task(
+ :step_2,
+ rate_limits: [
+ Hatchet::RateLimit.new(
+ dynamic_key: "input.user_id",
+ units: 1,
+ limit: 10,
+ duration: :minute
+ )
+ ]
+) do |input, ctx|
+ puts "executed step_2"
+end
+
+# !!
+
+# > Create a rate limit
+def main
+ HATCHET.rate_limits.put(RATE_LIMIT_KEY, 2, :second)
+
+ worker = HATCHET.worker(
+ "rate-limit-worker", slots: 10, workflows: [RATE_LIMIT_WORKFLOW]
+ )
+ worker.start
+end
+
+# !!
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/retries/worker.rb b/sdks/ruby/examples/retries/worker.rb
new file mode 100644
index 000000000..8152cd6ce
--- /dev/null
+++ b/sdks/ruby/examples/retries/worker.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SIMPLE_RETRY_WORKFLOW = HATCHET.workflow(name: "SimpleRetryWorkflow")
+BACKOFF_WORKFLOW = HATCHET.workflow(name: "BackoffWorkflow")
+
+# > Simple Step Retries
+SIMPLE_RETRY_WORKFLOW.task(:always_fail, retries: 3) do |input, ctx|
+ raise "simple task failed"
+end
+
+# !!
+
+# > Retries with Count
+SIMPLE_RETRY_WORKFLOW.task(:fail_twice, retries: 3) do |input, ctx|
+ raise "simple task failed" if ctx.retry_count < 2
+
+ { "status" => "success" }
+end
+
+# !!
+
+# > Retries with Backoff
+BACKOFF_WORKFLOW.task(
+ :backoff_task,
+ retries: 10,
+ # Maximum number of seconds to wait between retries
+ backoff_max_seconds: 10,
+ # Factor to increase the wait time between retries.
+ # This sequence will be 2s, 4s, 8s, 10s, 10s, 10s... due to the maxSeconds limit
+ backoff_factor: 2.0
+) do |input, ctx|
+ raise "backoff task failed" if ctx.retry_count < 3
+
+ { "status" => "success" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("backoff-worker", slots: 4, workflows: [BACKOFF_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/return_exceptions/test_return_exceptions_spec.rb b/sdks/ruby/examples/return_exceptions/test_return_exceptions_spec.rb
new file mode 100644
index 000000000..a636fbc87
--- /dev/null
+++ b/sdks/ruby/examples/return_exceptions/test_return_exceptions_spec.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "ReturnExceptionsTask" do
+ it "returns exceptions for failed tasks and results for successful ones" do
+ results = RETURN_EXCEPTIONS_TASK.run_many(
+ 10.times.map do |i|
+ RETURN_EXCEPTIONS_TASK.create_bulk_run_item(
+ input: { "index" => i }
+ )
+ end,
+ return_exceptions: true
+ )
+
+ results.each_with_index do |result, i|
+ if i.even?
+ expect(result).to be_a(Exception)
+ expect(result.message).to include("error in task with index #{i}")
+ else
+ expect(result).to eq({ "message" => "this is a successful task." })
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/examples/return_exceptions/worker.rb b/sdks/ruby/examples/return_exceptions/worker.rb
new file mode 100644
index 000000000..6039a1915
--- /dev/null
+++ b/sdks/ruby/examples/return_exceptions/worker.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+RETURN_EXCEPTIONS_TASK = HATCHET.task(name: "return_exceptions_task") do |input, ctx|
+ if input["index"].to_i.even?
+ raise "error in task with index #{input['index']}"
+ end
+
+ { "message" => "this is a successful task." }
+end
diff --git a/sdks/ruby/examples/run_details/worker.rb b/sdks/ruby/examples/run_details/worker.rb
new file mode 100644
index 000000000..952cfccde
--- /dev/null
+++ b/sdks/ruby/examples/run_details/worker.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+RUN_DETAIL_TEST_WORKFLOW = HATCHET.workflow(name: "RunDetailTest")
+
+DETAIL_STEP1 = RUN_DETAIL_TEST_WORKFLOW.task(:step1) do |input, ctx|
+ { "random_number" => rand(1..100) }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:cancel_step) do |input, ctx|
+ ctx.cancel
+ 10.times { sleep 1 }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:fail_step) do |input, ctx|
+ raise "Intentional Failure"
+end
+
+DETAIL_STEP2 = RUN_DETAIL_TEST_WORKFLOW.task(:step2) do |input, ctx|
+ sleep 5
+ { "random_number" => rand(1..100) }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:step3, parents: [DETAIL_STEP1, DETAIL_STEP2]) do |input, ctx|
+ one = ctx.task_output(DETAIL_STEP1)["random_number"]
+ two = ctx.task_output(DETAIL_STEP2)["random_number"]
+
+ { "sum" => one + two }
+end
+
+RUN_DETAIL_TEST_WORKFLOW.task(:step4, parents: [DETAIL_STEP1, :step3]) do |input, ctx|
+ puts(
+ "executed step4",
+ Time.now.strftime("%H:%M:%S"),
+ input.inspect,
+ ctx.task_output(DETAIL_STEP1).inspect,
+ ctx.task_output(:step3).inspect
+ )
+
+ { "step4" => "step4" }
+end
+
+def main
+ worker = HATCHET.worker("run-detail-worker", workflows: [RUN_DETAIL_TEST_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/scheduled/programatic_sync.rb b/sdks/ruby/examples/scheduled/programatic_sync.rb
new file mode 100644
index 000000000..cd799e019
--- /dev/null
+++ b/sdks/ruby/examples/scheduled/programatic_sync.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+# > Create
+scheduled_run = hatchet.scheduled.create(
+ workflow_name: "simple-workflow",
+ trigger_at: Time.now + 10,
+ input: { "data" => "simple-workflow-data" },
+ additional_metadata: { "customer_id" => "customer-a" }
+)
+
+id = scheduled_run.metadata.id
+# !!
+
+# > Reschedule
+hatchet.scheduled.update(
+ scheduled_run.metadata.id,
+ trigger_at: Time.now + 3600
+)
+# !!
+
+# > Delete
+hatchet.scheduled.delete(scheduled_run.metadata.id)
+# !!
+
+# > List
+scheduled_runs = hatchet.scheduled.list
+# !!
+
+# > Bulk delete
+hatchet.scheduled.bulk_delete(scheduled_ids: [id])
+# !!
+
+# > Bulk reschedule
+hatchet.scheduled.bulk_update(
+ [[id, Time.now + 7200]]
+)
+# !!
diff --git a/sdks/ruby/examples/scheduled/worker.rb b/sdks/ruby/examples/scheduled/worker.rb
new file mode 100644
index 000000000..448e84bec
--- /dev/null
+++ b/sdks/ruby/examples/scheduled/worker.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SCHEDULED_WORKFLOW = HATCHET.workflow(name: "ScheduledWorkflow")
+
+SCHEDULED_WORKFLOW.task(:scheduled_task) do |input, ctx|
+ puts "Scheduled task executed at #{Time.now}"
+ { "status" => "success" }
+end
+
+# > Programmatic Schedule
+def schedule_workflow
+ future_time = Time.now + 60 # 1 minute from now
+ SCHEDULED_WORKFLOW.schedule(future_time, input: { "message" => "scheduled run" })
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("scheduled-worker", workflows: [SCHEDULED_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/serde/test_serde_spec.rb b/sdks/ruby/examples/serde/test_serde_spec.rb
new file mode 100644
index 000000000..d601ee024
--- /dev/null
+++ b/sdks/ruby/examples/serde/test_serde_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "SerdeWorkflow" do
+ it "compresses and decompresses via custom serde" do
+ result = SERDE_WORKFLOW.run
+
+ # The generate_result output should be compressed (not equal to the raw value)
+ expect(result["generate_result"]["result"]).not_to eq("my_result")
+
+ # The read_result step should decompress and return the original value
+ expect(result["read_result"]["final_result"]).to eq("my_result")
+ end
+end
diff --git a/sdks/ruby/examples/serde/worker.rb b/sdks/ruby/examples/serde/worker.rb
new file mode 100644
index 000000000..ee21b8298
--- /dev/null
+++ b/sdks/ruby/examples/serde/worker.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+# > Custom Serialization/Deserialization
+
+require "hatchet-sdk"
+require "base64"
+require "zlib"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SERDE_WORKFLOW = HATCHET.workflow(name: "serde-example-workflow")
+
+GENERATE_RESULT = SERDE_WORKFLOW.task(:generate_result) do |input, ctx|
+ compressed = Base64.strict_encode64(Zlib::Deflate.deflate("my_result"))
+ { "result" => compressed }
+end
+
+SERDE_WORKFLOW.task(:read_result, parents: [GENERATE_RESULT]) do |input, ctx|
+ encoded = ctx.task_output(GENERATE_RESULT)["result"]
+ decoded = Zlib::Inflate.inflate(Base64.strict_decode64(encoded))
+ { "final_result" => decoded }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SERDE_WORKFLOW])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/simple/schedule.rb b/sdks/ruby/examples/simple/schedule.rb
new file mode 100644
index 000000000..52d1c339c
--- /dev/null
+++ b/sdks/ruby/examples/simple/schedule.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Schedule a task
+schedule = SIMPLE.schedule(Time.now + 86_400, input: { "message" => "Hello, World!" })
+
+## do something with the id
+puts schedule.metadata.id
+# !!
diff --git a/sdks/ruby/examples/simple/test_simple_spec.rb b/sdks/ruby/examples/simple/test_simple_spec.rb
new file mode 100644
index 000000000..dabfc5b65
--- /dev/null
+++ b/sdks/ruby/examples/simple/test_simple_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "SimpleWorkflow" do
+ it "runs simple task via run" do
+ result = SIMPLE.run
+ expect(result).to eq({ "result" => "Hello, world!" })
+ end
+
+ it "runs simple task via run_no_wait" do
+ ref = SIMPLE.run_no_wait
+ result = ref.result
+ expect(result).to eq({ "result" => "Hello, world!" })
+ end
+
+ it "runs simple task via run_many" do
+ results = SIMPLE.run_many([SIMPLE.create_bulk_run_item])
+ expect(results.first).to eq({ "result" => "Hello, world!" })
+ end
+
+ it "runs simple durable task" do
+ result = SIMPLE_DURABLE.run
+ expect(result).to eq({ "result" => "Hello, world!" })
+ end
+end
diff --git a/sdks/ruby/examples/simple/trigger.rb b/sdks/ruby/examples/simple/trigger.rb
new file mode 100644
index 000000000..0a38e847e
--- /dev/null
+++ b/sdks/ruby/examples/simple/trigger.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Run a task
+result = SIMPLE.run({ "message" => "Hello, World!" })
+puts result
+# !!
diff --git a/sdks/ruby/examples/simple/trigger_with_metadata.rb b/sdks/ruby/examples/simple/trigger_with_metadata.rb
new file mode 100644
index 000000000..562a0af20
--- /dev/null
+++ b/sdks/ruby/examples/simple/trigger_with_metadata.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Trigger with metadata
+SIMPLE.run(
+ {},
+ options: Hatchet::TriggerWorkflowOptions.new(
+ additional_metadata: { "source" => "api" }
+ )
+)
+# !!
diff --git a/sdks/ruby/examples/simple/worker.rb b/sdks/ruby/examples/simple/worker.rb
new file mode 100644
index 000000000..e2b3e9684
--- /dev/null
+++ b/sdks/ruby/examples/simple/worker.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+# > Simple
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+SIMPLE = HATCHET.task(name: "simple") do |input, ctx|
+ { "result" => "Hello, world!" }
+end
+
+SIMPLE_DURABLE = HATCHET.durable_task(name: "simple_durable") do |input, ctx|
+ result = SIMPLE.run(input)
+ { "result" => result["result"] }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [SIMPLE, SIMPLE_DURABLE])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/simple/workflow.rb b/sdks/ruby/examples/simple/workflow.rb
new file mode 100644
index 000000000..5eeb0f348
--- /dev/null
+++ b/sdks/ruby/examples/simple/workflow.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# > Define a workflow
+EXAMPLE_WORKFLOW = HATCHET.workflow(name: "example-workflow")
+
+# !!
diff --git a/sdks/ruby/examples/spec_helper.rb b/sdks/ruby/examples/spec_helper.rb
new file mode 100644
index 000000000..d67386a33
--- /dev/null
+++ b/sdks/ruby/examples/spec_helper.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+require_relative "worker_fixture"
+
+RSpec.configure do |config|
+ config.expect_with :rspec do |expectations|
+ expectations.include_chain_clauses_in_custom_matcher_descriptions = true
+ end
+
+ config.mock_with :rspec do |mocks|
+ mocks.verify_partial_doubles = true
+ end
+
+ config.shared_context_metadata_behavior = :apply_to_host_groups
+ config.filter_run_when_matching :focus
+ config.order = :random
+
+ # Session-scoped Hatchet client
+ config.add_setting :hatchet_client
+ config.before(:suite) do
+ RSpec.configuration.hatchet_client = Hatchet::Client.new(debug: true)
+ end
+end
+
+# Helper to access the shared Hatchet client in tests
+def hatchet
+ RSpec.configuration.hatchet_client
+end
diff --git a/sdks/ruby/examples/sticky_workers/worker.rb b/sdks/ruby/examples/sticky_workers/worker.rb
new file mode 100644
index 000000000..000be2ad6
--- /dev/null
+++ b/sdks/ruby/examples/sticky_workers/worker.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > StickyWorker
+STICKY_WORKFLOW = HATCHET.workflow(
+ name: "StickyWorkflow",
+ # Specify a sticky strategy when declaring the workflow
+ sticky: :soft
+)
+
+STEP1A = STICKY_WORKFLOW.task(:step1a) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+STEP1B = STICKY_WORKFLOW.task(:step1b) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+# !!
+
+# > StickyChild
+STICKY_CHILD_WORKFLOW = HATCHET.workflow(
+ name: "StickyChildWorkflow",
+ sticky: :soft
+)
+
+STICKY_WORKFLOW.task(:step2, parents: [STEP1A, STEP1B]) do |input, ctx|
+ ref = STICKY_CHILD_WORKFLOW.run_no_wait(
+ options: Hatchet::TriggerWorkflowOptions.new(sticky: true)
+ )
+
+ ref.result
+
+ { "worker" => ctx.worker.id }
+end
+
+STICKY_CHILD_WORKFLOW.task(:child) do |input, ctx|
+ { "worker" => ctx.worker.id }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "sticky-worker", slots: 10, workflows: [STICKY_WORKFLOW, STICKY_CHILD_WORKFLOW]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/streaming/async_stream.rb b/sdks/ruby/examples/streaming/async_stream.rb
new file mode 100644
index 000000000..7723e504f
--- /dev/null
+++ b/sdks/ruby/examples/streaming/async_stream.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require_relative "worker"
+
+# > Consume
+ref = STREAM_TASK.run_no_wait
+
+HATCHET.runs.subscribe_to_stream(ref.workflow_run_id) do |chunk|
+ print chunk
+end
+# !!
diff --git a/sdks/ruby/examples/streaming/test_streaming_spec.rb b/sdks/ruby/examples/streaming/test_streaming_spec.rb
new file mode 100644
index 000000000..2eb5c73f2
--- /dev/null
+++ b/sdks/ruby/examples/streaming/test_streaming_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "StreamTask" do
+ it "streams chunks in order and completely" do
+ ref = STREAM_TASK.run_no_wait
+
+ received_chunks = []
+ HATCHET.runs.subscribe_to_stream(ref.workflow_run_id) do |chunk|
+ received_chunks << chunk
+ end
+
+ ref.result
+
+ expect(received_chunks.length).to eq(STREAM_CHUNKS.length)
+
+ received_chunks.each_with_index do |chunk, ix|
+ expect(chunk).to eq(STREAM_CHUNKS[ix])
+ end
+
+ expect(received_chunks.join).to eq(STREAM_CHUNKS.join)
+ end
+end
diff --git a/sdks/ruby/examples/streaming/worker.rb b/sdks/ruby/examples/streaming/worker.rb
new file mode 100644
index 000000000..59c89e336
--- /dev/null
+++ b/sdks/ruby/examples/streaming/worker.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: false) unless defined?(HATCHET)
+
+# > Streaming
+ANNA_KARENINA = <<~TEXT
+ Happy families are all alike; every unhappy family is unhappy in its own way.
+
+ Everything was in confusion in the Oblonskys' house. The wife had discovered that the husband was carrying on an intrigue with a French girl, who had been a governess in their family, and she had announced to her husband that she could not go on living in the same house with him.
+TEXT
+
+STREAM_CHUNKS = ANNA_KARENINA.scan(/.{1,10}/)
+
+STREAM_TASK = HATCHET.task(name: "stream_task") do |input, ctx|
+ # Sleeping to avoid race conditions
+ sleep 2
+
+ STREAM_CHUNKS.each do |chunk|
+ ctx.put_stream(chunk)
+ sleep 0.20
+ end
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("test-worker", workflows: [STREAM_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/timeout/test_timeout_spec.rb b/sdks/ruby/examples/timeout/test_timeout_spec.rb
new file mode 100644
index 000000000..bbf7f14b6
--- /dev/null
+++ b/sdks/ruby/examples/timeout/test_timeout_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "TimeoutWorkflow" do
+ it "times out on execution timeout" do
+ ref = TIMEOUT_WF.run_no_wait
+
+ expect { ref.result }.to raise_error(
+ /Task exceeded timeout|TIMED_OUT|Workflow run .* failed with multiple errors/
+ )
+ end
+
+ it "succeeds when timeout is refreshed" do
+ result = REFRESH_TIMEOUT_WF.run
+
+ expect(result["refresh_task"]["status"]).to eq("success")
+ end
+end
diff --git a/sdks/ruby/examples/timeout/worker.rb b/sdks/ruby/examples/timeout/worker.rb
new file mode 100644
index 000000000..ce2e98068
--- /dev/null
+++ b/sdks/ruby/examples/timeout/worker.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+# > ScheduleTimeout
+TIMEOUT_WF = HATCHET.workflow(
+ name: "TimeoutWorkflow",
+ task_defaults: { execution_timeout: 120 } # 2 minutes
+)
+
+# !!
+
+# > ExecutionTimeout
+# Specify an execution timeout on a task
+TIMEOUT_WF.task(:timeout_task, execution_timeout: 5, schedule_timeout: 600) do |input, ctx|
+ sleep 30
+ { "status" => "success" }
+end
+
+REFRESH_TIMEOUT_WF = HATCHET.workflow(name: "RefreshTimeoutWorkflow")
+
+# !!
+
+# > RefreshTimeout
+REFRESH_TIMEOUT_WF.task(:refresh_task, execution_timeout: 4) do |input, ctx|
+ ctx.refresh_timeout(10)
+ sleep 5
+
+ { "status" => "success" }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker(
+ "timeout-worker", slots: 4, workflows: [TIMEOUT_WF, REFRESH_TIMEOUT_WF]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/trigger_methods/workflow.rb b/sdks/ruby/examples/trigger_methods/workflow.rb
new file mode 100644
index 000000000..233fbc403
--- /dev/null
+++ b/sdks/ruby/examples/trigger_methods/workflow.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+hatchet = Hatchet::Client.new
+
+# > Define a task
+SAY_HELLO = hatchet.task(name: "say_hello") do |input, ctx|
+ { "greeting" => "Hello, #{input['name']}!" }
+end
+# !!
+
+# > Sync
+ref = SAY_HELLO.run_no_wait({ "name" => "World" })
+# !!
+
+# > Async
+# In Ruby, run_no_wait is the equivalent of async enqueuing
+ref = SAY_HELLO.run_no_wait({ "name" => "World" })
+# !!
+
+# > Result sync
+result = ref.result
+# !!
+
+# > Result async
+# In Ruby, result is synchronous - use poll for async-like behavior
+result = ref.result
+# !!
diff --git a/sdks/ruby/examples/unit_testing/test_unit_spec.rb b/sdks/ruby/examples/unit_testing/test_unit_spec.rb
new file mode 100644
index 000000000..276d8c3ef
--- /dev/null
+++ b/sdks/ruby/examples/unit_testing/test_unit_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "UnitTesting" do
+ let(:input) { { "key" => "test_key", "number" => 42 } }
+ let(:additional_metadata) { { "meta_key" => "meta_value" } }
+ let(:retry_count) { 1 }
+
+ let(:expected_output) do
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => additional_metadata,
+ "retry_count" => retry_count
+ }
+ end
+
+ [
+ :SYNC_STANDALONE,
+ :DURABLE_SYNC_STANDALONE
+ ].each do |const|
+ it "unit tests #{const}" do
+ task = Object.const_get(const)
+ result = task.mock_run(
+ input: input,
+ additional_metadata: additional_metadata,
+ retry_count: retry_count
+ )
+
+ expect(result).to eq(expected_output)
+ end
+ end
+
+ it "unit tests complex workflow with parent outputs" do
+ task = COMPLEX_UNIT_TEST_WORKFLOW
+ parent_output = expected_output
+
+ result = task.tasks[:sync_complex_workflow].mock_run(
+ input: input,
+ additional_metadata: additional_metadata,
+ retry_count: retry_count,
+ parent_outputs: { "start" => parent_output }
+ )
+
+ expect(result).to eq(parent_output)
+ end
+end
diff --git a/sdks/ruby/examples/unit_testing/worker.rb b/sdks/ruby/examples/unit_testing/worker.rb
new file mode 100644
index 000000000..19b94a760
--- /dev/null
+++ b/sdks/ruby/examples/unit_testing/worker.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new unless defined?(HATCHET)
+
+# Unit test workflow definitions
+SYNC_STANDALONE = HATCHET.task(name: "sync_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+ASYNC_STANDALONE = HATCHET.task(name: "async_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+DURABLE_SYNC_STANDALONE = HATCHET.durable_task(name: "durable_sync_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+DURABLE_ASYNC_STANDALONE = HATCHET.durable_task(name: "durable_async_standalone") do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+SIMPLE_UNIT_TEST_WORKFLOW = HATCHET.workflow(name: "simple-unit-test-workflow")
+
+SIMPLE_UNIT_TEST_WORKFLOW.task(:sync_simple_workflow) do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+COMPLEX_UNIT_TEST_WORKFLOW = HATCHET.workflow(name: "complex-unit-test-workflow")
+
+UNIT_START = COMPLEX_UNIT_TEST_WORKFLOW.task(:start) do |input, ctx|
+ {
+ "key" => input["key"],
+ "number" => input["number"],
+ "additional_metadata" => ctx.additional_metadata,
+ "retry_count" => ctx.retry_count
+ }
+end
+
+COMPLEX_UNIT_TEST_WORKFLOW.task(:sync_complex_workflow, parents: [UNIT_START]) do |input, ctx|
+ ctx.task_output(UNIT_START)
+end
diff --git a/sdks/ruby/examples/webhook_with_scope/test_webhooks_with_scope_spec.rb b/sdks/ruby/examples/webhook_with_scope/test_webhooks_with_scope_spec.rb
new file mode 100644
index 000000000..d2a20e4f1
--- /dev/null
+++ b/sdks/ruby/examples/webhook_with_scope/test_webhooks_with_scope_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require "net/http"
+require "json"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "WebhookWithScope" do
+ TEST_BASIC_USERNAME = "test_user" unless defined?(TEST_BASIC_USERNAME)
+ TEST_BASIC_PASSWORD = "test_password" unless defined?(TEST_BASIC_PASSWORD)
+
+ def send_webhook_request(url, body, username: TEST_BASIC_USERNAME, password: TEST_BASIC_PASSWORD)
+ uri = URI(url)
+ request = Net::HTTP::Post.new(uri)
+ request.basic_auth(username, password)
+ request.content_type = "application/json"
+ request.body = body.to_json
+
+ Net::HTTP.start(uri.hostname, uri.port) do |http|
+ http.request(request)
+ end
+ end
+
+ it "routes webhooks based on scope expression from payload" do
+ skip "Requires webhook infrastructure to be running"
+
+ test_run_id = SecureRandom.uuid
+ test_start = Time.now.utc
+
+ # Create webhook with scope expression, send scoped request, verify routing
+ # Full implementation depends on the webhook API being available
+ end
+
+ it "applies static payload to webhook events" do
+ skip "Requires webhook infrastructure to be running"
+
+ test_run_id = SecureRandom.uuid
+ test_start = Time.now.utc
+
+ # Create webhook with static payload, send request, verify merged payload
+ end
+end
diff --git a/sdks/ruby/examples/webhook_with_scope/worker.rb b/sdks/ruby/examples/webhook_with_scope/worker.rb
new file mode 100644
index 000000000..ce12a2ea5
--- /dev/null
+++ b/sdks/ruby/examples/webhook_with_scope/worker.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+WEBHOOK_WITH_SCOPE = HATCHET.task(
+ name: "webhook_with_scope",
+ on_events: ["webhook-scope:test"],
+ default_filters: [
+ Hatchet::DefaultFilter.new(
+ expression: "true",
+ scope: "test-scope-value",
+ payload: {}
+ )
+ ]
+) do |input, ctx|
+ input
+end
+
+WEBHOOK_WITH_STATIC_PAYLOAD = HATCHET.task(
+ name: "webhook_with_static_payload",
+ on_events: ["webhook-static:test"]
+) do |input, ctx|
+ input
+end
+
+def main
+ worker = HATCHET.worker(
+ "webhook-scope-worker",
+ workflows: [WEBHOOK_WITH_SCOPE, WEBHOOK_WITH_STATIC_PAYLOAD]
+ )
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/webhooks/test_webhooks_spec.rb b/sdks/ruby/examples/webhooks/test_webhooks_spec.rb
new file mode 100644
index 000000000..2095174b9
--- /dev/null
+++ b/sdks/ruby/examples/webhooks/test_webhooks_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require "securerandom"
+require "net/http"
+require "json"
+require_relative "../spec_helper"
+require_relative "worker"
+
+RSpec.describe "WebhookWorkflow" do
+ TEST_BASIC_USERNAME = "test_user" unless defined?(TEST_BASIC_USERNAME)
+ TEST_BASIC_PASSWORD = "test_password" unless defined?(TEST_BASIC_PASSWORD)
+
+ def send_webhook_request(url, body, username: TEST_BASIC_USERNAME, password: TEST_BASIC_PASSWORD)
+ uri = URI(url)
+ request = Net::HTTP::Post.new(uri)
+ request.basic_auth(username, password)
+ request.content_type = "application/json"
+ request.body = body.to_json
+
+ Net::HTTP.start(uri.hostname, uri.port) do |http|
+ http.request(request)
+ end
+ end
+
+ it "creates a webhook and processes incoming requests" do
+ skip "Requires webhook infrastructure to be running"
+
+ # This test requires the Hatchet server with webhook support
+ # to be running and accessible
+ test_run_id = SecureRandom.uuid
+
+ # Create webhook, send request, verify
+ # Full implementation depends on the webhook API being available
+ end
+end
diff --git a/sdks/ruby/examples/webhooks/worker.rb b/sdks/ruby/examples/webhooks/worker.rb
new file mode 100644
index 000000000..24db42aaa
--- /dev/null
+++ b/sdks/ruby/examples/webhooks/worker.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+# > Webhooks
+
+require "hatchet-sdk"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+WEBHOOK_TASK = HATCHET.task(
+ name: "webhook",
+ on_events: ["webhook:test"]
+) do |input, ctx|
+ {
+ "type" => input["type"],
+ "message" => input["message"]
+ }
+end
+
+# !!
+
+def main
+ worker = HATCHET.worker("webhook-worker", workflows: [WEBHOOK_TASK])
+ worker.start
+end
+
+main if __FILE__ == $PROGRAM_NAME
diff --git a/sdks/ruby/examples/worker.rb b/sdks/ruby/examples/worker.rb
new file mode 100644
index 000000000..06627cb59
--- /dev/null
+++ b/sdks/ruby/examples/worker.rb
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+# Main worker that registers all example workflows.
+
+require "hatchet-sdk"
+
+# Load all example workflows
+require_relative "simple/worker"
+require_relative "dag/worker"
+require_relative "events/worker"
+require_relative "cancellation/worker"
+require_relative "on_failure/worker"
+require_relative "on_success/worker"
+require_relative "timeout/worker"
+require_relative "retries/worker"
+require_relative "non_retryable/worker"
+require_relative "logger/worker"
+require_relative "delayed/worker"
+require_relative "priority/worker"
+require_relative "run_details/worker"
+require_relative "concurrency_limit/worker"
+require_relative "concurrency_limit_rr/worker"
+require_relative "concurrency_cancel_in_progress/worker"
+require_relative "concurrency_cancel_newest/worker"
+require_relative "concurrency_multiple_keys/worker"
+require_relative "concurrency_workflow_level/worker"
+require_relative "rate_limit/worker"
+require_relative "child/worker"
+require_relative "fanout/worker"
+require_relative "bulk_fanout/worker"
+require_relative "durable/worker"
+require_relative "durable_event/worker"
+require_relative "durable_sleep/worker"
+require_relative "conditions/worker"
+require_relative "dependency_injection/worker"
+require_relative "streaming/worker"
+require_relative "serde/worker"
+require_relative "dataclasses/worker"
+require_relative "dedupe/worker"
+require_relative "cron/worker"
+require_relative "scheduled/worker"
+require_relative "bulk_operations/worker"
+require_relative "return_exceptions/worker"
+require_relative "manual_slot_release/worker"
+require_relative "affinity_workers/worker"
+require_relative "sticky_workers/worker"
+require_relative "webhooks/worker"
+require_relative "webhook_with_scope/worker"
+require_relative "unit_testing/worker"
+
+HATCHET = Hatchet::Client.new(debug: true) unless defined?(HATCHET)
+
+ALL_WORKFLOWS = [
+ # Tier 1
+ SIMPLE, SIMPLE_DURABLE,
+ DAG_WORKFLOW,
+ EVENT_WORKFLOW,
+ CANCELLATION_WORKFLOW,
+ ON_FAILURE_WF, ON_FAILURE_WF_WITH_DETAILS,
+ ON_SUCCESS_WORKFLOW,
+ TIMEOUT_WF, REFRESH_TIMEOUT_WF,
+ SIMPLE_RETRY_WORKFLOW, BACKOFF_WORKFLOW,
+ NON_RETRYABLE_WORKFLOW,
+ LOGGING_WORKFLOW,
+ PRINT_SCHEDULE_WF, PRINT_PRINTER_WF,
+ PRIORITY_WORKFLOW,
+ RUN_DETAIL_TEST_WORKFLOW,
+
+ # Tier 2
+ CONCURRENCY_LIMIT_WORKFLOW,
+ CONCURRENCY_LIMIT_RR_WORKFLOW,
+ CONCURRENCY_CANCEL_IN_PROGRESS_WORKFLOW,
+ CONCURRENCY_CANCEL_NEWEST_WORKFLOW,
+ CONCURRENCY_MULTIPLE_KEYS_WORKFLOW,
+ CONCURRENCY_WORKFLOW_LEVEL_WORKFLOW,
+ RATE_LIMIT_WORKFLOW,
+
+ # Tier 3
+ CHILD_TASK_WF,
+ FANOUT_PARENT_WF, FANOUT_CHILD_WF,
+ BULK_PARENT_WF, BULK_CHILD_WF,
+ DURABLE_WORKFLOW, EPHEMERAL_WORKFLOW, WAIT_FOR_SLEEP_TWICE,
+ DURABLE_EVENT_TASK, DURABLE_EVENT_TASK_WITH_FILTER,
+ DURABLE_SLEEP_TASK,
+ TASK_CONDITION_WORKFLOW,
+ ASYNC_TASK_WITH_DEPS, SYNC_TASK_WITH_DEPS,
+ DURABLE_ASYNC_TASK_WITH_DEPS, DURABLE_SYNC_TASK_WITH_DEPS,
+ DI_WORKFLOW,
+
+ # Tier 4-5
+ STREAM_TASK,
+ SERDE_WORKFLOW,
+ SAY_HELLO,
+ DEDUPE_PARENT_WF, DEDUPE_CHILD_WF,
+ CRON_WORKFLOW,
+ SCHEDULED_WORKFLOW,
+ BULK_REPLAY_TEST_1, BULK_REPLAY_TEST_2, BULK_REPLAY_TEST_3,
+ RETURN_EXCEPTIONS_TASK,
+ SLOT_RELEASE_WORKFLOW,
+ AFFINITY_WORKER_WORKFLOW,
+ STICKY_WORKFLOW, STICKY_CHILD_WORKFLOW,
+ WEBHOOK_TASK,
+ WEBHOOK_WITH_SCOPE, WEBHOOK_WITH_STATIC_PAYLOAD,
+ SYNC_STANDALONE, ASYNC_STANDALONE,
+ DURABLE_SYNC_STANDALONE, DURABLE_ASYNC_STANDALONE,
+ SIMPLE_UNIT_TEST_WORKFLOW, COMPLEX_UNIT_TEST_WORKFLOW
+].freeze
+
+worker = HATCHET.worker("all-examples-worker", slots: 40, workflows: ALL_WORKFLOWS)
+worker.start
diff --git a/sdks/ruby/examples/worker_fixture.rb b/sdks/ruby/examples/worker_fixture.rb
new file mode 100644
index 000000000..d4dfbf31e
--- /dev/null
+++ b/sdks/ruby/examples/worker_fixture.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require "open3"
+require "net/http"
+require "logger"
+require "timeout"
+
+module HatchetWorkerFixture
+ LOGGER = Logger.new($stdout)
+
+ # Wait for the worker health check endpoint to respond
+ #
+ # @param port [Integer] Health check port
+ # @param max_attempts [Integer] Maximum number of attempts
+ # @return [Boolean] true if healthy
+ # @raise [RuntimeError] if worker fails to start
+ def self.wait_for_worker_health(port:, max_attempts: 25)
+ attempts = 0
+
+ loop do
+ if attempts > max_attempts
+ raise "Worker failed to start within #{max_attempts} seconds"
+ end
+
+ begin
+ uri = URI("http://localhost:#{port}/health")
+ response = Net::HTTP.get_response(uri)
+ return true if response.code == "200"
+ rescue StandardError
+ # Worker not ready yet
+ end
+
+ sleep 1
+ attempts += 1
+ end
+ end
+
+ # Start a worker subprocess and wait for it to be healthy
+ #
+ # @param command [Array] Command to run
+ # @param healthcheck_port [Integer] Port for health checks
+ # @yield [pid] Yields the process PID
+ # @return [void]
+ def self.with_worker(command, healthcheck_port: 8001)
+ LOGGER.info("Starting background worker: #{command.join(' ')}")
+
+ ENV["HATCHET_CLIENT_WORKER_HEALTHCHECK_PORT"] = healthcheck_port.to_s
+
+ stdin, stdout, stderr, wait_thr = Open3.popen3(*command)
+ pid = wait_thr.pid
+
+ # Log output in background threads
+ Thread.new do
+ stdout.each_line { |line| puts line.chomp }
+ rescue IOError
+ # Stream closed
+ end
+
+ Thread.new do
+ stderr.each_line { |line| $stderr.puts line.chomp }
+ rescue IOError
+ # Stream closed
+ end
+
+ wait_for_worker_health(port: healthcheck_port)
+
+ yield pid
+ ensure
+ LOGGER.info("Cleaning up background worker (PID: #{pid})")
+
+ if pid
+ begin
+ # Kill process group to get children too
+ Process.kill("TERM", -Process.getpgid(pid))
+ rescue Errno::ESRCH, Errno::EPERM
+ # Process already gone
+ end
+
+ begin
+ Timeout.timeout(5) { Process.wait(pid) }
+ rescue Timeout::Error
+ begin
+ Process.kill("KILL", pid)
+ Process.wait(pid)
+ rescue Errno::ESRCH, Errno::ECHILD
+ # Already gone
+ end
+ rescue Errno::ECHILD
+ # Already reaped
+ end
+ end
+
+ [stdin, stdout, stderr].each do |io|
+ io&.close rescue nil
+ end
+ end
+end
diff --git a/sdks/ruby/generate.sh b/sdks/ruby/generate.sh
new file mode 100755
index 000000000..279b14ec7
--- /dev/null
+++ b/sdks/ruby/generate.sh
@@ -0,0 +1,170 @@
+#!/bin/bash
+#
+# Single entry point for all Ruby SDK code generation.
+#
+# Generates:
+# 1. Protobuf/gRPC stubs (from api-contracts/*.proto)
+# 2. REST API client (from bin/oas/openapi.yaml via openapi-generator)
+#
+# Usage:
+# cd sdks/ruby && bash generate.sh # generate everything
+# cd sdks/ruby && bash generate.sh proto # protobuf only
+# cd sdks/ruby && bash generate.sh rest # REST client only
+#
+# Prerequisites:
+# - grpc-tools gem (gem install grpc-tools)
+# - openapi-generator-cli (npm install -g @openapitools/openapi-generator-cli)
+
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
+
+# ── Protobuf / gRPC generation ──────────────────────────────────────────────
+
+generate_proto() {
+ echo "==> Generating protobuf/gRPC stubs..."
+
+ local contracts_dir="$REPO_ROOT/api-contracts"
+ local output_dir="$SCRIPT_DIR/src/lib/hatchet/contracts"
+
+ mkdir -p "$output_dir/dispatcher"
+ mkdir -p "$output_dir/events"
+ mkdir -p "$output_dir/workflows"
+ mkdir -p "$output_dir/v1/shared"
+
+ local proto_files=(
+ "dispatcher/dispatcher.proto"
+ "events/events.proto"
+ "workflows/workflows.proto"
+ "v1/shared/condition.proto"
+ "v1/dispatcher.proto"
+ "v1/workflows.proto"
+ )
+
+ for proto_file in "${proto_files[@]}"; do
+ echo " $proto_file"
+ grpc_tools_ruby_protoc \
+ --proto_path="$contracts_dir" \
+ --ruby_out="$output_dir" \
+ --grpc_out="$output_dir" \
+ "$proto_file"
+ done
+
+ echo " Done."
+}
+
+# ── REST API client generation ───────────────────────────────────────────────
+
+generate_rest() {
+ echo "==> Generating REST API client from OpenAPI spec..."
+
+ local openapi_spec="$REPO_ROOT/bin/oas/openapi.yaml"
+ local output_dir="$SCRIPT_DIR/src/lib/hatchet/clients/rest"
+ local config_file="$SCRIPT_DIR/src/config/openapi_generator_config.json"
+
+ if [ ! -f "$openapi_spec" ]; then
+ echo "ERROR: OpenAPI spec not found at $openapi_spec" >&2
+ exit 1
+ fi
+
+ # Install openapi-generator-cli if missing
+ if ! command -v openapi-generator-cli &>/dev/null; then
+ echo " Installing openapi-generator-cli..."
+ npm install -g @openapitools/openapi-generator-cli
+ fi
+
+ # Generate
+ local additional_props="gemName=hatchet-sdk-rest,moduleName=HatchetSdkRest,gemVersion=0.0.1,gemDescription=HatchetRubySDKRestClient,gemAuthor=HatchetTeam,gemHomepage=https://github.com/hatchet-dev/hatchet,gemLicense=MIT,library=faraday"
+
+ local cmd=(
+ openapi-generator-cli generate
+ -i "$openapi_spec"
+ -g ruby
+ -o "$output_dir"
+ --skip-validate-spec
+ --global-property "apiTests=false,modelTests=false,apiDocs=true,modelDocs=true"
+ --additional-properties "$additional_props"
+ )
+
+ if [ -f "$config_file" ]; then
+ cmd+=(-c "$config_file")
+ fi
+
+ "${cmd[@]}"
+
+ # ── Post-generation patches ──────────────────────────────────────────────
+ echo " Applying patches..."
+ apply_cookie_auth_patch "$output_dir"
+
+ echo " Done."
+}
+
+# Patch the generated client to support cookie-based auth and skip nil values.
+apply_cookie_auth_patch() {
+ local output_dir="$1"
+
+ # 1. Fix configuration.rb: fill in empty 'in:' for cookie auth
+ local config_rb="$output_dir/lib/hatchet-sdk-rest/configuration.rb"
+ if [ -f "$config_rb" ]; then
+ sed -i.bak "s/in: ,/in: 'cookie',/g" "$config_rb" && rm -f "$config_rb.bak"
+ fi
+
+ # 2. Fix api_client.rb: add cookie support + nil guard
+ local api_client_rb="$output_dir/lib/hatchet-sdk-rest/api_client.rb"
+ if [ -f "$api_client_rb" ]; then
+ ruby -e '
+ path = ARGV[0]
+ content = File.read(path)
+
+ old_auth = <<~RUBY.strip
+ case auth_setting[:in]
+ when '\''header'\'' then header_params[auth_setting[:key]] = auth_setting[:value]
+ when '\''query'\'' then query_params[auth_setting[:key]] = auth_setting[:value]
+ else fail ArgumentError, '\''Authentication token must be in `query` or `header`'\''
+ end
+ RUBY
+
+ new_auth = <<~RUBY.strip
+ next if auth_setting[:value].nil? || auth_setting[:value].to_s.empty?
+ case auth_setting[:in]
+ when '\''header'\'' then header_params[auth_setting[:key]] = auth_setting[:value]
+ when '\''query'\'' then query_params[auth_setting[:key]] = auth_setting[:value]
+ when '\''cookie'\'' then header_params['\''Cookie'\''] = "#{auth_setting[:key]}=#{auth_setting[:value]}"
+ else next # skip unsupported auth locations
+ end
+ RUBY
+
+ if content.sub!(old_auth, new_auth)
+ File.write(path, content)
+ puts " Patched api_client.rb"
+ else
+ puts " api_client.rb already patched (skipping)"
+ end
+ ' "$api_client_rb"
+ fi
+}
+
+# ── Main ─────────────────────────────────────────────────────────────────────
+
+case "${1:-all}" in
+ proto) generate_proto ;;
+ rest) generate_rest ;;
+ all)
+ generate_proto
+ generate_rest
+ ;;
+ -h|--help)
+ echo "Usage: $0 [proto|rest|all]"
+ echo " proto Generate protobuf/gRPC stubs only"
+ echo " rest Generate REST API client only"
+ echo " all Generate everything (default)"
+ exit 0
+ ;;
+ *)
+ echo "Unknown command: $1. Use --help for usage." >&2
+ exit 1
+ ;;
+esac
+
+echo "==> All generation complete."
diff --git a/sdks/ruby/src/.rubocop.yml b/sdks/ruby/src/.rubocop.yml
index 537f3da01..33d90e346 100644
--- a/sdks/ruby/src/.rubocop.yml
+++ b/sdks/ruby/src/.rubocop.yml
@@ -1,8 +1,153 @@
+# RuboCop configuration for the Hatchet Ruby SDK (sdks/ruby/src)
+
+require:
+ - ./lib/rubocop/cop/hatchet/rbs_signature_exists
+
AllCops:
TargetRubyVersion: 3.1
+ NewCops: enable
+ SuggestExtensions: false
+ Exclude:
+ - "lib/hatchet/clients/rest/**/*"
+ - "lib/hatchet/contracts/**/*"
+ - "vendor/**/*"
+ - "bin/**/*"
+ - "scripts/**/*"
+
+# ─── Style ───────────────────────────────────────────────────────────
Style/StringLiterals:
EnforcedStyle: double_quotes
Style/StringLiteralsInInterpolation:
EnforcedStyle: double_quotes
+
+Style/FrozenStringLiteralComment:
+ EnforcedStyle: always
+
+Style/Documentation:
+ Enabled: false
+
+Style/TrailingCommaInArguments:
+ EnforcedStyleForMultiline: consistent_comma
+
+Style/TrailingCommaInArrayLiteral:
+ EnforcedStyleForMultiline: consistent_comma
+
+Style/TrailingCommaInHashLiteral:
+ EnforcedStyleForMultiline: consistent_comma
+
+# ─── Metrics ─────────────────────────────────────────────────────────
+
+Metrics/MethodLength:
+ Max: 55
+ CountAsOne:
+ - array
+ - hash
+ - heredoc
+ - method_call
+
+Metrics/ClassLength:
+ Max: 300
+ CountAsOne:
+ - array
+ - hash
+ - heredoc
+ - method_call
+
+Metrics/ModuleLength:
+ Max: 300
+ CountAsOne:
+ - array
+ - hash
+ - heredoc
+ - method_call
+
+Metrics/BlockLength:
+ Max: 35
+ Exclude:
+ - "spec/**/*"
+ - "Rakefile"
+ - "*.gemspec"
+
+Metrics/AbcSize:
+ Max: 65
+
+Metrics/CyclomaticComplexity:
+ Max: 35
+
+Metrics/PerceivedComplexity:
+ Max: 35
+
+Metrics/ParameterLists:
+ Max: 10
+ CountKeywordArgs: false
+
+# ─── Layout ──────────────────────────────────────────────────────────
+
+Layout/LineLength:
+ Max: 140
+ AllowedPatterns:
+ - "^\\s*#"
+ Exclude:
+ - "*.gemspec"
+
+Layout/MultilineMethodCallIndentation:
+ EnforcedStyle: indented
+
+Layout/FirstHashElementIndentation:
+ EnforcedStyle: consistent
+
+Layout/FirstArrayElementIndentation:
+ EnforcedStyle: consistent
+
+# ─── Naming ──────────────────────────────────────────────────────────
+
+Naming/FileName:
+ Exclude:
+ - "lib/hatchet-sdk.rb"
+
+Naming/MethodParameterName:
+ AllowedNames:
+ - id
+ - ip
+ - to
+ - by
+ - on
+ - in
+ - at
+ - fn
+
+Naming/PredicatePrefix:
+ Enabled: false
+
+Naming/AccessorMethodName:
+ Enabled: false
+
+Naming/VariableNumber:
+ Enabled: false
+
+# ─── Gemspec ─────────────────────────────────────────────────────────
+
+Gemspec/DevelopmentDependencies:
+ Enabled: false
+
+# ─── Lint ────────────────────────────────────────────────────────────
+
+Lint/UnusedMethodArgument:
+ AllowUnusedKeywordArguments: true
+
+Lint/MissingSuper:
+ Enabled: false
+
+Lint/DuplicateBranch:
+ Enabled: false
+
+# ─── Custom ─────────────────────────────────────────────────────────
+
+Hatchet/RbsSignatureExists:
+ Enabled: true
+ Exclude:
+ - "lib/hatchet/clients/rest/**/*"
+ - "lib/hatchet/contracts/**/*"
+ - "lib/rubocop/**/*"
diff --git a/sdks/ruby/src/CHANGELOG.md b/sdks/ruby/src/CHANGELOG.md
index 07d8c5807..0174f9aa5 100644
--- a/sdks/ruby/src/CHANGELOG.md
+++ b/sdks/ruby/src/CHANGELOG.md
@@ -1,6 +1,27 @@
## Hatchet SDK Changelog
-## [0.1.0] - 2025-08-05
+## [0.1.0] - 2025-02-15
-- Initial release
-- Events and Runs Rest Clients
+- Initial release of the Ruby SDK for Hatchet
+- Task orchestration with simple tasks, DAGs, and child/fanout workflows
+- Durable execution with durable tasks, durable events, and durable sleep
+- Concurrency control (limit, round-robin, cancel in progress, cancel newest, multiple keys, workflow-level)
+- Rate limiting
+- Event-driven workflows
+- Cron and scheduled workflows
+- Retries with configurable backoff strategies
+- Timeout management with refresh support
+- On-failure and on-success callbacks
+- Streaming support
+- Webhook integration
+- Bulk operations (fanout, replay)
+- Priority scheduling
+- Sticky and affinity worker assignment
+- Deduplication
+- Manual slot release
+- Dependency injection
+- Unit testing helpers
+- Logging integration
+- Run detail inspection
+- RBS type signatures for IDE support
+- REST and gRPC client support
diff --git a/sdks/ruby/src/CLAUDE.md b/sdks/ruby/src/CLAUDE.md
index af418fe76..130b20a94 100644
--- a/sdks/ruby/src/CLAUDE.md
+++ b/sdks/ruby/src/CLAUDE.md
@@ -58,7 +58,6 @@ The SDK includes comprehensive documentation and type signatures for excellent I
- `Hatchet::Clients::Rest::*` - Generated REST API clients (WorkflowApi, EventApi, etc.)
- `Hatchet::TLSConfig` - TLS configuration for secure connections
- `Hatchet::HealthcheckConfig` - Worker health monitoring configuration
-- `Hatchet::OpenTelemetryConfig` - Observability configuration
- `Hatchet::Error` - Base error class for gem-specific exceptions
**Configuration Sources (priority order):**
diff --git a/sdks/ruby/src/Gemfile.lock b/sdks/ruby/src/Gemfile.lock
index b2a8210e5..793ecb885 100644
--- a/sdks/ruby/src/Gemfile.lock
+++ b/sdks/ruby/src/Gemfile.lock
@@ -1,9 +1,12 @@
PATH
remote: .
specs:
- hatchet-sdk (0.1.0.pre.alpha)
+ hatchet-sdk (0.1.0)
+ concurrent-ruby (>= 1.1)
faraday (~> 2.0)
faraday-multipart
+ google-protobuf (~> 4.0)
+ grpc (~> 1.60)
json (~> 2.0)
marcel
@@ -11,6 +14,8 @@ GEM
remote: https://rubygems.org/
specs:
ast (2.4.3)
+ bigdecimal (4.0.1)
+ concurrent-ruby (1.3.6)
date (3.4.1)
diff-lcs (1.6.2)
erb (5.0.2)
@@ -23,6 +28,27 @@ GEM
faraday-net_http (3.4.1)
net-http (>= 0.5.0)
gem-release (2.2.4)
+ google-protobuf (4.33.5)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.33.5-arm64-darwin)
+ bigdecimal
+ rake (>= 13)
+ google-protobuf (4.33.5-x86_64-linux-gnu)
+ bigdecimal
+ rake (>= 13)
+ googleapis-common-protos-types (1.22.0)
+ google-protobuf (~> 4.26)
+ grpc (1.78.0)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.78.0-arm64-darwin)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc (1.78.0-x86_64-linux-gnu)
+ google-protobuf (>= 3.25, < 5.0)
+ googleapis-common-protos-types (~> 1.0)
+ grpc-tools (1.78.0)
io-console (0.8.1)
irb (1.15.2)
pp (>= 0.6.0)
@@ -93,11 +119,13 @@ GEM
PLATFORMS
arm64-darwin-24
ruby
+ x86_64-linux
DEPENDENCIES
faraday (~> 2.0)
faraday-multipart
gem-release (~> 2.2)
+ grpc-tools (~> 1.60)
hatchet-sdk!
irb
json (~> 2.0)
diff --git a/sdks/ruby/src/Rakefile b/sdks/ruby/src/Rakefile
index 4413194c0..caa547bde 100644
--- a/sdks/ruby/src/Rakefile
+++ b/sdks/ruby/src/Rakefile
@@ -13,9 +13,9 @@ RuboCop::RakeTask.new
load File.expand_path("Rakefile.rest", __dir__)
# Generate REST API client before building the gem
-task :build => "api:generate"
+task build: "api:generate"
# Generate REST API client before running tests (for CI)
-task :spec => "api:generate_if_missing"
+task spec: "api:generate_if_missing"
task default: %i[spec rubocop]
diff --git a/sdks/ruby/src/Rakefile.rest b/sdks/ruby/src/Rakefile.rest
index 656cd82ce..2316f753d 100644
--- a/sdks/ruby/src/Rakefile.rest
+++ b/sdks/ruby/src/Rakefile.rest
@@ -1,31 +1,20 @@
# frozen_string_literal: true
# Additional Rake tasks for REST API client generation
+#
+# All generation goes through sdks/ruby/generate.sh, which is the single
+# entry point for both protobuf and REST generation.
require "rake"
namespace :api do
+ # Path to the unified generate script
+ GENERATE_SH = File.expand_path("../generate.sh", __dir__)
+
desc "Generate REST API client from OpenAPI specification"
task :generate do
- puts "Generating REST API client from OpenAPI specification..."
-
- ruby_script = File.join(__dir__, "scripts", "generate_api.rb")
-
- unless File.exist?(ruby_script)
- puts "❌ Generator script not found at #{ruby_script}"
- exit 1
- end
-
- # Make the script executable
- File.chmod(0755, ruby_script)
-
- # Execute the generation script
- system("ruby #{ruby_script}") || begin
- puts "❌ API generation failed"
- exit 1
- end
-
- puts "✅ REST API client generation completed"
+ puts "Generating REST API client..."
+ system("bash", GENERATE_SH, "rest") || abort("REST API generation failed")
end
desc "Clean generated REST API client files"
@@ -33,12 +22,12 @@ namespace :api do
rest_client_dir = File.join(__dir__, "lib", "hatchet", "clients", "rest")
if Dir.exist?(rest_client_dir)
- puts "🧹 Cleaning generated REST API client files..."
+ puts "Cleaning generated REST API client files..."
require "fileutils"
FileUtils.rm_rf(rest_client_dir)
- puts "✅ REST API client files cleaned"
+ puts "REST API client files cleaned"
else
- puts "ℹ️ No REST API client files to clean"
+ puts "No REST API client files to clean"
end
end
@@ -51,83 +40,10 @@ namespace :api do
main_client_file = File.join(rest_client_dir, "lib", "hatchet-sdk-rest.rb")
unless File.exist?(main_client_file)
- puts "🔄 REST API client not found, generating..."
+ puts "REST API client not found, generating..."
Rake::Task["api:generate"].invoke
else
- puts "✅ REST API client already exists, skipping generation"
- end
- end
-
- desc "Validate OpenAPI specification"
- task :validate do
- openapi_spec = File.join(__dir__, "..", "..", "..", "bin", "oas", "openapi.yaml")
-
- unless File.exist?(openapi_spec)
- puts "❌ OpenAPI specification not found at #{openapi_spec}"
- exit 1
- end
-
- puts "🔍 Validating OpenAPI specification..."
-
- # Use openapi-generator-cli to validate the spec
- cmd = ["openapi-generator-cli", "validate", "-i", openapi_spec]
-
- if system(*cmd)
- puts "✅ OpenAPI specification is valid"
- else
- puts "❌ OpenAPI specification validation failed"
- exit 1
- end
- end
-
- desc "Install OpenAPI Generator CLI if not present"
- task :install_generator do
- puts "🔧 Checking OpenAPI Generator CLI installation..."
-
- if system("which openapi-generator-cli > /dev/null 2>&1")
- puts "✅ OpenAPI Generator CLI is already installed"
- else
- puts "📦 Installing OpenAPI Generator CLI..."
-
- if system("npm install -g @openapitools/openapi-generator-cli@7.13.0")
- puts "✅ OpenAPI Generator CLI installed successfully"
- else
- puts "❌ Failed to install OpenAPI Generator CLI"
- puts "Please install it manually: npm install -g @openapitools/openapi-generator-cli"
- exit 1
- end
- end
- end
-
- desc "Show information about the OpenAPI specification"
- task :info do
- openapi_spec = File.join(__dir__, "..", "..", "..", "bin", "oas", "openapi.yaml")
-
- unless File.exist?(openapi_spec)
- puts "❌ OpenAPI specification not found at #{openapi_spec}"
- exit 1
- end
-
- puts "📊 OpenAPI Specification Information:"
- puts " File: #{openapi_spec}"
- puts " Size: #{File.size(openapi_spec)} bytes"
- puts " Modified: #{File.mtime(openapi_spec)}"
-
- # Try to parse and show basic info
- begin
- require "yaml"
- spec = YAML.load_file(openapi_spec)
-
- puts " OpenAPI Version: #{spec.dig('openapi')}"
- puts " API Version: #{spec.dig('info', 'version')}"
- puts " API Title: #{spec.dig('info', 'title')}"
- puts " Paths: #{spec.dig('paths')&.keys&.count || 0}"
- puts " Components: #{spec.dig('components', 'schemas')&.keys&.count || 0} schemas"
- rescue => e
- puts " ⚠️ Could not parse specification: #{e.message}"
+ puts "REST API client already exists, skipping generation"
end
end
end
-
-# Add REST API tasks to the default rake task
-task default: ["api:validate"]
diff --git a/sdks/ruby/src/hatchet-sdk.gemspec b/sdks/ruby/src/hatchet-sdk.gemspec
index 73ca54738..e4d8c61e1 100644
--- a/sdks/ruby/src/hatchet-sdk.gemspec
+++ b/sdks/ruby/src/hatchet-sdk.gemspec
@@ -19,6 +19,7 @@ Gem::Specification.new do |spec|
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "https://github.com/hatchet-dev/hatchet/tree/main/sdks/ruby"
spec.metadata["changelog_uri"] = "https://github.com/hatchet-dev/hatchet/blob/main/sdks/ruby/src/CHANGELOG.md"
+ spec.metadata["rubygems_mfa_required"] = "true"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
@@ -37,6 +38,13 @@ Gem::Specification.new do |spec|
rest_files = Dir.glob("#{rest_client_dir}/**/*.rb").select { |f| File.file?(f) }
spec.files.concat(rest_files)
end
+
+ # Add generated protobuf/gRPC contract files to the gem
+ contracts_dir = "lib/hatchet/contracts"
+ if Dir.exist?(contracts_dir)
+ contract_files = Dir.glob("#{contracts_dir}/**/*.rb").select { |f| File.file?(f) }
+ spec.files.concat(contract_files)
+ end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
@@ -44,11 +52,17 @@ Gem::Specification.new do |spec|
# Runtime dependencies for REST API client
spec.add_dependency "faraday", "~> 2.0"
spec.add_dependency "faraday-multipart"
- spec.add_dependency "marcel"
spec.add_dependency "json", "~> 2.0"
+ spec.add_dependency "marcel"
+
+ # Runtime dependencies for gRPC
+ spec.add_dependency "concurrent-ruby", ">= 1.1"
+ spec.add_dependency "google-protobuf", "~> 4.0"
+ spec.add_dependency "grpc", "~> 1.60"
# Development dependencies
spec.add_development_dependency "gem-release", "~> 2.2"
+ spec.add_development_dependency "grpc-tools", "~> 1.60"
spec.add_development_dependency "rspec", "~> 3.0"
# For more information and examples about making a new gem, check out our
diff --git a/sdks/ruby/src/lib/hatchet-sdk.rb b/sdks/ruby/src/lib/hatchet-sdk.rb
index abf696c1b..175d186f2 100644
--- a/sdks/ruby/src/lib/hatchet-sdk.rb
+++ b/sdks/ruby/src/lib/hatchet-sdk.rb
@@ -1,19 +1,76 @@
# frozen_string_literal: true
-# typed: strict
require_relative "hatchet/version"
require_relative "hatchet/config"
+
+# Define base error class before loading submodules that depend on it
+module Hatchet
+ # Base error class for all Hatchet-related errors
+ class Error < StandardError; end
+end
+
require_relative "hatchet/clients"
require_relative "hatchet/features/events"
require_relative "hatchet/features/runs"
+require_relative "hatchet/features/tenant"
+require_relative "hatchet/features/logs"
+require_relative "hatchet/features/workers"
+require_relative "hatchet/features/cel"
+require_relative "hatchet/features/workflows"
+require_relative "hatchet/features/filters"
+require_relative "hatchet/features/metrics"
+require_relative "hatchet/features/rate_limits"
+require_relative "hatchet/features/cron"
+require_relative "hatchet/features/scheduled"
+
+# Core classes
+require_relative "hatchet/exceptions"
+require_relative "hatchet/concurrency"
+require_relative "hatchet/conditions"
+require_relative "hatchet/condition_converter"
+require_relative "hatchet/rate_limit"
+require_relative "hatchet/labels"
+require_relative "hatchet/trigger_options"
+require_relative "hatchet/default_filter"
+require_relative "hatchet/workflow_run"
+require_relative "hatchet/context"
+require_relative "hatchet/durable_context"
+require_relative "hatchet/task"
+require_relative "hatchet/workflow"
+require_relative "hatchet/context_vars"
+require_relative "hatchet/worker_obj"
+
+# gRPC connection and client infrastructure
+require_relative "hatchet/connection"
+
+# Generated protobuf contracts (add contracts directory to load path for internal requires)
+$LOAD_PATH.unshift(File.join(__dir__, "hatchet", "contracts")) unless $LOAD_PATH.include?(File.join(__dir__, "hatchet", "contracts"))
+require_relative "hatchet/contracts/dispatcher/dispatcher_pb"
+require_relative "hatchet/contracts/dispatcher/dispatcher_services_pb"
+require_relative "hatchet/contracts/events/events_pb"
+require_relative "hatchet/contracts/events/events_services_pb"
+require_relative "hatchet/contracts/workflows/workflows_pb"
+require_relative "hatchet/contracts/workflows/workflows_services_pb"
+require_relative "hatchet/contracts/v1/shared/condition_pb"
+require_relative "hatchet/contracts/v1/dispatcher_pb"
+require_relative "hatchet/contracts/v1/dispatcher_services_pb"
+require_relative "hatchet/contracts/v1/workflows_pb"
+require_relative "hatchet/contracts/v1/workflows_services_pb"
+
+# gRPC client wrappers
+require_relative "hatchet/clients/grpc/dispatcher"
+require_relative "hatchet/clients/grpc/admin"
+require_relative "hatchet/clients/grpc/event_client"
+
+# Worker runtime
+require_relative "hatchet/worker/action_listener"
+require_relative "hatchet/worker/workflow_run_listener"
+require_relative "hatchet/worker/runner"
# Ruby SDK for Hatchet workflow engine
#
# @see https://docs.hatchet.run for Hatchet documentation
module Hatchet
- # Base error class for all Hatchet-related errors
- class Error < StandardError; end
-
# The main client for interacting with Hatchet services.
#
# @example Basic usage with API token
@@ -24,6 +81,13 @@ module Hatchet
# token: "your-jwt-token",
# namespace: "production"
# )
+ #
+ # @example Define a workflow
+ # wf = hatchet.workflow(name: "MyWorkflow")
+ # step1 = wf.task(:step1) { |input, ctx| { "result" => 42 } }
+ #
+ # @example Define a standalone task
+ # my_task = hatchet.task(name: "my_task") { |input, ctx| { "result" => "done" } }
class Client
# @return [Config] The configuration object used by this client
attr_reader :config
@@ -31,56 +95,343 @@ module Hatchet
# Initialize a new Hatchet client with the given configuration options.
#
# @param options [Hash] Configuration options for the client
+ # @option options [Boolean] :debug Enable debug logging (default: false)
# @option options [String] :token The JWT token for authentication (required)
# @option options [String] :tenant_id Override tenant ID (extracted from JWT token 'sub' field if not provided)
# @option options [String] :host_port gRPC server host and port (default: "localhost:7070")
- # @option options [String] :server_url Server URL for HTTP requests (default: "https://app.dev.hatchet-tools.com")
+ # @option options [String] :server_url Server URL for HTTP requests
# @option options [String] :namespace Namespace prefix for resource names (default: "")
- # @option options [Logger] :logger Custom logger instance (default: Logger.new($stdout))
- # @option options [Integer] :listener_v2_timeout Timeout for listener v2 in milliseconds
- # @option options [Integer] :grpc_max_recv_message_length Maximum gRPC receive message length (default: 4MB)
- # @option options [Integer] :grpc_max_send_message_length Maximum gRPC send message length (default: 4MB)
- # @option options [Hash] :worker_preset_labels Hash of preset labels for workers
- # @option options [Boolean] :enable_force_kill_sync_threads Enable force killing of sync threads (default: false)
- # @option options [Boolean] :enable_thread_pool_monitoring Enable thread pool monitoring (default: false)
- # @option options [Integer] :terminate_worker_after_num_tasks Terminate worker after this many tasks
- # @option options [Boolean] :disable_log_capture Disable log capture (default: false)
- # @option options [Boolean] :grpc_enable_fork_support Enable gRPC fork support (default: false)
- # @option options [TLSConfig] :tls_config Custom TLS configuration
- # @option options [HealthcheckConfig] :healthcheck Custom healthcheck configuration
- # @option options [OpenTelemetryConfig] :otel Custom OpenTelemetry configuration
+ # @option options [Logger] :logger Custom logger instance
+ # @option options [Hash] :worker_preset_labels Default labels applied to all workers
#
# @raise [Error] if token or configuration is missing or invalid
- #
- # @example Initialize with minimal configuration
- # client = Hatchet::Client.new()
- #
- # @example Initialize with custom options
- # client = Hatchet::Client.new(
- # token: "eyJhbGciOiJIUzI1NiJ9...",
- # namespace: "my_app",
- # worker_preset_labels: { "env" => "production", "version" => "1.0.0" }
- # )
def initialize(**options)
+ @debug = options.delete(:debug) || false
@config = Config.new(**options)
end
def rest_client
- @rest_client ||= Hatchet::Clients.rest_client(@config)
+ @rest_client ||= Hatchet::Clients.rest_client(@config)
end
# Feature Client for interacting with Hatchet events
- # @return [Hatchet::Features::Events] The events client for interacting with Hatchet events
+ # @return [Hatchet::Features::Events]
def events
- # @type [Hatchet::Features::Events]
- @events ||= Hatchet::Features::Events.new(rest_client, @config)
+ @events ||= Hatchet::Features::Events.new(rest_client, event_grpc, @config)
end
# Feature Client for interacting with Hatchet workflow runs
- # @return [Hatchet::Features::Runs] The runs client for interacting with Hatchet workflow runs
+ # @return [Hatchet::Features::Runs]
def runs
- # @type [Hatchet::Features::Runs]
- @runs ||= Hatchet::Features::Runs.new(rest_client, @config)
+ @runs ||= Hatchet::Features::Runs.new(rest_client, @config, client: self)
+ end
+
+ # Feature Client for interacting with the current tenant
+ # @return [Hatchet::Features::Tenant]
+ def tenant
+ @tenant ||= Hatchet::Features::Tenant.new(rest_client, @config)
+ end
+
+ # Feature Client for interacting with Hatchet logs
+ # @return [Hatchet::Features::Logs]
+ def logs
+ @logs ||= Hatchet::Features::Logs.new(rest_client, @config)
+ end
+
+ # Feature Client for managing workers
+ # @return [Hatchet::Features::Workers]
+ def workers
+ @workers ||= Hatchet::Features::Workers.new(rest_client, @config)
+ end
+
+ # Feature Client for debugging CEL expressions
+ # @return [Hatchet::Features::CEL]
+ def cel
+ @cel ||= Hatchet::Features::CEL.new(rest_client, @config)
+ end
+
+ # Feature Client for managing workflow definitions
+ # @return [Hatchet::Features::Workflows]
+ def workflows
+ @workflows ||= Hatchet::Features::Workflows.new(rest_client, @config)
+ end
+
+ # Feature Client for managing filters
+ # @return [Hatchet::Features::Filters]
+ def filters
+ @filters ||= Hatchet::Features::Filters.new(rest_client, @config)
+ end
+
+ # Feature Client for reading metrics
+ # @return [Hatchet::Features::Metrics]
+ def metrics
+ @metrics ||= Hatchet::Features::Metrics.new(rest_client, @config)
+ end
+
+ # Feature Client for managing rate limits
+ # @return [Hatchet::Features::RateLimits]
+ def rate_limits
+ @rate_limits ||= Hatchet::Features::RateLimits.new(admin_grpc, @config)
+ end
+
+ # Feature Client for managing cron workflows
+ # @return [Hatchet::Features::Cron]
+ def cron
+ @cron ||= Hatchet::Features::Cron.new(rest_client, @config)
+ end
+
+ # Feature Client for managing scheduled workflows
+ # @return [Hatchet::Features::Scheduled]
+ def scheduled
+ @scheduled ||= Hatchet::Features::Scheduled.new(rest_client, @config)
+ end
+
+ # Create a new workflow definition
+ #
+ # @param name [String] Workflow name
+ # @param opts [Hash] Workflow options
+ # @return [Hatchet::Workflow]
+ #
+ # @example
+ # wf = hatchet.workflow(name: "MyWorkflow")
+ # wf.task(:step1) { |input, ctx| { "value" => 42 } }
+ def workflow(name:, **opts)
+ Workflow.new(name: name, client: self, **opts)
+ end
+
+ # Create a standalone task (auto-wraps in a single-task workflow)
+ #
+ # @param name [String] Task name
+ # @param opts [Hash] Task options
+ # @yield [input, ctx] The task execution block
+ # @return [Hatchet::Task]
+ #
+ # @example
+ # my_task = hatchet.task(name: "my_task") { |input, ctx| { "result" => "done" } }
+ def task(name:, **opts, &block)
+ # Create a workflow wrapper for standalone tasks
+ wf = Workflow.new(name: name, client: self,
+ on_events: opts.delete(:on_events) || [],
+ default_filters: opts.delete(:default_filters) || [],)
+ wf.task(name, **opts, &block)
+ end
+
+ # Create a standalone durable task
+ #
+ # @param name [String] Task name
+ # @param opts [Hash] Task options
+ # @yield [input, ctx] The task execution block
+ # @return [Hatchet::Task]
+ def durable_task(name:, **opts, &block)
+ wf = Workflow.new(name: name, client: self,
+ on_events: opts.delete(:on_events) || [],
+ default_filters: opts.delete(:default_filters) || [],)
+ wf.durable_task(name, **opts, &block)
+ end
+
+ # Create a new worker
+ #
+ # @param name [String] Worker name
+ # @param opts [Hash] Worker options (workflows:, slots:, labels:)
+ # @return [Hatchet::Worker]
+ #
+ # @example
+ # worker = hatchet.worker("my-worker", workflows: [wf], slots: 10)
+ # worker.start
+ def worker(name, **opts)
+ Worker.new(name: name, client: self, **opts)
+ end
+
+ # Convenience accessor for the logger
+ # @return [Logger]
+ def logger
+ @config.logger
+ end
+
+ # @return [String] The tenant ID
+ def tenant_id
+ @config.tenant_id
+ end
+
+ # Shared gRPC channel (lazy-initialized).
+ # A single channel is shared across all gRPC stubs for connection reuse.
+ #
+ # @return [GRPC::Core::Channel]
+ def channel
+ @channel ||= Connection.new_channel(@config)
+ end
+
+ # gRPC Dispatcher client (lazy-initialized).
+ #
+ # @return [Hatchet::Clients::Grpc::Dispatcher]
+ def dispatcher_grpc
+ @dispatcher_grpc ||= Clients::Grpc::Dispatcher.new(config: @config, channel: channel)
+ end
+
+ # gRPC Admin client (lazy-initialized).
+ # Uses both v0 WorkflowService and v1 AdminService stubs.
+ #
+ # @return [Hatchet::Clients::Grpc::Admin]
+ def admin_grpc
+ @admin_grpc ||= Clients::Grpc::Admin.new(config: @config, channel: channel)
+ end
+
+ # gRPC Event client (lazy-initialized).
+ #
+ # @return [Hatchet::Clients::Grpc::EventClient]
+ def event_grpc
+ @event_grpc ||= Clients::Grpc::EventClient.new(config: @config, channel: channel)
+ end
+
+ # Pooled gRPC listener for workflow run completion events (lazy-initialized).
+ #
+ # Maintains a single bidi stream to `Dispatcher.SubscribeToWorkflowRuns`
+ # shared by all callers of `WorkflowRunRef#result`.
+ #
+ # @return [Hatchet::WorkflowRunListener]
+ def workflow_run_listener
+ @workflow_run_listener ||= WorkflowRunListener.new(config: @config, channel: channel)
+ end
+
+ # High-level admin client for workflow triggering.
+ # Delegates to the gRPC admin client with context variable propagation.
+ #
+ # @return [AdminClient]
+ def admin
+ @admin ||= AdminClient.new(client: self)
+ end
+ end
+
+ # Admin client for triggering and scheduling workflows.
+ #
+ # Delegates to the gRPC admin client for actual RPC calls, while handling
+ # context variable propagation for parent-child workflow linking.
+ class AdminClient
+ def initialize(client:)
+ @client = client
+ @spawn_indices = ContextVars::SpawnIndexTracker.new
+ end
+
+ # Trigger a workflow run and wait for result.
+ #
+ # @param workflow_or_task [Workflow, Task, String] The workflow or task to trigger
+ # @param input [Hash] Workflow input
+ # @param options [TriggerWorkflowOptions, nil] Trigger options
+ # @return [Hash] The workflow run result
+ def trigger_workflow(workflow_or_task, input, options: nil)
+ ref = trigger_workflow_no_wait(workflow_or_task, input, options: options)
+ ref.result
+ end
+
+ # Trigger a workflow run without waiting for the result.
+ #
+ # @param workflow_or_task [Workflow, Task, String] The workflow or task to trigger
+ # @param input [Hash] Workflow input
+ # @param options [TriggerWorkflowOptions, nil] Trigger options
+ # @return [WorkflowRunRef] A reference to the running workflow
+ def trigger_workflow_no_wait(workflow_or_task, input, options: nil)
+ name = workflow_or_task.respond_to?(:name) ? workflow_or_task.name : workflow_or_task.to_s
+
+ # Merge user options with context vars for parent-child linking
+ opts = build_trigger_options(options)
+
+ run_id = @client.admin_grpc.trigger_workflow(name, input: input, options: opts)
+ WorkflowRunRef.new(
+ workflow_run_id: run_id,
+ client: @client,
+ listener: @client.workflow_run_listener,
+ )
+ end
+
+ # Trigger many workflow runs and wait for all results.
+ #
+ # @param workflow_or_task [Workflow, Task, String] The workflow or task to trigger
+ # @param items [Array] Array of { input:, options: } items
+ # @param return_exceptions [Boolean] Return exceptions instead of raising
+ # @return [Array] Results or exceptions
+ def trigger_workflow_many(workflow_or_task, items, return_exceptions: false)
+ refs = trigger_workflow_many_no_wait(workflow_or_task, items)
+
+ # Collect results concurrently using threads so that all subscriptions
+ # are sent at once rather than serially waiting for each one.
+ threads = refs.map do |ref|
+ Thread.new do
+ if return_exceptions
+ begin
+ ref.result
+ rescue StandardError => e
+ e
+ end
+ else
+ ref.result
+ end
+ end
+ end
+
+ threads.map(&:value)
+ end
+
+ # Trigger many workflow runs without waiting.
+ #
+ # Uses bulk gRPC triggering for efficiency (batched by 1000).
+ #
+ # @param workflow_or_task [Workflow, Task, String] The workflow or task to trigger
+ # @param items [Array] Array of { input:, options: } items
+ # @return [Array] References to the running workflows
+ def trigger_workflow_many_no_wait(workflow_or_task, items)
+ name = workflow_or_task.respond_to?(:name) ? workflow_or_task.name : workflow_or_task.to_s
+
+ # Build trigger items with context vars for parent-child linking
+ trigger_items = items.map do |item|
+ input = item[:input] || {}
+ opts = build_trigger_options(item[:options])
+ { input: input, options: opts }
+ end
+
+ run_ids = @client.admin_grpc.bulk_trigger_workflow(name, trigger_items)
+ run_ids.map do |run_id|
+ WorkflowRunRef.new(
+ workflow_run_id: run_id,
+ client: @client,
+ listener: @client.workflow_run_listener,
+ )
+ end
+ end
+
+ # Schedule a workflow for future execution.
+ #
+ # @param workflow [Workflow, Task, String] The workflow to schedule
+ # @param time [Time] When to execute
+ # @param input [Hash] Workflow input
+ # @param options [TriggerWorkflowOptions, nil] Schedule options
+ # @return [Object] Schedule response
+ def schedule_workflow(workflow, time, input: {}, options: nil)
+ name = workflow.respond_to?(:name) ? workflow.name : workflow.to_s
+ opts = build_trigger_options(options)
+ @client.admin_grpc.schedule_workflow(name, run_at: time, input: input, options: opts)
+ end
+
+ private
+
+ def build_trigger_options(user_options)
+ # Merge user options with context vars for parent-child linking
+ parent_workflow_run_id = ContextVars.workflow_run_id
+ parent_step_run_id = ContextVars.step_run_id
+ action_key = ContextVars.action_key
+
+ opts = user_options.to_h
+
+ if parent_workflow_run_id
+ opts[:parent_id] ||= parent_workflow_run_id
+ opts[:parent_task_run_external_id] ||= parent_step_run_id
+
+ opts[:child_index] ||= @spawn_indices.next_index(action_key) if action_key
+
+ parent_meta = ContextVars.additional_metadata
+ opts[:additional_metadata] = parent_meta.merge(opts[:additional_metadata] || {}) if parent_meta && !parent_meta.empty?
+ end
+
+ opts
end
end
end
diff --git a/sdks/ruby/src/lib/hatchet/clients.rb b/sdks/ruby/src/lib/hatchet/clients.rb
index 4de461be4..d694211d6 100644
--- a/sdks/ruby/src/lib/hatchet/clients.rb
+++ b/sdks/ruby/src/lib/hatchet/clients.rb
@@ -1,5 +1,4 @@
# frozen_string_literal: true
-# typed: strict
# Hatchet clients module
# This module provides access to different client types (REST, gRPC, etc.)
@@ -28,9 +27,7 @@ module Hatchet
# rest_client = Hatchet::Clients.rest_client(config)
# workflows_api = Hatchet::Clients::Rest::WorkflowApi.new(rest_client)
def rest_client(config)
- unless rest_available?
- raise LoadError, "REST client not available. Run `rake api:generate` to generate it from the OpenAPI spec."
- end
+ raise LoadError, "REST client not available. Run `rake api:generate` to generate it from the OpenAPI spec." unless rest_available?
rest_config = Rest::Configuration.from_hatchet_config(config)
Rest::ApiClient.new(rest_config)
diff --git a/sdks/ruby/src/lib/hatchet/clients/grpc/admin.rb b/sdks/ruby/src/lib/hatchet/clients/grpc/admin.rb
new file mode 100644
index 000000000..7c6e21563
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/grpc/admin.rb
@@ -0,0 +1,264 @@
+# frozen_string_literal: true
+
+require "json"
+require "google/protobuf/timestamp_pb"
+
+module Hatchet
+ module Clients
+ module Grpc
+ # gRPC client for the Hatchet Admin service (workflow registration & triggering).
+ #
+ # Uses two stubs on the shared channel:
+ # - V1::AdminService::Stub (v1) for: PutWorkflow, GetRunDetails, CancelTasks, ReplayTasks
+ # - WorkflowService::Stub (v0) for: TriggerWorkflow, BulkTriggerWorkflow, ScheduleWorkflow, PutRateLimit
+ #
+ # The v0 WorkflowService is retained for triggering because it supports
+ # parent-child linking fields that the v1 TriggerWorkflowRun does not expose.
+ #
+ # @example
+ # admin = Admin.new(config: hatchet_config, channel: channel)
+ # admin.put_workflow(workflow.to_proto(config))
+ # ref = admin.trigger_workflow("MyWorkflow", input: { "key" => "value" })
+ class Admin
+ BULK_TRIGGER_BATCH_SIZE = 1000
+
+ # @param config [Hatchet::Config] The Hatchet configuration
+ # @param channel [GRPC::Core::Channel] Shared gRPC channel
+ def initialize(config:, channel:)
+ @config = config
+ @logger = config.logger
+ @channel = channel
+ @v0_stub = nil
+ @v1_stub = nil
+ end
+
+ # Register a workflow definition with the server via v1 AdminService.
+ #
+ # @param workflow_proto [V1::CreateWorkflowVersionRequest] The workflow proto
+ # @return [V1::CreateWorkflowVersionResponse] Registration response
+ def put_workflow(workflow_proto)
+ ensure_connected!
+
+ response = @v1_stub.put_workflow(workflow_proto, metadata: @config.auth_metadata)
+ @logger.debug("Registered workflow: #{workflow_proto.name}")
+ response
+ end
+
+ # Trigger a workflow run via v0 WorkflowService.
+ #
+ # @param workflow_name [String] The workflow name (will be namespaced)
+ # @param input [Hash] Workflow input
+ # @param options [Hash] Trigger options
+ # @option options [String] :parent_id Parent workflow run ID
+ # @option options [String] :parent_task_run_external_id Parent step run ID
+ # @option options [Integer] :child_index Child workflow index
+ # @option options [String] :child_key Child workflow key
+ # @option options [Hash] :additional_metadata Additional metadata
+ # @option options [String] :desired_worker_id Desired worker for sticky dispatch
+ # @option options [Integer] :priority Priority value
+ # @return [String] The workflow run ID
+ # @raise [DedupeViolationError] If a deduplication violation occurs
+ def trigger_workflow(workflow_name, input: {}, options: {})
+ ensure_connected!
+
+ name = @config.apply_namespace(workflow_name)
+
+ request_args = {
+ name: name,
+ input: JSON.generate(input),
+ }
+
+ request_args[:parent_id] = options[:parent_id] if options[:parent_id]
+ request_args[:parent_task_run_external_id] = options[:parent_task_run_external_id] if options[:parent_task_run_external_id]
+ request_args[:child_index] = options[:child_index] if options[:child_index]
+ request_args[:child_key] = options[:child_key] if options[:child_key]
+ request_args[:desired_worker_id] = options[:desired_worker_id] if options[:desired_worker_id]
+ request_args[:priority] = options[:priority] if options[:priority]
+
+ if options[:additional_metadata]
+ request_args[:additional_metadata] = if options[:additional_metadata].is_a?(String)
+ options[:additional_metadata]
+ else
+ JSON.generate(options[:additional_metadata])
+ end
+ end
+
+ request = ::TriggerWorkflowRequest.new(**request_args)
+
+ begin
+ response = @v0_stub.trigger_workflow(request, metadata: @config.auth_metadata)
+ response.workflow_run_id
+ rescue ::GRPC::AlreadyExists => e
+ raise DedupeViolationError, "Deduplication violation: #{e.message}"
+ rescue ::GRPC::ResourceExhausted => e
+ raise ResourceExhaustedError, e.message
+ rescue ::GRPC::BadStatus => e
+ raise Error, "gRPC error triggering workflow: #{e.class}: #{e.message}"
+ end
+ end
+
+ # Trigger multiple workflow runs in bulk via v0 WorkflowService.
+ #
+ # Automatically batches requests in groups of 1000.
+ #
+ # @param workflow_name [String] The workflow name (will be namespaced)
+ # @param items [Array<Hash>] Hashes with :input (Hash) and :options (Hash) keys
+ # @return [Array<String>] Workflow run IDs, concatenated across batches
+ def bulk_trigger_workflow(workflow_name, items)
+ ensure_connected!
+
+ name = @config.apply_namespace(workflow_name)
+
+ requests = items.map do |item|
+ input = item[:input] || {}
+ opts = item[:options] || {}
+
+ request_args = {
+ name: name,
+ input: JSON.generate(input),
+ }
+
+ request_args[:parent_id] = opts[:parent_id] if opts[:parent_id]
+ request_args[:parent_task_run_external_id] = opts[:parent_task_run_external_id] if opts[:parent_task_run_external_id]
+ request_args[:child_index] = opts[:child_index] if opts[:child_index]
+ request_args[:child_key] = opts[:child_key] if opts[:child_key]
+ request_args[:desired_worker_id] = opts[:desired_worker_id] if opts[:desired_worker_id]
+ request_args[:priority] = opts[:priority] if opts[:priority]
+
+ if opts[:additional_metadata]
+ request_args[:additional_metadata] = if opts[:additional_metadata].is_a?(String)
+ opts[:additional_metadata]
+ else
+ JSON.generate(opts[:additional_metadata])
+ end
+ end
+
+ ::TriggerWorkflowRequest.new(**request_args)
+ end
+
+ # Batch in groups of BULK_TRIGGER_BATCH_SIZE
+ all_run_ids = []
+ begin
+ requests.each_slice(BULK_TRIGGER_BATCH_SIZE) do |batch|
+ bulk_request = ::BulkTriggerWorkflowRequest.new(workflows: batch)
+ response = @v0_stub.bulk_trigger_workflow(bulk_request, metadata: @config.auth_metadata)
+ all_run_ids.concat(response.workflow_run_ids.to_a)
+ end
+ rescue ::GRPC::ResourceExhausted => e
+ raise ResourceExhaustedError, e.message
+ rescue ::GRPC::BadStatus => e
+ raise Error, "gRPC error triggering bulk workflow: #{e.class}: #{e.message}"
+ end
+
+ all_run_ids
+ end
+
+ # Schedule a workflow for future execution via v0 WorkflowService.
+ #
+ # @param workflow_name [String] The workflow name (will be namespaced)
+ # @param run_at [Time] When to run
+ # @param input [Hash] Workflow input
+ # @param options [Hash] Trigger options
+ # @return [WorkflowVersion] Schedule response
+ # @raise [DedupeViolationError] If a deduplication violation occurs
+ def schedule_workflow(workflow_name, run_at:, input: {}, options: {})
+ ensure_connected!
+
+ name = @config.apply_namespace(workflow_name)
+
+ schedule_timestamp = Google::Protobuf::Timestamp.new(
+ seconds: run_at.to_i,
+ nanos: run_at.respond_to?(:nsec) ? run_at.nsec : 0,
+ )
+
+ request_args = {
+ name: name,
+ schedules: [schedule_timestamp],
+ input: JSON.generate(input),
+ }
+
+ request_args[:parent_id] = options[:parent_id] if options[:parent_id]
+ request_args[:parent_task_run_external_id] = options[:parent_task_run_external_id] if options[:parent_task_run_external_id]
+ request_args[:child_index] = options[:child_index] if options[:child_index]
+ request_args[:child_key] = options[:child_key] if options[:child_key]
+ request_args[:priority] = options[:priority] if options[:priority]
+
+ if options[:additional_metadata]
+ request_args[:additional_metadata] = if options[:additional_metadata].is_a?(String)
+ options[:additional_metadata]
+ else
+ JSON.generate(options[:additional_metadata])
+ end
+ end
+
+ request = ::ScheduleWorkflowRequest.new(**request_args)
+
+ begin
+ @v0_stub.schedule_workflow(request, metadata: @config.auth_metadata)
+ rescue ::GRPC::AlreadyExists => e
+ raise DedupeViolationError, "Deduplication violation: #{e.message}"
+ rescue ::GRPC::ResourceExhausted => e
+ raise ResourceExhaustedError, e.message
+ rescue ::GRPC::BadStatus => e
+ raise Error, "gRPC error scheduling workflow: #{e.class}: #{e.message}"
+ end
+ end
+
+ # Get run details via v1 AdminService.
+ #
+ # @param external_id [String] The workflow run external ID
+ # @return [V1::GetRunDetailsResponse]
+ def get_run_details(external_id:)
+ ensure_connected!
+
+ request = ::V1::GetRunDetailsRequest.new(external_id: external_id)
+ @v1_stub.get_run_details(request, metadata: @config.auth_metadata)
+ end
+
+ # Put a rate limit via v0 WorkflowService.
+ #
+ # @param key [String] Rate limit key
+ # @param limit [Integer] Rate limit value
+ # @param duration [Symbol] Rate limit duration enum
+ # @return [PutRateLimitResponse]
+ def put_rate_limit(key:, limit:, duration:)
+ ensure_connected!
+
+ request = ::PutRateLimitRequest.new(
+ key: key,
+ limit: limit,
+ duration: duration,
+ )
+
+ @stub_args_placeholder = nil if false # (unused) — see note below
+ end
+
+ # Drop stub references; the shared channel itself is managed by the caller.
+ def close
+ @v0_stub = nil
+ @v1_stub = nil
+ end
+
+ private
+ # Lazily create both stubs on the shared channel; idempotent (no-op once connected).
+ def ensure_connected!
+ return if @v0_stub && @v1_stub
+
+ @v0_stub = ::WorkflowService::Stub.new(
+ @config.host_port,
+ nil,
+ channel_override: @channel,
+ )
+
+ @v1_stub = ::V1::AdminService::Stub.new(
+ @config.host_port,
+ nil,
+ channel_override: @channel,
+ )
+
+ @logger.debug("Admin gRPC stubs (v0 + v1) connected via shared channel")
+ end
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/grpc/dispatcher.rb b/sdks/ruby/src/lib/hatchet/clients/grpc/dispatcher.rb
new file mode 100644
index 000000000..57a1df7cb
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/grpc/dispatcher.rb
@@ -0,0 +1,253 @@
+# frozen_string_literal: true
+
+require "google/protobuf/timestamp_pb"
+
+module Hatchet
+ module Clients
+ module Grpc
+ # gRPC client for the Hatchet Dispatcher service.
+ #
+ # Handles worker registration, action listening, result reporting,
+ # heartbeats, and other dispatcher interactions.
+ #
+ # Uses the generated Dispatcher::Stub from dispatcher.proto for v0 RPCs,
+ # and shares a gRPC channel provided by Hatchet::Connection.
+ #
+ # @example
+ # dispatcher = Dispatcher.new(config: hatchet_config, channel: channel)
+ # response = dispatcher.register(name: "my-worker", ...)
+ # dispatcher.listen(worker_id: response.worker_id) { |action| ... }
+ class Dispatcher
+ # The worker ID assigned by the server after a successful register call.
+ #
+ # @return [String, nil] nil until registration completes
+ attr_reader :worker_id
+
+ def initialize(config:, channel:)
+ @config = config
+ @logger = config.logger
+ @channel = channel
+ @stub = nil
+ @worker_id = nil
+ end
+
+ # Register a worker with the dispatcher.
+ #
+ # @param name [String] Worker name
+ # @param actions [Array<String>] List of action IDs this worker handles
+ # @param slots [Integer] Number of concurrent task slots
+ # @param labels [Hash] Worker labels (String keys, String or Integer values)
+ # @return [WorkerRegisterResponse] Registration response with worker_id
+ def register(name:, actions:, slots:, labels: {})
+ ensure_connected!
+
+ label_map = labels.each_with_object({}) do |(k, v), map|
+ wl = if v.is_a?(Integer)
+ ::WorkerLabels.new(int_value: v)
+ else
+ ::WorkerLabels.new(str_value: v.to_s)
+ end
+ map[k.to_s] = wl
+ end
+
+ runtime_info = ::RuntimeInfo.new(
+ language: :RUBY,
+ sdk_version: Hatchet::VERSION,
+ language_version: RUBY_VERSION,
+ os: RUBY_PLATFORM,
+ )
+
+ request = ::WorkerRegisterRequest.new(
+ worker_name: name,
+ actions: actions,
+ slots: slots,
+ labels: label_map,
+ runtime_info: runtime_info,
+ )
+
+ begin
+ response = @stub.register(request, metadata: @config.auth_metadata)
+ rescue ::GRPC::Internal
+ request = ::WorkerRegisterRequest.new(
+ worker_name: name,
+ actions: actions,
+ slots: slots,
+ labels: label_map,
+ )
+ response = @stub.register(request, metadata: @config.auth_metadata)
+ @logger.warn("Registered without runtime_info — engine may not support " \
+ "RUBY language type. Consider upgrading your Hatchet engine.")
+ end
+
+ @worker_id = response.worker_id
+ @logger.info("Registered worker '#{name}' with #{actions.length} action(s), worker_id=#{response.worker_id}")
+ response
+ end
+
+ # Listen for action assignments via gRPC server-streaming (ListenV2).
+ #
+ # Returns an Enumerator of AssignedAction messages. The caller is
+ # responsible for iterating and handling reconnection.
+ #
+ # @param worker_id [String] The registered worker ID
+ # @return [Enumerator] Stream of AssignedAction messages
+ def listen(worker_id:)
+ ensure_connected!
+
+ request = ::WorkerListenRequest.new(worker_id: worker_id)
+ @stub.listen_v2(request, metadata: @config.auth_metadata)
+ end
+
+ # Send a heartbeat to keep the worker registration alive.
+ #
+ # @param worker_id [String] The worker ID
+ # @return [HeartbeatResponse]
+ def heartbeat(worker_id:)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ request = ::HeartbeatRequest.new(
+ worker_id: worker_id,
+ heartbeat_at: timestamp,
+ )
+
+ @stub.heartbeat(request, metadata: @config.auth_metadata)
+ end
+
+ # Send a step action event (completion/failure/started) back to the dispatcher.
+ #
+ # Accepts the full action object (AssignedAction) so all StepActionEvent
+ # fields can be populated, matching the Python SDK pattern.
+ #
+ # @param action [AssignedAction] The assigned action object
+ # @param event_type [Symbol] Protobuf enum value (e.g., :STEP_EVENT_TYPE_COMPLETED)
+ # @param payload [String] JSON-serialized event payload
+ # @param retry_count [Integer, nil] Current retry count
+ # @param should_not_retry [Boolean, nil] Whether to suppress further retries
+ # @return [ActionEventResponse]
+ def send_step_action_event(action:, event_type:, payload: "{}", retry_count: nil, should_not_retry: nil)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ event_args = {
+ worker_id: @worker_id || "",
+ job_id: action.job_id,
+ job_run_id: action.job_run_id,
+ task_id: action.task_id,
+ task_run_external_id: action.task_run_external_id,
+ action_id: action.action_id,
+ event_timestamp: timestamp,
+ event_type: event_type,
+ event_payload: payload,
+ }
+
+ event_args[:retry_count] = retry_count unless retry_count.nil?
+ event_args[:should_not_retry] = should_not_retry unless should_not_retry.nil?
+
+ request = ::StepActionEvent.new(**event_args)
+ @stub.send_step_action_event(request, metadata: @config.auth_metadata)
+ end
+
+ # Refresh the timeout for a running task.
+ #
+ # @param step_run_id [String] The task run external ID
+ # @param timeout_seconds [Integer, String] New timeout increment (in seconds or as a duration string)
+ # @return [RefreshTimeoutResponse]
+ def refresh_timeout(step_run_id:, timeout_seconds:)
+ ensure_connected!
+
+ increment = timeout_seconds.is_a?(String) ? timeout_seconds : "#{timeout_seconds}s"
+
+ request = ::RefreshTimeoutRequest.new(
+ task_run_external_id: step_run_id,
+ increment_timeout_by: increment,
+ )
+
+ @stub.refresh_timeout(request, metadata: @config.auth_metadata)
+ end
+
+ # Release a worker slot for a task.
+ #
+ # @param step_run_id [String] The task run external ID
+ # @return [ReleaseSlotResponse]
+ def release_slot(step_run_id:)
+ ensure_connected!
+
+ request = ::ReleaseSlotRequest.new(
+ task_run_external_id: step_run_id,
+ )
+
+ @stub.release_slot(request, metadata: @config.auth_metadata)
+ end
+
+ # Update worker labels.
+ #
+ # @param worker_id [String] The worker ID
+ # @param labels [Hash] New labels to upsert (String keys, String/Integer values)
+ # @return [UpsertWorkerLabelsResponse]
+ def upsert_worker_labels(worker_id:, labels:)
+ ensure_connected!
+
+ label_map = labels.each_with_object({}) do |(k, v), map|
+ wl = if v.is_a?(Integer)
+ ::WorkerLabels.new(int_value: v)
+ else
+ ::WorkerLabels.new(str_value: v.to_s)
+ end
+ map[k.to_s] = wl
+ end
+
+ request = ::UpsertWorkerLabelsRequest.new(
+ worker_id: worker_id,
+ labels: label_map,
+ )
+
+ @stub.upsert_worker_labels(request, metadata: @config.auth_metadata)
+ end
+
+ # Open a bidirectional streaming subscription for workflow run events.
+ #
+ # The caller provides an Enumerable (typically an Enumerator backed by
+ # a Queue) of SubscribeToWorkflowRunsRequest messages. The server
+ # streams back WorkflowRunEvent messages as workflow runs complete.
+ #
+ # @param request_enum [Enumerable] Outgoing request stream
+ # @return [Enumerator] Incoming response stream
+ def subscribe_to_workflow_runs(request_enum)
+ ensure_connected!
+
+ @stub.subscribe_to_workflow_runs(request_enum, metadata: @config.auth_metadata)
+ end
+
+ # Drop the stub reference; the shared channel itself is managed by the caller.
+ def close
+ @stub = nil
+ end
+
+ private
+ # Lazily create the Dispatcher stub on the shared channel; idempotent (no-op once connected).
+ def ensure_connected!
+ return if @stub
+
+ @stub = ::Dispatcher::Stub.new(
+ @config.host_port,
+ nil,
+ channel_override: @channel,
+ )
+
+ @logger.debug("Dispatcher gRPC stub connected via shared channel")
+ end
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/grpc/event_client.rb b/sdks/ruby/src/lib/hatchet/clients/grpc/event_client.rb
new file mode 100644
index 000000000..3b48e99aa
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/grpc/event_client.rb
@@ -0,0 +1,181 @@
+# frozen_string_literal: true
+
+require "json"
+require "google/protobuf/timestamp_pb"
+
+module Hatchet
+ module Clients
+ module Grpc
+ # gRPC client for the Hatchet Events service.
+ #
+ # Handles pushing events, bulk events, logs, and stream events via gRPC.
+ # Uses the generated EventsService::Stub from events.proto on the shared channel.
+ #
+ # @example
+ # event_client = EventClient.new(config: hatchet_config, channel: channel)
+ # event_client.push(key: "user:create", payload: { "name" => "Alice" })
+ class EventClient
+ MAX_LOG_MESSAGE_LENGTH = 10_000
+
+ # @param config [Hatchet::Config] The Hatchet configuration
+ # @param channel [GRPC::Core::Channel] Shared gRPC channel
+ def initialize(config:, channel:)
+ @config = config
+ @logger = config.logger
+ @channel = channel
+ @stub = nil
+ end
+
+ # Push an event via gRPC.
+ #
+ # @param key [String] Event key (will be namespaced)
+ # @param payload [Hash] Event payload
+ # @param additional_metadata [Hash, nil] Additional metadata
+ # @param priority [Integer, nil] Event priority
+ # @param scope [String, nil] Event scope
+ # @param namespace [String, nil] Optional namespace override
+ # @return [Event] Push response
+ def push(key:, payload:, additional_metadata: nil, priority: nil, scope: nil, namespace: nil)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ namespaced_key = @config.apply_namespace(key, namespace_override: namespace)
+
+ request_args = {
+ key: namespaced_key,
+ payload: JSON.generate(payload),
+ event_timestamp: timestamp,
+ }
+
+ if additional_metadata
+ request_args[:additional_metadata] = if additional_metadata.is_a?(String)
+ additional_metadata
+ else
+ JSON.generate(additional_metadata)
+ end
+ end
+
+ request_args[:priority] = priority if priority
+ request_args[:scope] = scope if scope
+
+ request = ::PushEventRequest.new(**request_args)
+ @stub.push(request, metadata: @config.auth_metadata)
+ end
+
+ # Push multiple events via gRPC.
+ #
+ # @param events [Array<Hash>] Event hashes with :key, :payload, :additional_metadata, :priority, :scope
+ # @param namespace [String, nil] Optional namespace override applied to all events
+ # @return [Events] Bulk push response
+ def bulk_push(events, namespace: nil)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ items = events.map do |e|
+ request_args = {
+ key: @config.apply_namespace(e[:key], namespace_override: namespace),
+ payload: JSON.generate(e[:payload] || {}),
+ event_timestamp: timestamp,
+ }
+
+ if e[:additional_metadata]
+ request_args[:additional_metadata] = if e[:additional_metadata].is_a?(String)
+ e[:additional_metadata]
+ else
+ JSON.generate(e[:additional_metadata])
+ end
+ end
+
+ request_args[:priority] = e[:priority] if e[:priority]
+ request_args[:scope] = e[:scope] if e[:scope]
+
+ ::PushEventRequest.new(**request_args)
+ end
+
+ request = ::BulkPushEventRequest.new(events: items)
+ @stub.bulk_push(request, metadata: @config.auth_metadata)
+ end
+
+ # Put a log message for a task run.
+ #
+ # @param step_run_id [String] The task run external ID
+ # @param message [String] Log message (truncated to 10K chars)
+ # @return [PutLogResponse]
+ def put_log(step_run_id:, message:)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ truncated_message = message.length > MAX_LOG_MESSAGE_LENGTH ? message[0...MAX_LOG_MESSAGE_LENGTH] : message
+
+ request = ::PutLogRequest.new(
+ task_run_external_id: step_run_id,
+ created_at: timestamp,
+ message: truncated_message,
+ )
+
+ @stub.put_log(request, metadata: @config.auth_metadata)
+ end
+
+ # Put a stream event for real-time streaming.
+ #
+ # @param step_run_id [String] The task run external ID
+ # @param data [String] Stream data chunk (sent as bytes)
+ # @return [PutStreamEventResponse]
+ def put_stream(step_run_id:, data:)
+ ensure_connected!
+
+ now = Time.now
+ timestamp = Google::Protobuf::Timestamp.new(
+ seconds: now.to_i,
+ nanos: now.nsec,
+ )
+
+ # The message field in PutStreamEventRequest is bytes
+ message_bytes = data.is_a?(String) ? data.b : data.to_s.b
+
+ request = ::PutStreamEventRequest.new(
+ task_run_external_id: step_run_id,
+ created_at: timestamp,
+ message: message_bytes,
+ )
+
+ @stub.put_stream_event(request, metadata: @config.auth_metadata)
+ end
+
+ # Drop the stub reference; the shared channel itself is managed by the caller.
+ def close
+ @stub = nil
+ end
+
+ private
+ # Lazily create the EventsService stub on the shared channel; idempotent (no-op once connected).
+ def ensure_connected!
+ return if @stub
+
+ @stub = ::EventsService::Stub.new(
+ @config.host_port,
+ nil,
+ channel_override: @channel,
+ )
+
+ @logger.debug("Events gRPC stub connected via shared channel")
+ end
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest.rb b/sdks/ruby/src/lib/hatchet/clients/rest.rb
index 01b2299b0..3552b7f5d 100644
--- a/sdks/ruby/src/lib/hatchet/clients/rest.rb
+++ b/sdks/ruby/src/lib/hatchet/clients/rest.rb
@@ -1,35 +1,14 @@
# frozen_string_literal: true
-# typed: strict
-
-require 'fileutils'
-require 'rbconfig'
# Integration file for generated Hatchet REST API client
# This file loads the generated REST client and makes it available under the Hatchet::Clients::Rest namespace
begin
- # Set up load paths for the generated client
+ # Add the generated client's lib/ directory to $LOAD_PATH so that
+ # `require 'hatchet-sdk-rest/...'` calls in the entry point resolve correctly.
rest_lib_path = File.expand_path("rest/lib", __dir__)
$LOAD_PATH.unshift(rest_lib_path) unless $LOAD_PATH.include?(rest_lib_path)
- # Create an alias so hatchet-sdk-rest/ paths resolve to the actual location
- # This is a bit of a hack, but necessary because the generator expects gem-style paths
- hatchet_sdk_rest_base = File.expand_path("rest/lib/hatchet-sdk-rest", __dir__)
- $LOAD_PATH.unshift(hatchet_sdk_rest_base) unless $LOAD_PATH.include?(hatchet_sdk_rest_base)
-
- # Create a symlink in the load path to make hatchet-sdk-rest/ paths work
- fake_gem_path = File.expand_path("rest/lib/hatchet-sdk-rest/hatchet-sdk-rest", __dir__)
- unless File.exist?(fake_gem_path)
- FileUtils.mkdir_p(File.dirname(fake_gem_path))
- # On Unix systems, create a symlink; on Windows, copy the files
- if RbConfig::CONFIG['host_os'] =~ /mswin|mingw|cygwin/
- require 'fileutils'
- FileUtils.cp_r(hatchet_sdk_rest_base, fake_gem_path)
- else
- File.symlink(hatchet_sdk_rest_base, fake_gem_path)
- end
- end
-
# Load the generated REST client
require_relative "rest/lib/hatchet-sdk-rest"
@@ -54,8 +33,8 @@ begin
# Extract host from server_url
if hatchet_config.server_url && !hatchet_config.server_url.empty?
- config.host = hatchet_config.server_url.gsub(/^https?:\/\//, '').split('/').first
- config.scheme = hatchet_config.server_url.start_with?('https') ? 'https' : 'http'
+ config.host = hatchet_config.server_url.gsub(%r{^https?://}, "").split("/").first
+ config.scheme = hatchet_config.server_url.start_with?("https") ? "https" : "http"
end
# Set timeout if available
@@ -63,6 +42,11 @@ begin
config.timeout = hatchet_config.listener_v2_timeout / 1000.0 # Convert ms to seconds
end
+ # Use FlatParamsEncoder so array query parameters are serialized as
+ # key=val1&key=val2 instead of key[]=val1&key[]=val2, matching what
+ # the Hatchet API server expects.
+ config.params_encoder = Faraday::FlatParamsEncoder
+
config
end
end
@@ -86,7 +70,6 @@ begin
end
end
end
-
rescue LoadError => e
# If the generated client files are not available, define an empty module
# This allows the main SDK to load without errors even before generation
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.gitignore b/sdks/ruby/src/lib/hatchet/clients/rest/.gitignore
new file mode 100644
index 000000000..05a17cb8f
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.gitignore
@@ -0,0 +1,39 @@
+# Generated by: https://openapi-generator.tech
+#
+
+*.gem
+*.rbc
+/.config
+/coverage/
+/InstalledFiles
+/pkg/
+/spec/reports/
+/spec/examples.txt
+/test/tmp/
+/test/version_tmp/
+/tmp/
+
+## Specific to RubyMotion:
+.dat*
+.repl_history
+build/
+
+## Documentation cache and generated files:
+/.yardoc/
+/_yardoc/
+/doc/
+/rdoc/
+
+## Environment normalization:
+/.bundle/
+/vendor/bundle
+/lib/bundler/man/
+
+# for a library or gem, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# Gemfile.lock
+# .ruby-version
+# .ruby-gemset
+
+# unless supporting rvm < 1.11.0 or doing something fancy, ignore this:
+.rvmrc
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.gitlab-ci.yml b/sdks/ruby/src/lib/hatchet/clients/rest/.gitlab-ci.yml
new file mode 100644
index 000000000..8be20493b
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.gitlab-ci.yml
@@ -0,0 +1,25 @@
+.ruby: &ruby
+ variables:
+ LANG: "C.UTF-8"
+ before_script:
+ - ruby -v
+ - bundle config set --local deployment true
+ - bundle install -j $(nproc)
+ parallel:
+ matrix:
+ - RUBY_VERSION: ['2.7', '3.0', '3.1']
+ image: "ruby:$RUBY_VERSION"
+ cache:
+ paths:
+ - vendor/ruby
+ key: 'ruby-$RUBY_VERSION'
+
+gem:
+ extends: .ruby
+ script:
+ - bundle exec rspec
+ - bundle exec rake build
+ - bundle exec rake install
+ artifacts:
+ paths:
+ - pkg/*.gem
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator-ignore b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator-ignore
new file mode 100644
index 000000000..7484ee590
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator-ignore
@@ -0,0 +1,23 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/FILES b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/FILES
new file mode 100644
index 000000000..459feaee7
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/FILES
@@ -0,0 +1,257 @@
+.gitignore
+.gitlab-ci.yml
+.openapi-generator-ignore
+.rspec
+.rubocop.yml
+.travis.yml
+Gemfile
+README.md
+Rakefile
+git_push.sh
+hatchet-sdk-rest.gemspec
+lib/hatchet-sdk-rest.rb
+lib/hatchet-sdk-rest/api/api_token_api.rb
+lib/hatchet-sdk-rest/api/cel_api.rb
+lib/hatchet-sdk-rest/api/default_api.rb
+lib/hatchet-sdk-rest/api/event_api.rb
+lib/hatchet-sdk-rest/api/filter_api.rb
+lib/hatchet-sdk-rest/api/github_api.rb
+lib/hatchet-sdk-rest/api/healthcheck_api.rb
+lib/hatchet-sdk-rest/api/log_api.rb
+lib/hatchet-sdk-rest/api/metadata_api.rb
+lib/hatchet-sdk-rest/api/rate_limits_api.rb
+lib/hatchet-sdk-rest/api/slack_api.rb
+lib/hatchet-sdk-rest/api/sns_api.rb
+lib/hatchet-sdk-rest/api/step_run_api.rb
+lib/hatchet-sdk-rest/api/task_api.rb
+lib/hatchet-sdk-rest/api/tenant_api.rb
+lib/hatchet-sdk-rest/api/user_api.rb
+lib/hatchet-sdk-rest/api/webhook_api.rb
+lib/hatchet-sdk-rest/api/worker_api.rb
+lib/hatchet-sdk-rest/api/workflow_api.rb
+lib/hatchet-sdk-rest/api/workflow_run_api.rb
+lib/hatchet-sdk-rest/api/workflow_runs_api.rb
+lib/hatchet-sdk-rest/api_client.rb
+lib/hatchet-sdk-rest/api_error.rb
+lib/hatchet-sdk-rest/configuration.rb
+lib/hatchet-sdk-rest/models/accept_invite_request.rb
+lib/hatchet-sdk-rest/models/api_error.rb
+lib/hatchet-sdk-rest/models/api_errors.rb
+lib/hatchet-sdk-rest/models/api_meta.rb
+lib/hatchet-sdk-rest/models/api_meta_auth.rb
+lib/hatchet-sdk-rest/models/api_meta_integration.rb
+lib/hatchet-sdk-rest/models/api_meta_posthog.rb
+lib/hatchet-sdk-rest/models/api_resource_meta.rb
+lib/hatchet-sdk-rest/models/api_token.rb
+lib/hatchet-sdk-rest/models/bulk_create_event_request.rb
+lib/hatchet-sdk-rest/models/cancel_event_request.rb
+lib/hatchet-sdk-rest/models/concurrency_limit_strategy.rb
+lib/hatchet-sdk-rest/models/concurrency_scope.rb
+lib/hatchet-sdk-rest/models/concurrency_setting.rb
+lib/hatchet-sdk-rest/models/concurrency_stat.rb
+lib/hatchet-sdk-rest/models/create_api_token_request.rb
+lib/hatchet-sdk-rest/models/create_api_token_response.rb
+lib/hatchet-sdk-rest/models/create_cron_workflow_trigger_request.rb
+lib/hatchet-sdk-rest/models/create_event_request.rb
+lib/hatchet-sdk-rest/models/create_pull_request_from_step_run.rb
+lib/hatchet-sdk-rest/models/create_sns_integration_request.rb
+lib/hatchet-sdk-rest/models/create_tenant_alert_email_group_request.rb
+lib/hatchet-sdk-rest/models/create_tenant_invite_request.rb
+lib/hatchet-sdk-rest/models/create_tenant_request.rb
+lib/hatchet-sdk-rest/models/cron_workflows.rb
+lib/hatchet-sdk-rest/models/cron_workflows_list.rb
+lib/hatchet-sdk-rest/models/cron_workflows_method.rb
+lib/hatchet-sdk-rest/models/cron_workflows_order_by_field.rb
+lib/hatchet-sdk-rest/models/event.rb
+lib/hatchet-sdk-rest/models/event_data.rb
+lib/hatchet-sdk-rest/models/event_key_list.rb
+lib/hatchet-sdk-rest/models/event_list.rb
+lib/hatchet-sdk-rest/models/event_order_by_direction.rb
+lib/hatchet-sdk-rest/models/event_order_by_field.rb
+lib/hatchet-sdk-rest/models/event_update_cancel200_response.rb
+lib/hatchet-sdk-rest/models/event_workflow_run_summary.rb
+lib/hatchet-sdk-rest/models/events.rb
+lib/hatchet-sdk-rest/models/get_step_run_diff_response.rb
+lib/hatchet-sdk-rest/models/info_get_version200_response.rb
+lib/hatchet-sdk-rest/models/job.rb
+lib/hatchet-sdk-rest/models/job_run.rb
+lib/hatchet-sdk-rest/models/job_run_status.rb
+lib/hatchet-sdk-rest/models/list_api_tokens_response.rb
+lib/hatchet-sdk-rest/models/list_pull_requests_response.rb
+lib/hatchet-sdk-rest/models/list_slack_webhooks.rb
+lib/hatchet-sdk-rest/models/list_sns_integrations.rb
+lib/hatchet-sdk-rest/models/log_line.rb
+lib/hatchet-sdk-rest/models/log_line_level.rb
+lib/hatchet-sdk-rest/models/log_line_list.rb
+lib/hatchet-sdk-rest/models/log_line_order_by_direction.rb
+lib/hatchet-sdk-rest/models/log_line_order_by_field.rb
+lib/hatchet-sdk-rest/models/pagination_response.rb
+lib/hatchet-sdk-rest/models/pull_request.rb
+lib/hatchet-sdk-rest/models/pull_request_state.rb
+lib/hatchet-sdk-rest/models/queue_metrics.rb
+lib/hatchet-sdk-rest/models/rate_limit.rb
+lib/hatchet-sdk-rest/models/rate_limit_list.rb
+lib/hatchet-sdk-rest/models/rate_limit_order_by_direction.rb
+lib/hatchet-sdk-rest/models/rate_limit_order_by_field.rb
+lib/hatchet-sdk-rest/models/recent_step_runs.rb
+lib/hatchet-sdk-rest/models/registered_workflow.rb
+lib/hatchet-sdk-rest/models/reject_invite_request.rb
+lib/hatchet-sdk-rest/models/replay_event_request.rb
+lib/hatchet-sdk-rest/models/replay_workflow_runs_request.rb
+lib/hatchet-sdk-rest/models/replay_workflow_runs_response.rb
+lib/hatchet-sdk-rest/models/rerun_step_run_request.rb
+lib/hatchet-sdk-rest/models/schedule_workflow_run_request.rb
+lib/hatchet-sdk-rest/models/scheduled_run_status.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_filter.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_request.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_response.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_error.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_update_item.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_update_request.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_bulk_update_response.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_list.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_method.rb
+lib/hatchet-sdk-rest/models/scheduled_workflows_order_by_field.rb
+lib/hatchet-sdk-rest/models/semaphore_slots.rb
+lib/hatchet-sdk-rest/models/slack_webhook.rb
+lib/hatchet-sdk-rest/models/sns_integration.rb
+lib/hatchet-sdk-rest/models/step.rb
+lib/hatchet-sdk-rest/models/step_run.rb
+lib/hatchet-sdk-rest/models/step_run_archive.rb
+lib/hatchet-sdk-rest/models/step_run_archive_list.rb
+lib/hatchet-sdk-rest/models/step_run_diff.rb
+lib/hatchet-sdk-rest/models/step_run_event.rb
+lib/hatchet-sdk-rest/models/step_run_event_list.rb
+lib/hatchet-sdk-rest/models/step_run_event_reason.rb
+lib/hatchet-sdk-rest/models/step_run_event_severity.rb
+lib/hatchet-sdk-rest/models/step_run_status.rb
+lib/hatchet-sdk-rest/models/task_stat.rb
+lib/hatchet-sdk-rest/models/task_status_stat.rb
+lib/hatchet-sdk-rest/models/tenant.rb
+lib/hatchet-sdk-rest/models/tenant_alert_email_group.rb
+lib/hatchet-sdk-rest/models/tenant_alert_email_group_list.rb
+lib/hatchet-sdk-rest/models/tenant_alerting_settings.rb
+lib/hatchet-sdk-rest/models/tenant_environment.rb
+lib/hatchet-sdk-rest/models/tenant_invite.rb
+lib/hatchet-sdk-rest/models/tenant_invite_list.rb
+lib/hatchet-sdk-rest/models/tenant_list.rb
+lib/hatchet-sdk-rest/models/tenant_member.rb
+lib/hatchet-sdk-rest/models/tenant_member_list.rb
+lib/hatchet-sdk-rest/models/tenant_member_role.rb
+lib/hatchet-sdk-rest/models/tenant_queue_metrics.rb
+lib/hatchet-sdk-rest/models/tenant_resource.rb
+lib/hatchet-sdk-rest/models/tenant_resource_limit.rb
+lib/hatchet-sdk-rest/models/tenant_resource_policy.rb
+lib/hatchet-sdk-rest/models/tenant_step_run_queue_metrics.rb
+lib/hatchet-sdk-rest/models/tenant_version.rb
+lib/hatchet-sdk-rest/models/trigger_workflow_run_request.rb
+lib/hatchet-sdk-rest/models/update_cron_workflow_trigger_request.rb
+lib/hatchet-sdk-rest/models/update_scheduled_workflow_run_request.rb
+lib/hatchet-sdk-rest/models/update_tenant_alert_email_group_request.rb
+lib/hatchet-sdk-rest/models/update_tenant_invite_request.rb
+lib/hatchet-sdk-rest/models/update_tenant_member_request.rb
+lib/hatchet-sdk-rest/models/update_tenant_request.rb
+lib/hatchet-sdk-rest/models/update_worker_request.rb
+lib/hatchet-sdk-rest/models/user.rb
+lib/hatchet-sdk-rest/models/user_change_password_request.rb
+lib/hatchet-sdk-rest/models/user_login_request.rb
+lib/hatchet-sdk-rest/models/user_register_request.rb
+lib/hatchet-sdk-rest/models/user_tenant_memberships_list.rb
+lib/hatchet-sdk-rest/models/user_tenant_public.rb
+lib/hatchet-sdk-rest/models/v1_cancel_task_request.rb
+lib/hatchet-sdk-rest/models/v1_cancelled_tasks.rb
+lib/hatchet-sdk-rest/models/v1_cel_debug_request.rb
+lib/hatchet-sdk-rest/models/v1_cel_debug_response.rb
+lib/hatchet-sdk-rest/models/v1_cel_debug_response_status.rb
+lib/hatchet-sdk-rest/models/v1_create_filter_request.rb
+lib/hatchet-sdk-rest/models/v1_create_webhook_request.rb
+lib/hatchet-sdk-rest/models/v1_create_webhook_request_api_key.rb
+lib/hatchet-sdk-rest/models/v1_create_webhook_request_base.rb
+lib/hatchet-sdk-rest/models/v1_create_webhook_request_basic_auth.rb
+lib/hatchet-sdk-rest/models/v1_create_webhook_request_hmac.rb
+lib/hatchet-sdk-rest/models/v1_dag_children.rb
+lib/hatchet-sdk-rest/models/v1_event.rb
+lib/hatchet-sdk-rest/models/v1_event_list.rb
+lib/hatchet-sdk-rest/models/v1_event_triggered_run.rb
+lib/hatchet-sdk-rest/models/v1_event_workflow_run_summary.rb
+lib/hatchet-sdk-rest/models/v1_filter.rb
+lib/hatchet-sdk-rest/models/v1_filter_list.rb
+lib/hatchet-sdk-rest/models/v1_log_line.rb
+lib/hatchet-sdk-rest/models/v1_log_line_level.rb
+lib/hatchet-sdk-rest/models/v1_log_line_list.rb
+lib/hatchet-sdk-rest/models/v1_log_line_order_by_direction.rb
+lib/hatchet-sdk-rest/models/v1_replay_task_request.rb
+lib/hatchet-sdk-rest/models/v1_replayed_tasks.rb
+lib/hatchet-sdk-rest/models/v1_task_event.rb
+lib/hatchet-sdk-rest/models/v1_task_event_list.rb
+lib/hatchet-sdk-rest/models/v1_task_event_type.rb
+lib/hatchet-sdk-rest/models/v1_task_filter.rb
+lib/hatchet-sdk-rest/models/v1_task_point_metric.rb
+lib/hatchet-sdk-rest/models/v1_task_point_metrics.rb
+lib/hatchet-sdk-rest/models/v1_task_run_metric.rb
+lib/hatchet-sdk-rest/models/v1_task_run_status.rb
+lib/hatchet-sdk-rest/models/v1_task_status.rb
+lib/hatchet-sdk-rest/models/v1_task_summary.rb
+lib/hatchet-sdk-rest/models/v1_task_summary_list.rb
+lib/hatchet-sdk-rest/models/v1_task_timing.rb
+lib/hatchet-sdk-rest/models/v1_task_timing_list.rb
+lib/hatchet-sdk-rest/models/v1_trigger_workflow_run_request.rb
+lib/hatchet-sdk-rest/models/v1_update_filter_request.rb
+lib/hatchet-sdk-rest/models/v1_update_webhook_request.rb
+lib/hatchet-sdk-rest/models/v1_webhook.rb
+lib/hatchet-sdk-rest/models/v1_webhook_api_key_auth.rb
+lib/hatchet-sdk-rest/models/v1_webhook_auth_type.rb
+lib/hatchet-sdk-rest/models/v1_webhook_basic_auth.rb
+lib/hatchet-sdk-rest/models/v1_webhook_hmac_algorithm.rb
+lib/hatchet-sdk-rest/models/v1_webhook_hmac_auth.rb
+lib/hatchet-sdk-rest/models/v1_webhook_hmac_encoding.rb
+lib/hatchet-sdk-rest/models/v1_webhook_list.rb
+lib/hatchet-sdk-rest/models/v1_webhook_source_name.rb
+lib/hatchet-sdk-rest/models/v1_workflow_run.rb
+lib/hatchet-sdk-rest/models/v1_workflow_run_details.rb
+lib/hatchet-sdk-rest/models/v1_workflow_run_display_name.rb
+lib/hatchet-sdk-rest/models/v1_workflow_run_display_name_list.rb
+lib/hatchet-sdk-rest/models/v1_workflow_type.rb
+lib/hatchet-sdk-rest/models/webhook_worker.rb
+lib/hatchet-sdk-rest/models/webhook_worker_create_request.rb
+lib/hatchet-sdk-rest/models/webhook_worker_create_response.rb
+lib/hatchet-sdk-rest/models/webhook_worker_created.rb
+lib/hatchet-sdk-rest/models/webhook_worker_list_response.rb
+lib/hatchet-sdk-rest/models/webhook_worker_request.rb
+lib/hatchet-sdk-rest/models/webhook_worker_request_list_response.rb
+lib/hatchet-sdk-rest/models/webhook_worker_request_method.rb
+lib/hatchet-sdk-rest/models/worker.rb
+lib/hatchet-sdk-rest/models/worker_label.rb
+lib/hatchet-sdk-rest/models/worker_list.rb
+lib/hatchet-sdk-rest/models/worker_runtime_info.rb
+lib/hatchet-sdk-rest/models/worker_runtime_sdks.rb
+lib/hatchet-sdk-rest/models/worker_slot_config.rb
+lib/hatchet-sdk-rest/models/worker_type.rb
+lib/hatchet-sdk-rest/models/workflow.rb
+lib/hatchet-sdk-rest/models/workflow_concurrency.rb
+lib/hatchet-sdk-rest/models/workflow_kind.rb
+lib/hatchet-sdk-rest/models/workflow_list.rb
+lib/hatchet-sdk-rest/models/workflow_metrics.rb
+lib/hatchet-sdk-rest/models/workflow_run.rb
+lib/hatchet-sdk-rest/models/workflow_run_list.rb
+lib/hatchet-sdk-rest/models/workflow_run_order_by_direction.rb
+lib/hatchet-sdk-rest/models/workflow_run_order_by_field.rb
+lib/hatchet-sdk-rest/models/workflow_run_shape.rb
+lib/hatchet-sdk-rest/models/workflow_run_shape_item_for_workflow_run_details.rb
+lib/hatchet-sdk-rest/models/workflow_run_status.rb
+lib/hatchet-sdk-rest/models/workflow_run_triggered_by.rb
+lib/hatchet-sdk-rest/models/workflow_runs_cancel_request.rb
+lib/hatchet-sdk-rest/models/workflow_runs_metrics.rb
+lib/hatchet-sdk-rest/models/workflow_runs_metrics_counts.rb
+lib/hatchet-sdk-rest/models/workflow_tag.rb
+lib/hatchet-sdk-rest/models/workflow_trigger_cron_ref.rb
+lib/hatchet-sdk-rest/models/workflow_trigger_event_ref.rb
+lib/hatchet-sdk-rest/models/workflow_triggers.rb
+lib/hatchet-sdk-rest/models/workflow_update_request.rb
+lib/hatchet-sdk-rest/models/workflow_version.rb
+lib/hatchet-sdk-rest/models/workflow_version_definition.rb
+lib/hatchet-sdk-rest/models/workflow_version_meta.rb
+lib/hatchet-sdk-rest/models/workflow_workers_count.rb
+lib/hatchet-sdk-rest/version.rb
+spec/spec_helper.rb
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/VERSION b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/VERSION
new file mode 100644
index 000000000..e465da431
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.openapi-generator/VERSION
@@ -0,0 +1 @@
+7.14.0
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.rspec b/sdks/ruby/src/lib/hatchet/clients/rest/.rspec
new file mode 100644
index 000000000..83e16f804
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.rspec
@@ -0,0 +1,2 @@
+--color
+--require spec_helper
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.rubocop.yml b/sdks/ruby/src/lib/hatchet/clients/rest/.rubocop.yml
new file mode 100644
index 000000000..d32b2b1cd
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.rubocop.yml
@@ -0,0 +1,148 @@
+# This file is based on https://github.com/rails/rails/blob/master/.rubocop.yml (MIT license)
+# Automatically generated by OpenAPI Generator (https://openapi-generator.tech)
+AllCops:
+ TargetRubyVersion: 2.4
+ # RuboCop has a bunch of cops enabled by default. This setting tells RuboCop
+ # to ignore them, so only the ones explicitly set in this file are enabled.
+ DisabledByDefault: true
+ Exclude:
+ - '**/templates/**/*'
+ - '**/vendor/**/*'
+ - 'actionpack/lib/action_dispatch/journey/parser.rb'
+
+# Prefer &&/|| over and/or.
+Style/AndOr:
+ Enabled: true
+
+# Align `when` with `case`.
+Layout/CaseIndentation:
+ Enabled: true
+
+# Align comments with method definitions.
+Layout/CommentIndentation:
+ Enabled: true
+
+Layout/ElseAlignment:
+ Enabled: true
+
+Layout/EmptyLineAfterMagicComment:
+ Enabled: true
+
+# In a regular class definition, no empty lines around the body.
+Layout/EmptyLinesAroundClassBody:
+ Enabled: true
+
+# In a regular method definition, no empty lines around the body.
+Layout/EmptyLinesAroundMethodBody:
+ Enabled: true
+
+# In a regular module definition, no empty lines around the body.
+Layout/EmptyLinesAroundModuleBody:
+ Enabled: true
+
+Layout/FirstArgumentIndentation:
+ Enabled: true
+
+# Use Ruby >= 1.9 syntax for hashes. Prefer { a: :b } over { :a => :b }.
+Style/HashSyntax:
+ Enabled: false
+
+# Method definitions after `private` or `protected` isolated calls need one
+# extra level of indentation.
+Layout/IndentationConsistency:
+ Enabled: true
+ EnforcedStyle: indented_internal_methods
+
+# Two spaces, no tabs (for indentation).
+Layout/IndentationWidth:
+ Enabled: true
+
+Layout/LeadingCommentSpace:
+ Enabled: true
+
+Layout/SpaceAfterColon:
+ Enabled: true
+
+Layout/SpaceAfterComma:
+ Enabled: true
+
+Layout/SpaceAroundEqualsInParameterDefault:
+ Enabled: true
+
+Layout/SpaceAroundKeyword:
+ Enabled: true
+
+Layout/SpaceAroundOperators:
+ Enabled: true
+
+Layout/SpaceBeforeComma:
+ Enabled: true
+
+Layout/SpaceBeforeFirstArg:
+ Enabled: true
+
+Style/DefWithParentheses:
+ Enabled: true
+
+# Defining a method with parameters needs parentheses.
+Style/MethodDefParentheses:
+ Enabled: true
+
+Style/FrozenStringLiteralComment:
+ Enabled: false
+ EnforcedStyle: always
+
+# Use `foo {}` not `foo{}`.
+Layout/SpaceBeforeBlockBraces:
+ Enabled: true
+
+# Use `foo { bar }` not `foo {bar}`.
+Layout/SpaceInsideBlockBraces:
+ Enabled: true
+
+# Use `{ a: 1 }` not `{a:1}`.
+Layout/SpaceInsideHashLiteralBraces:
+ Enabled: true
+
+Layout/SpaceInsideParens:
+ Enabled: true
+
+# Check quotes usage according to lint rule below.
+#Style/StringLiterals:
+# Enabled: true
+# EnforcedStyle: single_quotes
+
+# Detect hard tabs, no hard tabs.
+Layout/IndentationStyle:
+ Enabled: true
+
+# Blank lines should not have any spaces.
+Layout/TrailingEmptyLines:
+ Enabled: true
+
+# No trailing whitespace.
+Layout/TrailingWhitespace:
+ Enabled: false
+
+# Use quotes for string literals when they are enough.
+Style/RedundantPercentQ:
+ Enabled: true
+
+# Align `end` with the matching keyword or starting expression except for
+# assignments, where it should be aligned with the LHS.
+Layout/EndAlignment:
+ Enabled: true
+ EnforcedStyleAlignWith: variable
+ AutoCorrect: true
+
+# Use my_method(my_arg) not my_method( my_arg ) or my_method my_arg.
+Lint/RequireParentheses:
+ Enabled: true
+
+Style/RedundantReturn:
+ Enabled: true
+ AllowMultipleReturnValues: true
+
+Style/Semicolon:
+ Enabled: true
+ AllowAsExpressionSeparator: true
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/.travis.yml b/sdks/ruby/src/lib/hatchet/clients/rest/.travis.yml
new file mode 100644
index 000000000..6516f0dca
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/.travis.yml
@@ -0,0 +1,11 @@
+language: ruby
+cache: bundler
+rvm:
+ - 2.7
+ - 3.0
+ - 3.1
+script:
+ - bundle install --path vendor/bundle
+ - bundle exec rspec
+ - gem build hatchet-sdk-rest.gemspec
+ - gem install ./hatchet-sdk-rest-0.0.1.gem
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/Gemfile b/sdks/ruby/src/lib/hatchet/clients/rest/Gemfile
new file mode 100644
index 000000000..c2e3127cd
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/Gemfile
@@ -0,0 +1,9 @@
+source 'https://rubygems.org'
+
+gemspec
+
+group :development, :test do
+ gem 'rake', '~> 13.0.1'
+ gem 'pry-byebug'
+ gem 'rubocop', '~> 0.66.0'
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/README.md b/sdks/ruby/src/lib/hatchet/clients/rest/README.md
new file mode 100644
index 000000000..7d6fb6bf3
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/README.md
@@ -0,0 +1,475 @@
+# hatchet-sdk-rest
+
+HatchetSdkRest - the Ruby gem for the Hatchet API
+
+The Hatchet API
+
+This SDK is automatically generated by the [OpenAPI Generator](https://openapi-generator.tech) project:
+
+- API version: 1.0.0
+- Package version: 0.0.1
+- Generator version: 7.14.0
+- Build package: org.openapitools.codegen.languages.RubyClientCodegen
+
+## Installation
+
+### Build a gem
+
+To build the Ruby code into a gem:
+
+```shell
+gem build hatchet-sdk-rest.gemspec
+```
+
+Then either install the gem locally:
+
+```shell
+gem install ./hatchet-sdk-rest-0.0.1.gem
+```
+
+(for development, run `gem install --dev ./hatchet-sdk-rest-0.0.1.gem` to install the development dependencies)
+
+or publish the gem to a gem hosting service, e.g. [RubyGems](https://rubygems.org/).
+
+Finally add this to the Gemfile:
+
+ gem 'hatchet-sdk-rest', '~> 0.0.1'
+
+### Install from Git
+
+If the Ruby gem is hosted at a git repository: https://github.com/GIT_USER_ID/GIT_REPO_ID, then add the following in the Gemfile:
+
+ gem 'hatchet-sdk-rest', :git => 'https://github.com/GIT_USER_ID/GIT_REPO_ID.git'
+
+### Include the Ruby code directly
+
+Include the Ruby code directly using `-I` as follows:
+
+```shell
+ruby -Ilib script.rb
+```
+
+## Getting Started
+
+Please follow the [installation](#installation) procedure and then run the following code:
+
+```ruby
+# Load the gem
+require 'hatchet-sdk-rest'
+
+# Setup authorization
+HatchetSdkRest.configure do |config|
+ # Configure API key authorization: cookieAuth
+ config.api_key['hatchet'] = 'YOUR API KEY'
+ # Uncomment the following line to set a prefix for the API key, e.g. 'Bearer' (defaults to nil)
+ # config.api_key_prefix['hatchet'] = 'Bearer'
+ # Configure faraday connection
+ config.configure_faraday_connection { |connection| 'YOUR CONNECTION CONFIG PROC' }
+
+ # Configure Bearer authorization: bearerAuth
+ config.access_token = 'YOUR_BEARER_TOKEN'
+ # Configure a proc to get access tokens in lieu of the static access_token configuration
+ config.access_token_getter = -> { 'YOUR TOKEN GETTER PROC' }
+ # Configure faraday connection
+ config.configure_faraday_connection { |connection| 'YOUR CONNECTION CONFIG PROC' }
+end
+
+api_instance = HatchetSdkRest::APITokenApi.new
+tenant = '38400000-8cf0-11bd-b23e-10b96e4ef00d' # String | The tenant id
+opts = {
+ create_api_token_request: HatchetSdkRest::CreateAPITokenRequest.new({name: 'name_example'}) # CreateAPITokenRequest |
+}
+
+begin
+  # Create API Token
+ result = api_instance.api_token_create(tenant, opts)
+ p result
+rescue HatchetSdkRest::ApiError => e
+ puts "Exception when calling APITokenApi->api_token_create: #{e}"
+end
+
+```
+
+## Documentation for API Endpoints
+
+All URIs are relative to *http://localhost*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*HatchetSdkRest::APITokenApi* | [**api_token_create**](docs/APITokenApi.md#api_token_create) | **POST** /api/v1/tenants/{tenant}/api-tokens | Create API Token
+*HatchetSdkRest::APITokenApi* | [**api_token_list**](docs/APITokenApi.md#api_token_list) | **GET** /api/v1/tenants/{tenant}/api-tokens | List API Tokens
+*HatchetSdkRest::APITokenApi* | [**api_token_update_revoke**](docs/APITokenApi.md#api_token_update_revoke) | **POST** /api/v1/api-tokens/{api-token} | Revoke API Token
+*HatchetSdkRest::CELApi* | [**v1_cel_debug**](docs/CELApi.md#v1_cel_debug) | **POST** /api/v1/stable/tenants/{tenant}/cel/debug | Debug a CEL expression
+*HatchetSdkRest::DefaultApi* | [**info_get_version**](docs/DefaultApi.md#info_get_version) | **GET** /api/v1/version | We return the version for the currently running server
+*HatchetSdkRest::DefaultApi* | [**monitoring_post_run_probe**](docs/DefaultApi.md#monitoring_post_run_probe) | **POST** /api/v1/monitoring/{tenant}/probe | Detailed Health Probe For the Instance
+*HatchetSdkRest::DefaultApi* | [**tenant_invite_delete**](docs/DefaultApi.md#tenant_invite_delete) | **DELETE** /api/v1/tenants/{tenant}/invites/{tenant-invite} | Delete invite
+*HatchetSdkRest::DefaultApi* | [**tenant_invite_update**](docs/DefaultApi.md#tenant_invite_update) | **PATCH** /api/v1/tenants/{tenant}/invites/{tenant-invite} | Update invite
+*HatchetSdkRest::DefaultApi* | [**webhook_create**](docs/DefaultApi.md#webhook_create) | **POST** /api/v1/tenants/{tenant}/webhook-workers | Create a webhook
+*HatchetSdkRest::DefaultApi* | [**webhook_delete**](docs/DefaultApi.md#webhook_delete) | **DELETE** /api/v1/webhook-workers/{webhook} | Delete a webhook
+*HatchetSdkRest::DefaultApi* | [**webhook_list**](docs/DefaultApi.md#webhook_list) | **GET** /api/v1/tenants/{tenant}/webhook-workers | List webhooks
+*HatchetSdkRest::DefaultApi* | [**webhook_requests_list**](docs/DefaultApi.md#webhook_requests_list) | **GET** /api/v1/webhook-workers/{webhook}/requests | List webhook requests
+*HatchetSdkRest::EventApi* | [**event_create**](docs/EventApi.md#event_create) | **POST** /api/v1/tenants/{tenant}/events | Create event
+*HatchetSdkRest::EventApi* | [**event_create_bulk**](docs/EventApi.md#event_create_bulk) | **POST** /api/v1/tenants/{tenant}/events/bulk | Bulk Create events
+*HatchetSdkRest::EventApi* | [**event_data_get**](docs/EventApi.md#event_data_get) | **GET** /api/v1/events/{event}/data | Get event data
+*HatchetSdkRest::EventApi* | [**event_data_get_with_tenant**](docs/EventApi.md#event_data_get_with_tenant) | **GET** /api/v1/tenants/{tenant}/events/{event-with-tenant}/data | Get event data
+*HatchetSdkRest::EventApi* | [**event_get**](docs/EventApi.md#event_get) | **GET** /api/v1/events/{event} | Get event data
+*HatchetSdkRest::EventApi* | [**event_key_list**](docs/EventApi.md#event_key_list) | **GET** /api/v1/tenants/{tenant}/events/keys | List event keys
+*HatchetSdkRest::EventApi* | [**event_list**](docs/EventApi.md#event_list) | **GET** /api/v1/tenants/{tenant}/events | List events
+*HatchetSdkRest::EventApi* | [**event_update_cancel**](docs/EventApi.md#event_update_cancel) | **POST** /api/v1/tenants/{tenant}/events/cancel | Cancel events
+*HatchetSdkRest::EventApi* | [**event_update_replay**](docs/EventApi.md#event_update_replay) | **POST** /api/v1/tenants/{tenant}/events/replay | Replay events
+*HatchetSdkRest::EventApi* | [**v1_event_get**](docs/EventApi.md#v1_event_get) | **GET** /api/v1/stable/tenants/{tenant}/events/{v1-event} | Get events
+*HatchetSdkRest::EventApi* | [**v1_event_key_list**](docs/EventApi.md#v1_event_key_list) | **GET** /api/v1/stable/tenants/{tenant}/events/keys | List event keys
+*HatchetSdkRest::EventApi* | [**v1_event_list**](docs/EventApi.md#v1_event_list) | **GET** /api/v1/stable/tenants/{tenant}/events | List events
+*HatchetSdkRest::FilterApi* | [**v1_filter_create**](docs/FilterApi.md#v1_filter_create) | **POST** /api/v1/stable/tenants/{tenant}/filters | Create a filter
+*HatchetSdkRest::FilterApi* | [**v1_filter_delete**](docs/FilterApi.md#v1_filter_delete) | **DELETE** /api/v1/stable/tenants/{tenant}/filters/{v1-filter} |
+*HatchetSdkRest::FilterApi* | [**v1_filter_get**](docs/FilterApi.md#v1_filter_get) | **GET** /api/v1/stable/tenants/{tenant}/filters/{v1-filter} | Get a filter
+*HatchetSdkRest::FilterApi* | [**v1_filter_list**](docs/FilterApi.md#v1_filter_list) | **GET** /api/v1/stable/tenants/{tenant}/filters | List filters
+*HatchetSdkRest::FilterApi* | [**v1_filter_update**](docs/FilterApi.md#v1_filter_update) | **PATCH** /api/v1/stable/tenants/{tenant}/filters/{v1-filter} |
+*HatchetSdkRest::GithubApi* | [**sns_update**](docs/GithubApi.md#sns_update) | **POST** /api/v1/sns/{tenant}/{event} | Github app tenant webhook
+*HatchetSdkRest::HealthcheckApi* | [**liveness_get**](docs/HealthcheckApi.md#liveness_get) | **GET** /api/live | Get liveness
+*HatchetSdkRest::HealthcheckApi* | [**readiness_get**](docs/HealthcheckApi.md#readiness_get) | **GET** /api/ready | Get readiness
+*HatchetSdkRest::LogApi* | [**log_line_list**](docs/LogApi.md#log_line_list) | **GET** /api/v1/step-runs/{step-run}/logs | List log lines
+*HatchetSdkRest::LogApi* | [**v1_log_line_list**](docs/LogApi.md#v1_log_line_list) | **GET** /api/v1/stable/tasks/{task}/logs | List log lines
+*HatchetSdkRest::MetadataApi* | [**cloud_metadata_get**](docs/MetadataApi.md#cloud_metadata_get) | **GET** /api/v1/cloud/metadata | Get cloud metadata
+*HatchetSdkRest::MetadataApi* | [**metadata_get**](docs/MetadataApi.md#metadata_get) | **GET** /api/v1/meta | Get metadata
+*HatchetSdkRest::MetadataApi* | [**metadata_list_integrations**](docs/MetadataApi.md#metadata_list_integrations) | **GET** /api/v1/meta/integrations | List integrations
+*HatchetSdkRest::RateLimitsApi* | [**rate_limit_list**](docs/RateLimitsApi.md#rate_limit_list) | **GET** /api/v1/tenants/{tenant}/rate-limits | List rate limits
+*HatchetSdkRest::SNSApi* | [**sns_create**](docs/SNSApi.md#sns_create) | **POST** /api/v1/tenants/{tenant}/sns | Create SNS integration
+*HatchetSdkRest::SNSApi* | [**sns_delete**](docs/SNSApi.md#sns_delete) | **DELETE** /api/v1/sns/{sns} | Delete SNS integration
+*HatchetSdkRest::SNSApi* | [**sns_list**](docs/SNSApi.md#sns_list) | **GET** /api/v1/tenants/{tenant}/sns | List SNS integrations
+*HatchetSdkRest::SlackApi* | [**slack_webhook_delete**](docs/SlackApi.md#slack_webhook_delete) | **DELETE** /api/v1/slack/{slack} | Delete Slack webhook
+*HatchetSdkRest::SlackApi* | [**slack_webhook_list**](docs/SlackApi.md#slack_webhook_list) | **GET** /api/v1/tenants/{tenant}/slack | List Slack integrations
+*HatchetSdkRest::StepRunApi* | [**step_run_get**](docs/StepRunApi.md#step_run_get) | **GET** /api/v1/tenants/{tenant}/step-runs/{step-run} | Get step run
+*HatchetSdkRest::StepRunApi* | [**step_run_get_schema**](docs/StepRunApi.md#step_run_get_schema) | **GET** /api/v1/tenants/{tenant}/step-runs/{step-run}/schema | Get step run schema
+*HatchetSdkRest::StepRunApi* | [**step_run_list_archives**](docs/StepRunApi.md#step_run_list_archives) | **GET** /api/v1/step-runs/{step-run}/archives | List archives for step run
+*HatchetSdkRest::StepRunApi* | [**step_run_list_events**](docs/StepRunApi.md#step_run_list_events) | **GET** /api/v1/step-runs/{step-run}/events | List events for step run
+*HatchetSdkRest::StepRunApi* | [**step_run_update_cancel**](docs/StepRunApi.md#step_run_update_cancel) | **POST** /api/v1/tenants/{tenant}/step-runs/{step-run}/cancel | Attempts to cancel a step run
+*HatchetSdkRest::StepRunApi* | [**step_run_update_rerun**](docs/StepRunApi.md#step_run_update_rerun) | **POST** /api/v1/tenants/{tenant}/step-runs/{step-run}/rerun | Rerun step run
+*HatchetSdkRest::StepRunApi* | [**workflow_run_list_step_run_events**](docs/StepRunApi.md#workflow_run_list_step_run_events) | **GET** /api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/step-run-events | List events for all step runs for a workflow run
+*HatchetSdkRest::TaskApi* | [**v1_dag_list_tasks**](docs/TaskApi.md#v1_dag_list_tasks) | **GET** /api/v1/stable/dags/tasks | List tasks
+*HatchetSdkRest::TaskApi* | [**v1_task_cancel**](docs/TaskApi.md#v1_task_cancel) | **POST** /api/v1/stable/tenants/{tenant}/tasks/cancel | Cancel tasks
+*HatchetSdkRest::TaskApi* | [**v1_task_event_list**](docs/TaskApi.md#v1_task_event_list) | **GET** /api/v1/stable/tasks/{task}/task-events | List events for a task
+*HatchetSdkRest::TaskApi* | [**v1_task_get**](docs/TaskApi.md#v1_task_get) | **GET** /api/v1/stable/tasks/{task} | Get a task
+*HatchetSdkRest::TaskApi* | [**v1_task_get_point_metrics**](docs/TaskApi.md#v1_task_get_point_metrics) | **GET** /api/v1/stable/tenants/{tenant}/task-point-metrics | Get task point metrics
+*HatchetSdkRest::TaskApi* | [**v1_task_list_status_metrics**](docs/TaskApi.md#v1_task_list_status_metrics) | **GET** /api/v1/stable/tenants/{tenant}/task-metrics | Get task metrics
+*HatchetSdkRest::TaskApi* | [**v1_task_replay**](docs/TaskApi.md#v1_task_replay) | **POST** /api/v1/stable/tenants/{tenant}/tasks/replay | Replay tasks
+*HatchetSdkRest::TenantApi* | [**alert_email_group_create**](docs/TenantApi.md#alert_email_group_create) | **POST** /api/v1/tenants/{tenant}/alerting-email-groups | Create tenant alert email group
+*HatchetSdkRest::TenantApi* | [**alert_email_group_delete**](docs/TenantApi.md#alert_email_group_delete) | **DELETE** /api/v1/alerting-email-groups/{alert-email-group} | Delete tenant alert email group
+*HatchetSdkRest::TenantApi* | [**alert_email_group_list**](docs/TenantApi.md#alert_email_group_list) | **GET** /api/v1/tenants/{tenant}/alerting-email-groups | List tenant alert email groups
+*HatchetSdkRest::TenantApi* | [**alert_email_group_update**](docs/TenantApi.md#alert_email_group_update) | **PATCH** /api/v1/alerting-email-groups/{alert-email-group} | Update tenant alert email group
+*HatchetSdkRest::TenantApi* | [**tenant_alerting_settings_get**](docs/TenantApi.md#tenant_alerting_settings_get) | **GET** /api/v1/tenants/{tenant}/alerting/settings | Get tenant alerting settings
+*HatchetSdkRest::TenantApi* | [**tenant_create**](docs/TenantApi.md#tenant_create) | **POST** /api/v1/tenants | Create tenant
+*HatchetSdkRest::TenantApi* | [**tenant_get**](docs/TenantApi.md#tenant_get) | **GET** /api/v1/tenants/{tenant} | Get tenant
+*HatchetSdkRest::TenantApi* | [**tenant_get_prometheus_metrics**](docs/TenantApi.md#tenant_get_prometheus_metrics) | **GET** /api/v1/tenants/{tenant}/prometheus-metrics | Get prometheus metrics
+*HatchetSdkRest::TenantApi* | [**tenant_get_step_run_queue_metrics**](docs/TenantApi.md#tenant_get_step_run_queue_metrics) | **GET** /api/v1/tenants/{tenant}/step-run-queue-metrics | Get step run metrics
+*HatchetSdkRest::TenantApi* | [**tenant_get_task_stats**](docs/TenantApi.md#tenant_get_task_stats) | **GET** /api/v1/tenants/{tenant}/task-stats | Get task stats for tenant
+*HatchetSdkRest::TenantApi* | [**tenant_invite_accept**](docs/TenantApi.md#tenant_invite_accept) | **POST** /api/v1/users/invites/accept | Accept tenant invite
+*HatchetSdkRest::TenantApi* | [**tenant_invite_create**](docs/TenantApi.md#tenant_invite_create) | **POST** /api/v1/tenants/{tenant}/invites | Create tenant invite
+*HatchetSdkRest::TenantApi* | [**tenant_invite_list**](docs/TenantApi.md#tenant_invite_list) | **GET** /api/v1/tenants/{tenant}/invites | List tenant invites
+*HatchetSdkRest::TenantApi* | [**tenant_invite_reject**](docs/TenantApi.md#tenant_invite_reject) | **POST** /api/v1/users/invites/reject | Reject tenant invite
+*HatchetSdkRest::TenantApi* | [**tenant_member_delete**](docs/TenantApi.md#tenant_member_delete) | **DELETE** /api/v1/tenants/{tenant}/members/{member} | Delete a tenant member
+*HatchetSdkRest::TenantApi* | [**tenant_member_list**](docs/TenantApi.md#tenant_member_list) | **GET** /api/v1/tenants/{tenant}/members | List tenant members
+*HatchetSdkRest::TenantApi* | [**tenant_member_update**](docs/TenantApi.md#tenant_member_update) | **PATCH** /api/v1/tenants/{tenant}/members/{member} | Update a tenant member
+*HatchetSdkRest::TenantApi* | [**tenant_resource_policy_get**](docs/TenantApi.md#tenant_resource_policy_get) | **GET** /api/v1/tenants/{tenant}/resource-policy | Get tenant resource policy
+*HatchetSdkRest::TenantApi* | [**tenant_update**](docs/TenantApi.md#tenant_update) | **PATCH** /api/v1/tenants/{tenant} | Update tenant
+*HatchetSdkRest::TenantApi* | [**user_list_tenant_invites**](docs/TenantApi.md#user_list_tenant_invites) | **GET** /api/v1/users/invites | List tenant invites
+*HatchetSdkRest::UserApi* | [**tenant_memberships_list**](docs/UserApi.md#tenant_memberships_list) | **GET** /api/v1/users/memberships | List tenant memberships
+*HatchetSdkRest::UserApi* | [**user_create**](docs/UserApi.md#user_create) | **POST** /api/v1/users/register | Register user
+*HatchetSdkRest::UserApi* | [**user_get_current**](docs/UserApi.md#user_get_current) | **GET** /api/v1/users/current | Get current user
+*HatchetSdkRest::UserApi* | [**user_update_github_oauth_callback**](docs/UserApi.md#user_update_github_oauth_callback) | **GET** /api/v1/users/github/callback | Complete OAuth flow
+*HatchetSdkRest::UserApi* | [**user_update_github_oauth_start**](docs/UserApi.md#user_update_github_oauth_start) | **GET** /api/v1/users/github/start | Start OAuth flow
+*HatchetSdkRest::UserApi* | [**user_update_google_oauth_callback**](docs/UserApi.md#user_update_google_oauth_callback) | **GET** /api/v1/users/google/callback | Complete OAuth flow
+*HatchetSdkRest::UserApi* | [**user_update_google_oauth_start**](docs/UserApi.md#user_update_google_oauth_start) | **GET** /api/v1/users/google/start | Start OAuth flow
+*HatchetSdkRest::UserApi* | [**user_update_login**](docs/UserApi.md#user_update_login) | **POST** /api/v1/users/login | Login user
+*HatchetSdkRest::UserApi* | [**user_update_logout**](docs/UserApi.md#user_update_logout) | **POST** /api/v1/users/logout | Logout user
+*HatchetSdkRest::UserApi* | [**user_update_password**](docs/UserApi.md#user_update_password) | **POST** /api/v1/users/password | Change user password
+*HatchetSdkRest::UserApi* | [**user_update_slack_oauth_callback**](docs/UserApi.md#user_update_slack_oauth_callback) | **GET** /api/v1/users/slack/callback | Complete OAuth flow
+*HatchetSdkRest::UserApi* | [**user_update_slack_oauth_start**](docs/UserApi.md#user_update_slack_oauth_start) | **GET** /api/v1/tenants/{tenant}/slack/start | Start OAuth flow
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_create**](docs/WebhookApi.md#v1_webhook_create) | **POST** /api/v1/stable/tenants/{tenant}/webhooks | Create a webhook
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_delete**](docs/WebhookApi.md#v1_webhook_delete) | **DELETE** /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook} |
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_get**](docs/WebhookApi.md#v1_webhook_get) | **GET** /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook} | Get a webhook
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_list**](docs/WebhookApi.md#v1_webhook_list) | **GET** /api/v1/stable/tenants/{tenant}/webhooks | List webhooks
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_receive**](docs/WebhookApi.md#v1_webhook_receive) | **POST** /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook} | Post a webhook message
+*HatchetSdkRest::WebhookApi* | [**v1_webhook_update**](docs/WebhookApi.md#v1_webhook_update) | **PATCH** /api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook} | Update a webhook
+*HatchetSdkRest::WorkerApi* | [**worker_get**](docs/WorkerApi.md#worker_get) | **GET** /api/v1/workers/{worker} | Get worker
+*HatchetSdkRest::WorkerApi* | [**worker_list**](docs/WorkerApi.md#worker_list) | **GET** /api/v1/tenants/{tenant}/worker | Get workers
+*HatchetSdkRest::WorkerApi* | [**worker_update**](docs/WorkerApi.md#worker_update) | **PATCH** /api/v1/workers/{worker} | Update worker
+*HatchetSdkRest::WorkflowApi* | [**cron_workflow_list**](docs/WorkflowApi.md#cron_workflow_list) | **GET** /api/v1/tenants/{tenant}/workflows/crons | Get cron job workflows
+*HatchetSdkRest::WorkflowApi* | [**tenant_get_queue_metrics**](docs/WorkflowApi.md#tenant_get_queue_metrics) | **GET** /api/v1/tenants/{tenant}/queue-metrics | Get workflow metrics
+*HatchetSdkRest::WorkflowApi* | [**workflow_cron_delete**](docs/WorkflowApi.md#workflow_cron_delete) | **DELETE** /api/v1/tenants/{tenant}/workflows/crons/{cron-workflow} | Delete cron job workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_cron_get**](docs/WorkflowApi.md#workflow_cron_get) | **GET** /api/v1/tenants/{tenant}/workflows/crons/{cron-workflow} | Get cron job workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_cron_update**](docs/WorkflowApi.md#workflow_cron_update) | **PATCH** /api/v1/tenants/{tenant}/workflows/crons/{cron-workflow} | Update cron job workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_delete**](docs/WorkflowApi.md#workflow_delete) | **DELETE** /api/v1/workflows/{workflow} | Delete workflow
+*HatchetSdkRest::WorkflowApi* | [**workflow_get**](docs/WorkflowApi.md#workflow_get) | **GET** /api/v1/workflows/{workflow} | Get workflow
+*HatchetSdkRest::WorkflowApi* | [**workflow_get_metrics**](docs/WorkflowApi.md#workflow_get_metrics) | **GET** /api/v1/workflows/{workflow}/metrics | Get workflow metrics
+*HatchetSdkRest::WorkflowApi* | [**workflow_get_workers_count**](docs/WorkflowApi.md#workflow_get_workers_count) | **GET** /api/v1/tenants/{tenant}/workflows/{workflow}/worker-count | Get workflow worker count
+*HatchetSdkRest::WorkflowApi* | [**workflow_list**](docs/WorkflowApi.md#workflow_list) | **GET** /api/v1/tenants/{tenant}/workflows | Get workflows
+*HatchetSdkRest::WorkflowApi* | [**workflow_run_get**](docs/WorkflowApi.md#workflow_run_get) | **GET** /api/v1/tenants/{tenant}/workflow-runs/{workflow-run} | Get workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_run_get_metrics**](docs/WorkflowApi.md#workflow_run_get_metrics) | **GET** /api/v1/tenants/{tenant}/workflows/runs/metrics | Get workflow runs metrics
+*HatchetSdkRest::WorkflowApi* | [**workflow_run_get_shape**](docs/WorkflowApi.md#workflow_run_get_shape) | **GET** /api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/shape | Get workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_run_list**](docs/WorkflowApi.md#workflow_run_list) | **GET** /api/v1/tenants/{tenant}/workflows/runs | Get workflow runs
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_bulk_delete**](docs/WorkflowApi.md#workflow_scheduled_bulk_delete) | **POST** /api/v1/tenants/{tenant}/workflows/scheduled/bulk-delete | Bulk delete scheduled workflow runs
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_bulk_update**](docs/WorkflowApi.md#workflow_scheduled_bulk_update) | **POST** /api/v1/tenants/{tenant}/workflows/scheduled/bulk-update | Bulk update scheduled workflow runs
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_delete**](docs/WorkflowApi.md#workflow_scheduled_delete) | **DELETE** /api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run} | Delete scheduled workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_get**](docs/WorkflowApi.md#workflow_scheduled_get) | **GET** /api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run} | Get scheduled workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_list**](docs/WorkflowApi.md#workflow_scheduled_list) | **GET** /api/v1/tenants/{tenant}/workflows/scheduled | Get scheduled workflow runs
+*HatchetSdkRest::WorkflowApi* | [**workflow_scheduled_update**](docs/WorkflowApi.md#workflow_scheduled_update) | **PATCH** /api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run} | Update scheduled workflow run
+*HatchetSdkRest::WorkflowApi* | [**workflow_update**](docs/WorkflowApi.md#workflow_update) | **PATCH** /api/v1/workflows/{workflow} | Update workflow
+*HatchetSdkRest::WorkflowApi* | [**workflow_version_get**](docs/WorkflowApi.md#workflow_version_get) | **GET** /api/v1/workflows/{workflow}/versions | Get workflow version
+*HatchetSdkRest::WorkflowRunApi* | [**cron_workflow_trigger_create**](docs/WorkflowRunApi.md#cron_workflow_trigger_create) | **POST** /api/v1/tenants/{tenant}/workflows/{workflow}/crons | Create cron job workflow trigger
+*HatchetSdkRest::WorkflowRunApi* | [**scheduled_workflow_run_create**](docs/WorkflowRunApi.md#scheduled_workflow_run_create) | **POST** /api/v1/tenants/{tenant}/workflows/{workflow}/scheduled | Trigger workflow run
+*HatchetSdkRest::WorkflowRunApi* | [**workflow_run_cancel**](docs/WorkflowRunApi.md#workflow_run_cancel) | **POST** /api/v1/tenants/{tenant}/workflows/cancel | Cancel workflow runs
+*HatchetSdkRest::WorkflowRunApi* | [**workflow_run_create**](docs/WorkflowRunApi.md#workflow_run_create) | **POST** /api/v1/workflows/{workflow}/trigger | Trigger workflow run
+*HatchetSdkRest::WorkflowRunApi* | [**workflow_run_get_input**](docs/WorkflowRunApi.md#workflow_run_get_input) | **GET** /api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/input | Get workflow run input
+*HatchetSdkRest::WorkflowRunApi* | [**workflow_run_update_replay**](docs/WorkflowRunApi.md#workflow_run_update_replay) | **POST** /api/v1/tenants/{tenant}/workflow-runs/replay | Replay workflow runs
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_create**](docs/WorkflowRunsApi.md#v1_workflow_run_create) | **POST** /api/v1/stable/tenants/{tenant}/workflow-runs/trigger | Create workflow run
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_display_names_list**](docs/WorkflowRunsApi.md#v1_workflow_run_display_names_list) | **GET** /api/v1/stable/tenants/{tenant}/workflow-runs/display-names | List workflow runs
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_external_ids_list**](docs/WorkflowRunsApi.md#v1_workflow_run_external_ids_list) | **GET** /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids | List workflow run external ids
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_get**](docs/WorkflowRunsApi.md#v1_workflow_run_get) | **GET** /api/v1/stable/workflow-runs/{v1-workflow-run} | List tasks
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_get_status**](docs/WorkflowRunsApi.md#v1_workflow_run_get_status) | **GET** /api/v1/stable/workflow-runs/{v1-workflow-run}/status | Get workflow run status
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_get_timings**](docs/WorkflowRunsApi.md#v1_workflow_run_get_timings) | **GET** /api/v1/stable/workflow-runs/{v1-workflow-run}/task-timings | List timings for a workflow run
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_list**](docs/WorkflowRunsApi.md#v1_workflow_run_list) | **GET** /api/v1/stable/tenants/{tenant}/workflow-runs | List workflow runs
+*HatchetSdkRest::WorkflowRunsApi* | [**v1_workflow_run_task_events_list**](docs/WorkflowRunsApi.md#v1_workflow_run_task_events_list) | **GET** /api/v1/stable/workflow-runs/{v1-workflow-run}/task-events | List tasks
+
+
+## Documentation for Models
+
+ - [HatchetSdkRest::APIError](docs/APIError.md)
+ - [HatchetSdkRest::APIErrors](docs/APIErrors.md)
+ - [HatchetSdkRest::APIMeta](docs/APIMeta.md)
+ - [HatchetSdkRest::APIMetaAuth](docs/APIMetaAuth.md)
+ - [HatchetSdkRest::APIMetaIntegration](docs/APIMetaIntegration.md)
+ - [HatchetSdkRest::APIMetaPosthog](docs/APIMetaPosthog.md)
+ - [HatchetSdkRest::APIResourceMeta](docs/APIResourceMeta.md)
+ - [HatchetSdkRest::APIToken](docs/APIToken.md)
+ - [HatchetSdkRest::AcceptInviteRequest](docs/AcceptInviteRequest.md)
+ - [HatchetSdkRest::BulkCreateEventRequest](docs/BulkCreateEventRequest.md)
+ - [HatchetSdkRest::CancelEventRequest](docs/CancelEventRequest.md)
+ - [HatchetSdkRest::ConcurrencyLimitStrategy](docs/ConcurrencyLimitStrategy.md)
+ - [HatchetSdkRest::ConcurrencyScope](docs/ConcurrencyScope.md)
+ - [HatchetSdkRest::ConcurrencySetting](docs/ConcurrencySetting.md)
+ - [HatchetSdkRest::ConcurrencyStat](docs/ConcurrencyStat.md)
+ - [HatchetSdkRest::CreateAPITokenRequest](docs/CreateAPITokenRequest.md)
+ - [HatchetSdkRest::CreateAPITokenResponse](docs/CreateAPITokenResponse.md)
+ - [HatchetSdkRest::CreateCronWorkflowTriggerRequest](docs/CreateCronWorkflowTriggerRequest.md)
+ - [HatchetSdkRest::CreateEventRequest](docs/CreateEventRequest.md)
+ - [HatchetSdkRest::CreatePullRequestFromStepRun](docs/CreatePullRequestFromStepRun.md)
+ - [HatchetSdkRest::CreateSNSIntegrationRequest](docs/CreateSNSIntegrationRequest.md)
+ - [HatchetSdkRest::CreateTenantAlertEmailGroupRequest](docs/CreateTenantAlertEmailGroupRequest.md)
+ - [HatchetSdkRest::CreateTenantInviteRequest](docs/CreateTenantInviteRequest.md)
+ - [HatchetSdkRest::CreateTenantRequest](docs/CreateTenantRequest.md)
+ - [HatchetSdkRest::CronWorkflows](docs/CronWorkflows.md)
+ - [HatchetSdkRest::CronWorkflowsList](docs/CronWorkflowsList.md)
+ - [HatchetSdkRest::CronWorkflowsMethod](docs/CronWorkflowsMethod.md)
+ - [HatchetSdkRest::CronWorkflowsOrderByField](docs/CronWorkflowsOrderByField.md)
+ - [HatchetSdkRest::Event](docs/Event.md)
+ - [HatchetSdkRest::EventData](docs/EventData.md)
+ - [HatchetSdkRest::EventKeyList](docs/EventKeyList.md)
+ - [HatchetSdkRest::EventList](docs/EventList.md)
+ - [HatchetSdkRest::EventOrderByDirection](docs/EventOrderByDirection.md)
+ - [HatchetSdkRest::EventOrderByField](docs/EventOrderByField.md)
+ - [HatchetSdkRest::EventUpdateCancel200Response](docs/EventUpdateCancel200Response.md)
+ - [HatchetSdkRest::EventWorkflowRunSummary](docs/EventWorkflowRunSummary.md)
+ - [HatchetSdkRest::Events](docs/Events.md)
+ - [HatchetSdkRest::GetStepRunDiffResponse](docs/GetStepRunDiffResponse.md)
+ - [HatchetSdkRest::InfoGetVersion200Response](docs/InfoGetVersion200Response.md)
+ - [HatchetSdkRest::Job](docs/Job.md)
+ - [HatchetSdkRest::JobRun](docs/JobRun.md)
+ - [HatchetSdkRest::JobRunStatus](docs/JobRunStatus.md)
+ - [HatchetSdkRest::ListAPITokensResponse](docs/ListAPITokensResponse.md)
+ - [HatchetSdkRest::ListPullRequestsResponse](docs/ListPullRequestsResponse.md)
+ - [HatchetSdkRest::ListSNSIntegrations](docs/ListSNSIntegrations.md)
+ - [HatchetSdkRest::ListSlackWebhooks](docs/ListSlackWebhooks.md)
+ - [HatchetSdkRest::LogLine](docs/LogLine.md)
+ - [HatchetSdkRest::LogLineLevel](docs/LogLineLevel.md)
+ - [HatchetSdkRest::LogLineList](docs/LogLineList.md)
+ - [HatchetSdkRest::LogLineOrderByDirection](docs/LogLineOrderByDirection.md)
+ - [HatchetSdkRest::LogLineOrderByField](docs/LogLineOrderByField.md)
+ - [HatchetSdkRest::PaginationResponse](docs/PaginationResponse.md)
+ - [HatchetSdkRest::PullRequest](docs/PullRequest.md)
+ - [HatchetSdkRest::PullRequestState](docs/PullRequestState.md)
+ - [HatchetSdkRest::QueueMetrics](docs/QueueMetrics.md)
+ - [HatchetSdkRest::RateLimit](docs/RateLimit.md)
+ - [HatchetSdkRest::RateLimitList](docs/RateLimitList.md)
+ - [HatchetSdkRest::RateLimitOrderByDirection](docs/RateLimitOrderByDirection.md)
+ - [HatchetSdkRest::RateLimitOrderByField](docs/RateLimitOrderByField.md)
+ - [HatchetSdkRest::RecentStepRuns](docs/RecentStepRuns.md)
+ - [HatchetSdkRest::RegisteredWorkflow](docs/RegisteredWorkflow.md)
+ - [HatchetSdkRest::RejectInviteRequest](docs/RejectInviteRequest.md)
+ - [HatchetSdkRest::ReplayEventRequest](docs/ReplayEventRequest.md)
+ - [HatchetSdkRest::ReplayWorkflowRunsRequest](docs/ReplayWorkflowRunsRequest.md)
+ - [HatchetSdkRest::ReplayWorkflowRunsResponse](docs/ReplayWorkflowRunsResponse.md)
+ - [HatchetSdkRest::RerunStepRunRequest](docs/RerunStepRunRequest.md)
+ - [HatchetSdkRest::SNSIntegration](docs/SNSIntegration.md)
+ - [HatchetSdkRest::ScheduleWorkflowRunRequest](docs/ScheduleWorkflowRunRequest.md)
+ - [HatchetSdkRest::ScheduledRunStatus](docs/ScheduledRunStatus.md)
+ - [HatchetSdkRest::ScheduledWorkflows](docs/ScheduledWorkflows.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkDeleteFilter](docs/ScheduledWorkflowsBulkDeleteFilter.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkDeleteRequest](docs/ScheduledWorkflowsBulkDeleteRequest.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkDeleteResponse](docs/ScheduledWorkflowsBulkDeleteResponse.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkError](docs/ScheduledWorkflowsBulkError.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkUpdateItem](docs/ScheduledWorkflowsBulkUpdateItem.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkUpdateRequest](docs/ScheduledWorkflowsBulkUpdateRequest.md)
+ - [HatchetSdkRest::ScheduledWorkflowsBulkUpdateResponse](docs/ScheduledWorkflowsBulkUpdateResponse.md)
+ - [HatchetSdkRest::ScheduledWorkflowsList](docs/ScheduledWorkflowsList.md)
+ - [HatchetSdkRest::ScheduledWorkflowsMethod](docs/ScheduledWorkflowsMethod.md)
+ - [HatchetSdkRest::ScheduledWorkflowsOrderByField](docs/ScheduledWorkflowsOrderByField.md)
+ - [HatchetSdkRest::SemaphoreSlots](docs/SemaphoreSlots.md)
+ - [HatchetSdkRest::SlackWebhook](docs/SlackWebhook.md)
+ - [HatchetSdkRest::Step](docs/Step.md)
+ - [HatchetSdkRest::StepRun](docs/StepRun.md)
+ - [HatchetSdkRest::StepRunArchive](docs/StepRunArchive.md)
+ - [HatchetSdkRest::StepRunArchiveList](docs/StepRunArchiveList.md)
+ - [HatchetSdkRest::StepRunDiff](docs/StepRunDiff.md)
+ - [HatchetSdkRest::StepRunEvent](docs/StepRunEvent.md)
+ - [HatchetSdkRest::StepRunEventList](docs/StepRunEventList.md)
+ - [HatchetSdkRest::StepRunEventReason](docs/StepRunEventReason.md)
+ - [HatchetSdkRest::StepRunEventSeverity](docs/StepRunEventSeverity.md)
+ - [HatchetSdkRest::StepRunStatus](docs/StepRunStatus.md)
+ - [HatchetSdkRest::TaskStat](docs/TaskStat.md)
+ - [HatchetSdkRest::TaskStatusStat](docs/TaskStatusStat.md)
+ - [HatchetSdkRest::Tenant](docs/Tenant.md)
+ - [HatchetSdkRest::TenantAlertEmailGroup](docs/TenantAlertEmailGroup.md)
+ - [HatchetSdkRest::TenantAlertEmailGroupList](docs/TenantAlertEmailGroupList.md)
+ - [HatchetSdkRest::TenantAlertingSettings](docs/TenantAlertingSettings.md)
+ - [HatchetSdkRest::TenantEnvironment](docs/TenantEnvironment.md)
+ - [HatchetSdkRest::TenantInvite](docs/TenantInvite.md)
+ - [HatchetSdkRest::TenantInviteList](docs/TenantInviteList.md)
+ - [HatchetSdkRest::TenantList](docs/TenantList.md)
+ - [HatchetSdkRest::TenantMember](docs/TenantMember.md)
+ - [HatchetSdkRest::TenantMemberList](docs/TenantMemberList.md)
+ - [HatchetSdkRest::TenantMemberRole](docs/TenantMemberRole.md)
+ - [HatchetSdkRest::TenantQueueMetrics](docs/TenantQueueMetrics.md)
+ - [HatchetSdkRest::TenantResource](docs/TenantResource.md)
+ - [HatchetSdkRest::TenantResourceLimit](docs/TenantResourceLimit.md)
+ - [HatchetSdkRest::TenantResourcePolicy](docs/TenantResourcePolicy.md)
+ - [HatchetSdkRest::TenantStepRunQueueMetrics](docs/TenantStepRunQueueMetrics.md)
+ - [HatchetSdkRest::TenantVersion](docs/TenantVersion.md)
+ - [HatchetSdkRest::TriggerWorkflowRunRequest](docs/TriggerWorkflowRunRequest.md)
+ - [HatchetSdkRest::UpdateCronWorkflowTriggerRequest](docs/UpdateCronWorkflowTriggerRequest.md)
+ - [HatchetSdkRest::UpdateScheduledWorkflowRunRequest](docs/UpdateScheduledWorkflowRunRequest.md)
+ - [HatchetSdkRest::UpdateTenantAlertEmailGroupRequest](docs/UpdateTenantAlertEmailGroupRequest.md)
+ - [HatchetSdkRest::UpdateTenantInviteRequest](docs/UpdateTenantInviteRequest.md)
+ - [HatchetSdkRest::UpdateTenantMemberRequest](docs/UpdateTenantMemberRequest.md)
+ - [HatchetSdkRest::UpdateTenantRequest](docs/UpdateTenantRequest.md)
+ - [HatchetSdkRest::UpdateWorkerRequest](docs/UpdateWorkerRequest.md)
+ - [HatchetSdkRest::User](docs/User.md)
+ - [HatchetSdkRest::UserChangePasswordRequest](docs/UserChangePasswordRequest.md)
+ - [HatchetSdkRest::UserLoginRequest](docs/UserLoginRequest.md)
+ - [HatchetSdkRest::UserRegisterRequest](docs/UserRegisterRequest.md)
+ - [HatchetSdkRest::UserTenantMembershipsList](docs/UserTenantMembershipsList.md)
+ - [HatchetSdkRest::UserTenantPublic](docs/UserTenantPublic.md)
+ - [HatchetSdkRest::V1CELDebugRequest](docs/V1CELDebugRequest.md)
+ - [HatchetSdkRest::V1CELDebugResponse](docs/V1CELDebugResponse.md)
+ - [HatchetSdkRest::V1CELDebugResponseStatus](docs/V1CELDebugResponseStatus.md)
+ - [HatchetSdkRest::V1CancelTaskRequest](docs/V1CancelTaskRequest.md)
+ - [HatchetSdkRest::V1CancelledTasks](docs/V1CancelledTasks.md)
+ - [HatchetSdkRest::V1CreateFilterRequest](docs/V1CreateFilterRequest.md)
+ - [HatchetSdkRest::V1CreateWebhookRequest](docs/V1CreateWebhookRequest.md)
+ - [HatchetSdkRest::V1CreateWebhookRequestAPIKey](docs/V1CreateWebhookRequestAPIKey.md)
+ - [HatchetSdkRest::V1CreateWebhookRequestBase](docs/V1CreateWebhookRequestBase.md)
+ - [HatchetSdkRest::V1CreateWebhookRequestBasicAuth](docs/V1CreateWebhookRequestBasicAuth.md)
+ - [HatchetSdkRest::V1CreateWebhookRequestHMAC](docs/V1CreateWebhookRequestHMAC.md)
+ - [HatchetSdkRest::V1DagChildren](docs/V1DagChildren.md)
+ - [HatchetSdkRest::V1Event](docs/V1Event.md)
+ - [HatchetSdkRest::V1EventList](docs/V1EventList.md)
+ - [HatchetSdkRest::V1EventTriggeredRun](docs/V1EventTriggeredRun.md)
+ - [HatchetSdkRest::V1EventWorkflowRunSummary](docs/V1EventWorkflowRunSummary.md)
+ - [HatchetSdkRest::V1Filter](docs/V1Filter.md)
+ - [HatchetSdkRest::V1FilterList](docs/V1FilterList.md)
+ - [HatchetSdkRest::V1LogLine](docs/V1LogLine.md)
+ - [HatchetSdkRest::V1LogLineLevel](docs/V1LogLineLevel.md)
+ - [HatchetSdkRest::V1LogLineList](docs/V1LogLineList.md)
+ - [HatchetSdkRest::V1LogLineOrderByDirection](docs/V1LogLineOrderByDirection.md)
+ - [HatchetSdkRest::V1ReplayTaskRequest](docs/V1ReplayTaskRequest.md)
+ - [HatchetSdkRest::V1ReplayedTasks](docs/V1ReplayedTasks.md)
+ - [HatchetSdkRest::V1TaskEvent](docs/V1TaskEvent.md)
+ - [HatchetSdkRest::V1TaskEventList](docs/V1TaskEventList.md)
+ - [HatchetSdkRest::V1TaskEventType](docs/V1TaskEventType.md)
+ - [HatchetSdkRest::V1TaskFilter](docs/V1TaskFilter.md)
+ - [HatchetSdkRest::V1TaskPointMetric](docs/V1TaskPointMetric.md)
+ - [HatchetSdkRest::V1TaskPointMetrics](docs/V1TaskPointMetrics.md)
+ - [HatchetSdkRest::V1TaskRunMetric](docs/V1TaskRunMetric.md)
+ - [HatchetSdkRest::V1TaskRunStatus](docs/V1TaskRunStatus.md)
+ - [HatchetSdkRest::V1TaskStatus](docs/V1TaskStatus.md)
+ - [HatchetSdkRest::V1TaskSummary](docs/V1TaskSummary.md)
+ - [HatchetSdkRest::V1TaskSummaryList](docs/V1TaskSummaryList.md)
+ - [HatchetSdkRest::V1TaskTiming](docs/V1TaskTiming.md)
+ - [HatchetSdkRest::V1TaskTimingList](docs/V1TaskTimingList.md)
+ - [HatchetSdkRest::V1TriggerWorkflowRunRequest](docs/V1TriggerWorkflowRunRequest.md)
+ - [HatchetSdkRest::V1UpdateFilterRequest](docs/V1UpdateFilterRequest.md)
+ - [HatchetSdkRest::V1UpdateWebhookRequest](docs/V1UpdateWebhookRequest.md)
+ - [HatchetSdkRest::V1Webhook](docs/V1Webhook.md)
+ - [HatchetSdkRest::V1WebhookAPIKeyAuth](docs/V1WebhookAPIKeyAuth.md)
+ - [HatchetSdkRest::V1WebhookAuthType](docs/V1WebhookAuthType.md)
+ - [HatchetSdkRest::V1WebhookBasicAuth](docs/V1WebhookBasicAuth.md)
+ - [HatchetSdkRest::V1WebhookHMACAlgorithm](docs/V1WebhookHMACAlgorithm.md)
+ - [HatchetSdkRest::V1WebhookHMACAuth](docs/V1WebhookHMACAuth.md)
+ - [HatchetSdkRest::V1WebhookHMACEncoding](docs/V1WebhookHMACEncoding.md)
+ - [HatchetSdkRest::V1WebhookList](docs/V1WebhookList.md)
+ - [HatchetSdkRest::V1WebhookSourceName](docs/V1WebhookSourceName.md)
+ - [HatchetSdkRest::V1WorkflowRun](docs/V1WorkflowRun.md)
+ - [HatchetSdkRest::V1WorkflowRunDetails](docs/V1WorkflowRunDetails.md)
+ - [HatchetSdkRest::V1WorkflowRunDisplayName](docs/V1WorkflowRunDisplayName.md)
+ - [HatchetSdkRest::V1WorkflowRunDisplayNameList](docs/V1WorkflowRunDisplayNameList.md)
+ - [HatchetSdkRest::V1WorkflowType](docs/V1WorkflowType.md)
+ - [HatchetSdkRest::WebhookWorker](docs/WebhookWorker.md)
+ - [HatchetSdkRest::WebhookWorkerCreateRequest](docs/WebhookWorkerCreateRequest.md)
+ - [HatchetSdkRest::WebhookWorkerCreateResponse](docs/WebhookWorkerCreateResponse.md)
+ - [HatchetSdkRest::WebhookWorkerCreated](docs/WebhookWorkerCreated.md)
+ - [HatchetSdkRest::WebhookWorkerListResponse](docs/WebhookWorkerListResponse.md)
+ - [HatchetSdkRest::WebhookWorkerRequest](docs/WebhookWorkerRequest.md)
+ - [HatchetSdkRest::WebhookWorkerRequestListResponse](docs/WebhookWorkerRequestListResponse.md)
+ - [HatchetSdkRest::WebhookWorkerRequestMethod](docs/WebhookWorkerRequestMethod.md)
+ - [HatchetSdkRest::Worker](docs/Worker.md)
+ - [HatchetSdkRest::WorkerLabel](docs/WorkerLabel.md)
+ - [HatchetSdkRest::WorkerList](docs/WorkerList.md)
+ - [HatchetSdkRest::WorkerRuntimeInfo](docs/WorkerRuntimeInfo.md)
+ - [HatchetSdkRest::WorkerRuntimeSDKs](docs/WorkerRuntimeSDKs.md)
+ - [HatchetSdkRest::WorkerSlotConfig](docs/WorkerSlotConfig.md)
+ - [HatchetSdkRest::WorkerType](docs/WorkerType.md)
+ - [HatchetSdkRest::Workflow](docs/Workflow.md)
+ - [HatchetSdkRest::WorkflowConcurrency](docs/WorkflowConcurrency.md)
+ - [HatchetSdkRest::WorkflowKind](docs/WorkflowKind.md)
+ - [HatchetSdkRest::WorkflowList](docs/WorkflowList.md)
+ - [HatchetSdkRest::WorkflowMetrics](docs/WorkflowMetrics.md)
+ - [HatchetSdkRest::WorkflowRun](docs/WorkflowRun.md)
+ - [HatchetSdkRest::WorkflowRunList](docs/WorkflowRunList.md)
+ - [HatchetSdkRest::WorkflowRunOrderByDirection](docs/WorkflowRunOrderByDirection.md)
+ - [HatchetSdkRest::WorkflowRunOrderByField](docs/WorkflowRunOrderByField.md)
+ - [HatchetSdkRest::WorkflowRunShape](docs/WorkflowRunShape.md)
+ - [HatchetSdkRest::WorkflowRunShapeItemForWorkflowRunDetails](docs/WorkflowRunShapeItemForWorkflowRunDetails.md)
+ - [HatchetSdkRest::WorkflowRunStatus](docs/WorkflowRunStatus.md)
+ - [HatchetSdkRest::WorkflowRunTriggeredBy](docs/WorkflowRunTriggeredBy.md)
+ - [HatchetSdkRest::WorkflowRunsCancelRequest](docs/WorkflowRunsCancelRequest.md)
+ - [HatchetSdkRest::WorkflowRunsMetrics](docs/WorkflowRunsMetrics.md)
+ - [HatchetSdkRest::WorkflowRunsMetricsCounts](docs/WorkflowRunsMetricsCounts.md)
+ - [HatchetSdkRest::WorkflowTag](docs/WorkflowTag.md)
+ - [HatchetSdkRest::WorkflowTriggerCronRef](docs/WorkflowTriggerCronRef.md)
+ - [HatchetSdkRest::WorkflowTriggerEventRef](docs/WorkflowTriggerEventRef.md)
+ - [HatchetSdkRest::WorkflowTriggers](docs/WorkflowTriggers.md)
+ - [HatchetSdkRest::WorkflowUpdateRequest](docs/WorkflowUpdateRequest.md)
+ - [HatchetSdkRest::WorkflowVersion](docs/WorkflowVersion.md)
+ - [HatchetSdkRest::WorkflowVersionDefinition](docs/WorkflowVersionDefinition.md)
+ - [HatchetSdkRest::WorkflowVersionMeta](docs/WorkflowVersionMeta.md)
+ - [HatchetSdkRest::WorkflowWorkersCount](docs/WorkflowWorkersCount.md)
+
+
+## Documentation for Authorization
+
+
+Authentication schemes defined for the API:
+### bearerAuth
+
+- **Type**: Bearer authentication
+
+### cookieAuth
+
+
+- **Type**: API key
+- **API key parameter name**: hatchet
+- **Location**: cookie
+
+### customAuth
+
+- **Type**: Bearer authentication
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/Rakefile b/sdks/ruby/src/lib/hatchet/clients/rest/Rakefile
new file mode 100644
index 000000000..c72ca30d4
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/Rakefile
@@ -0,0 +1,10 @@
+require "bundler/gem_tasks"
+
+begin
+ require 'rspec/core/rake_task'
+
+ RSpec::Core::RakeTask.new(:spec)
+ task default: :spec
+rescue LoadError
+ # no rspec available
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/git_push.sh b/sdks/ruby/src/lib/hatchet/clients/rest/git_push.sh
new file mode 100644
index 000000000..f53a75d4f
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/git_push.sh
@@ -0,0 +1,57 @@
+#!/bin/sh
+# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/
+#
+# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com"
+
+git_user_id=$1
+git_repo_id=$2
+release_note=$3
+git_host=$4
+
+if [ "$git_host" = "" ]; then
+ git_host="github.com"
+ echo "[INFO] No command line input provided. Set \$git_host to $git_host"
+fi
+
+if [ "$git_user_id" = "" ]; then
+ git_user_id="GIT_USER_ID"
+ echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id"
+fi
+
+if [ "$git_repo_id" = "" ]; then
+ git_repo_id="GIT_REPO_ID"
+ echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id"
+fi
+
+if [ "$release_note" = "" ]; then
+ release_note="Minor update"
+ echo "[INFO] No command line input provided. Set \$release_note to $release_note"
+fi
+
+# Initialize the local directory as a Git repository
+git init
+
+# Adds the files in the local repository and stages them for commit.
+git add .
+
+# Commits the tracked changes and prepares them to be pushed to a remote repository.
+git commit -m "$release_note"
+
+# Sets the new remote
+git_remote=$(git remote)
+if [ "$git_remote" = "" ]; then # git remote not defined
+
+ if [ "$GIT_TOKEN" = "" ]; then
+ echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment."
+ git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git
+ else
+ git remote add origin https://${git_user_id}:"${GIT_TOKEN}"@${git_host}/${git_user_id}/${git_repo_id}.git
+ fi
+
+fi
+
+git pull origin master
+
+# Pushes (Forces) the changes in the local repository up to the remote repository
+echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git"
+git push origin master 2>&1 | grep -v 'To https'
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/hatchet-sdk-rest.gemspec b/sdks/ruby/src/lib/hatchet/clients/rest/hatchet-sdk-rest.gemspec
new file mode 100644
index 000000000..7d866c320
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/hatchet-sdk-rest.gemspec
@@ -0,0 +1,41 @@
+# -*- encoding: utf-8 -*-
+
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+$:.push File.expand_path("../lib", __FILE__)
+require "hatchet-sdk-rest/version"
+
+Gem::Specification.new do |s|
+ s.name = "hatchet-sdk-rest"
+ s.version = HatchetSdkRest::VERSION
+ s.platform = Gem::Platform::RUBY
+ s.authors = ["Hatchet Team"]
+ s.email = [""]
+ s.homepage = "https://github.com/hatchet-dev/hatchet"
+ s.summary = "Hatchet API Ruby Gem"
+ s.description = "Ruby REST client for Hatchet API generated from OpenAPI specification"
+ s.license = "MIT"
+ s.required_ruby_version = ">= 2.7"
+ s.metadata = {}
+
+ s.add_runtime_dependency 'faraday', '>= 1.0.1', '< 3.0'
+ s.add_runtime_dependency 'faraday-multipart'
+ s.add_runtime_dependency 'marcel'
+
+ s.add_development_dependency 'rspec', '~> 3.6', '>= 3.6.0'
+
+ s.files = `find *`.split("\n").uniq.sort.select { |f| !f.empty? }
+ s.test_files = `find spec/*`.split("\n")
+ s.executables = []
+ s.require_paths = ["lib"]
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest.rb
new file mode 100644
index 000000000..7c9b91a97
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest.rb
@@ -0,0 +1,279 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+# Common files
+require 'hatchet-sdk-rest/api_client'
+require 'hatchet-sdk-rest/api_error'
+require 'hatchet-sdk-rest/version'
+require 'hatchet-sdk-rest/configuration'
+
+# Models
+HatchetSdkRest.autoload :APIError, 'hatchet-sdk-rest/models/api_error'
+HatchetSdkRest.autoload :APIErrors, 'hatchet-sdk-rest/models/api_errors'
+HatchetSdkRest.autoload :APIMeta, 'hatchet-sdk-rest/models/api_meta'
+HatchetSdkRest.autoload :APIMetaAuth, 'hatchet-sdk-rest/models/api_meta_auth'
+HatchetSdkRest.autoload :APIMetaIntegration, 'hatchet-sdk-rest/models/api_meta_integration'
+HatchetSdkRest.autoload :APIMetaPosthog, 'hatchet-sdk-rest/models/api_meta_posthog'
+HatchetSdkRest.autoload :APIResourceMeta, 'hatchet-sdk-rest/models/api_resource_meta'
+HatchetSdkRest.autoload :APIToken, 'hatchet-sdk-rest/models/api_token'
+HatchetSdkRest.autoload :AcceptInviteRequest, 'hatchet-sdk-rest/models/accept_invite_request'
+HatchetSdkRest.autoload :BulkCreateEventRequest, 'hatchet-sdk-rest/models/bulk_create_event_request'
+HatchetSdkRest.autoload :CancelEventRequest, 'hatchet-sdk-rest/models/cancel_event_request'
+HatchetSdkRest.autoload :ConcurrencyLimitStrategy, 'hatchet-sdk-rest/models/concurrency_limit_strategy'
+HatchetSdkRest.autoload :ConcurrencyScope, 'hatchet-sdk-rest/models/concurrency_scope'
+HatchetSdkRest.autoload :ConcurrencySetting, 'hatchet-sdk-rest/models/concurrency_setting'
+HatchetSdkRest.autoload :ConcurrencyStat, 'hatchet-sdk-rest/models/concurrency_stat'
+HatchetSdkRest.autoload :CreateAPITokenRequest, 'hatchet-sdk-rest/models/create_api_token_request'
+HatchetSdkRest.autoload :CreateAPITokenResponse, 'hatchet-sdk-rest/models/create_api_token_response'
+HatchetSdkRest.autoload :CreateCronWorkflowTriggerRequest, 'hatchet-sdk-rest/models/create_cron_workflow_trigger_request'
+HatchetSdkRest.autoload :CreateEventRequest, 'hatchet-sdk-rest/models/create_event_request'
+HatchetSdkRest.autoload :CreatePullRequestFromStepRun, 'hatchet-sdk-rest/models/create_pull_request_from_step_run'
+HatchetSdkRest.autoload :CreateSNSIntegrationRequest, 'hatchet-sdk-rest/models/create_sns_integration_request'
+HatchetSdkRest.autoload :CreateTenantAlertEmailGroupRequest, 'hatchet-sdk-rest/models/create_tenant_alert_email_group_request'
+HatchetSdkRest.autoload :CreateTenantInviteRequest, 'hatchet-sdk-rest/models/create_tenant_invite_request'
+HatchetSdkRest.autoload :CreateTenantRequest, 'hatchet-sdk-rest/models/create_tenant_request'
+HatchetSdkRest.autoload :CronWorkflows, 'hatchet-sdk-rest/models/cron_workflows'
+HatchetSdkRest.autoload :CronWorkflowsList, 'hatchet-sdk-rest/models/cron_workflows_list'
+HatchetSdkRest.autoload :CronWorkflowsMethod, 'hatchet-sdk-rest/models/cron_workflows_method'
+HatchetSdkRest.autoload :CronWorkflowsOrderByField, 'hatchet-sdk-rest/models/cron_workflows_order_by_field'
+HatchetSdkRest.autoload :Event, 'hatchet-sdk-rest/models/event'
+HatchetSdkRest.autoload :EventData, 'hatchet-sdk-rest/models/event_data'
+HatchetSdkRest.autoload :EventKeyList, 'hatchet-sdk-rest/models/event_key_list'
+HatchetSdkRest.autoload :EventList, 'hatchet-sdk-rest/models/event_list'
+HatchetSdkRest.autoload :EventOrderByDirection, 'hatchet-sdk-rest/models/event_order_by_direction'
+HatchetSdkRest.autoload :EventOrderByField, 'hatchet-sdk-rest/models/event_order_by_field'
+HatchetSdkRest.autoload :EventUpdateCancel200Response, 'hatchet-sdk-rest/models/event_update_cancel200_response'
+HatchetSdkRest.autoload :EventWorkflowRunSummary, 'hatchet-sdk-rest/models/event_workflow_run_summary'
+HatchetSdkRest.autoload :Events, 'hatchet-sdk-rest/models/events'
+HatchetSdkRest.autoload :GetStepRunDiffResponse, 'hatchet-sdk-rest/models/get_step_run_diff_response'
+HatchetSdkRest.autoload :InfoGetVersion200Response, 'hatchet-sdk-rest/models/info_get_version200_response'
+HatchetSdkRest.autoload :Job, 'hatchet-sdk-rest/models/job'
+HatchetSdkRest.autoload :JobRun, 'hatchet-sdk-rest/models/job_run'
+HatchetSdkRest.autoload :JobRunStatus, 'hatchet-sdk-rest/models/job_run_status'
+HatchetSdkRest.autoload :ListAPITokensResponse, 'hatchet-sdk-rest/models/list_api_tokens_response'
+HatchetSdkRest.autoload :ListPullRequestsResponse, 'hatchet-sdk-rest/models/list_pull_requests_response'
+HatchetSdkRest.autoload :ListSNSIntegrations, 'hatchet-sdk-rest/models/list_sns_integrations'
+HatchetSdkRest.autoload :ListSlackWebhooks, 'hatchet-sdk-rest/models/list_slack_webhooks'
+HatchetSdkRest.autoload :LogLine, 'hatchet-sdk-rest/models/log_line'
+HatchetSdkRest.autoload :LogLineLevel, 'hatchet-sdk-rest/models/log_line_level'
+HatchetSdkRest.autoload :LogLineList, 'hatchet-sdk-rest/models/log_line_list'
+HatchetSdkRest.autoload :LogLineOrderByDirection, 'hatchet-sdk-rest/models/log_line_order_by_direction'
+HatchetSdkRest.autoload :LogLineOrderByField, 'hatchet-sdk-rest/models/log_line_order_by_field'
+HatchetSdkRest.autoload :PaginationResponse, 'hatchet-sdk-rest/models/pagination_response'
+HatchetSdkRest.autoload :PullRequest, 'hatchet-sdk-rest/models/pull_request'
+HatchetSdkRest.autoload :PullRequestState, 'hatchet-sdk-rest/models/pull_request_state'
+HatchetSdkRest.autoload :QueueMetrics, 'hatchet-sdk-rest/models/queue_metrics'
+HatchetSdkRest.autoload :RateLimit, 'hatchet-sdk-rest/models/rate_limit'
+HatchetSdkRest.autoload :RateLimitList, 'hatchet-sdk-rest/models/rate_limit_list'
+HatchetSdkRest.autoload :RateLimitOrderByDirection, 'hatchet-sdk-rest/models/rate_limit_order_by_direction'
+HatchetSdkRest.autoload :RateLimitOrderByField, 'hatchet-sdk-rest/models/rate_limit_order_by_field'
+HatchetSdkRest.autoload :RecentStepRuns, 'hatchet-sdk-rest/models/recent_step_runs'
+HatchetSdkRest.autoload :RegisteredWorkflow, 'hatchet-sdk-rest/models/registered_workflow'
+HatchetSdkRest.autoload :RejectInviteRequest, 'hatchet-sdk-rest/models/reject_invite_request'
+HatchetSdkRest.autoload :ReplayEventRequest, 'hatchet-sdk-rest/models/replay_event_request'
+HatchetSdkRest.autoload :ReplayWorkflowRunsRequest, 'hatchet-sdk-rest/models/replay_workflow_runs_request'
+HatchetSdkRest.autoload :ReplayWorkflowRunsResponse, 'hatchet-sdk-rest/models/replay_workflow_runs_response'
+HatchetSdkRest.autoload :RerunStepRunRequest, 'hatchet-sdk-rest/models/rerun_step_run_request'
+HatchetSdkRest.autoload :SNSIntegration, 'hatchet-sdk-rest/models/sns_integration'
+HatchetSdkRest.autoload :ScheduleWorkflowRunRequest, 'hatchet-sdk-rest/models/schedule_workflow_run_request'
+HatchetSdkRest.autoload :ScheduledRunStatus, 'hatchet-sdk-rest/models/scheduled_run_status'
+HatchetSdkRest.autoload :ScheduledWorkflows, 'hatchet-sdk-rest/models/scheduled_workflows'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkDeleteFilter, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_filter'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkDeleteRequest, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_request'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkDeleteResponse, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_delete_response'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkError, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_error'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkUpdateItem, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_update_item'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkUpdateRequest, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_update_request'
+HatchetSdkRest.autoload :ScheduledWorkflowsBulkUpdateResponse, 'hatchet-sdk-rest/models/scheduled_workflows_bulk_update_response'
+HatchetSdkRest.autoload :ScheduledWorkflowsList, 'hatchet-sdk-rest/models/scheduled_workflows_list'
+HatchetSdkRest.autoload :ScheduledWorkflowsMethod, 'hatchet-sdk-rest/models/scheduled_workflows_method'
+HatchetSdkRest.autoload :ScheduledWorkflowsOrderByField, 'hatchet-sdk-rest/models/scheduled_workflows_order_by_field'
+HatchetSdkRest.autoload :SemaphoreSlots, 'hatchet-sdk-rest/models/semaphore_slots'
+HatchetSdkRest.autoload :SlackWebhook, 'hatchet-sdk-rest/models/slack_webhook'
+HatchetSdkRest.autoload :Step, 'hatchet-sdk-rest/models/step'
+HatchetSdkRest.autoload :StepRun, 'hatchet-sdk-rest/models/step_run'
+HatchetSdkRest.autoload :StepRunArchive, 'hatchet-sdk-rest/models/step_run_archive'
+HatchetSdkRest.autoload :StepRunArchiveList, 'hatchet-sdk-rest/models/step_run_archive_list'
+HatchetSdkRest.autoload :StepRunDiff, 'hatchet-sdk-rest/models/step_run_diff'
+HatchetSdkRest.autoload :StepRunEvent, 'hatchet-sdk-rest/models/step_run_event'
+HatchetSdkRest.autoload :StepRunEventList, 'hatchet-sdk-rest/models/step_run_event_list'
+HatchetSdkRest.autoload :StepRunEventReason, 'hatchet-sdk-rest/models/step_run_event_reason'
+HatchetSdkRest.autoload :StepRunEventSeverity, 'hatchet-sdk-rest/models/step_run_event_severity'
+HatchetSdkRest.autoload :StepRunStatus, 'hatchet-sdk-rest/models/step_run_status'
+HatchetSdkRest.autoload :TaskStat, 'hatchet-sdk-rest/models/task_stat'
+HatchetSdkRest.autoload :TaskStatusStat, 'hatchet-sdk-rest/models/task_status_stat'
+HatchetSdkRest.autoload :Tenant, 'hatchet-sdk-rest/models/tenant'
+HatchetSdkRest.autoload :TenantAlertEmailGroup, 'hatchet-sdk-rest/models/tenant_alert_email_group'
+HatchetSdkRest.autoload :TenantAlertEmailGroupList, 'hatchet-sdk-rest/models/tenant_alert_email_group_list'
+HatchetSdkRest.autoload :TenantAlertingSettings, 'hatchet-sdk-rest/models/tenant_alerting_settings'
+HatchetSdkRest.autoload :TenantEnvironment, 'hatchet-sdk-rest/models/tenant_environment'
+HatchetSdkRest.autoload :TenantInvite, 'hatchet-sdk-rest/models/tenant_invite'
+HatchetSdkRest.autoload :TenantInviteList, 'hatchet-sdk-rest/models/tenant_invite_list'
+HatchetSdkRest.autoload :TenantList, 'hatchet-sdk-rest/models/tenant_list'
+HatchetSdkRest.autoload :TenantMember, 'hatchet-sdk-rest/models/tenant_member'
+HatchetSdkRest.autoload :TenantMemberList, 'hatchet-sdk-rest/models/tenant_member_list'
+HatchetSdkRest.autoload :TenantMemberRole, 'hatchet-sdk-rest/models/tenant_member_role'
+HatchetSdkRest.autoload :TenantQueueMetrics, 'hatchet-sdk-rest/models/tenant_queue_metrics'
+HatchetSdkRest.autoload :TenantResource, 'hatchet-sdk-rest/models/tenant_resource'
+HatchetSdkRest.autoload :TenantResourceLimit, 'hatchet-sdk-rest/models/tenant_resource_limit'
+HatchetSdkRest.autoload :TenantResourcePolicy, 'hatchet-sdk-rest/models/tenant_resource_policy'
+HatchetSdkRest.autoload :TenantStepRunQueueMetrics, 'hatchet-sdk-rest/models/tenant_step_run_queue_metrics'
+HatchetSdkRest.autoload :TenantVersion, 'hatchet-sdk-rest/models/tenant_version'
+HatchetSdkRest.autoload :TriggerWorkflowRunRequest, 'hatchet-sdk-rest/models/trigger_workflow_run_request'
+HatchetSdkRest.autoload :UpdateCronWorkflowTriggerRequest, 'hatchet-sdk-rest/models/update_cron_workflow_trigger_request'
+HatchetSdkRest.autoload :UpdateScheduledWorkflowRunRequest, 'hatchet-sdk-rest/models/update_scheduled_workflow_run_request'
+HatchetSdkRest.autoload :UpdateTenantAlertEmailGroupRequest, 'hatchet-sdk-rest/models/update_tenant_alert_email_group_request'
+HatchetSdkRest.autoload :UpdateTenantInviteRequest, 'hatchet-sdk-rest/models/update_tenant_invite_request'
+HatchetSdkRest.autoload :UpdateTenantMemberRequest, 'hatchet-sdk-rest/models/update_tenant_member_request'
+HatchetSdkRest.autoload :UpdateTenantRequest, 'hatchet-sdk-rest/models/update_tenant_request'
+HatchetSdkRest.autoload :UpdateWorkerRequest, 'hatchet-sdk-rest/models/update_worker_request'
+HatchetSdkRest.autoload :User, 'hatchet-sdk-rest/models/user'
+HatchetSdkRest.autoload :UserChangePasswordRequest, 'hatchet-sdk-rest/models/user_change_password_request'
+HatchetSdkRest.autoload :UserLoginRequest, 'hatchet-sdk-rest/models/user_login_request'
+HatchetSdkRest.autoload :UserRegisterRequest, 'hatchet-sdk-rest/models/user_register_request'
+HatchetSdkRest.autoload :UserTenantMembershipsList, 'hatchet-sdk-rest/models/user_tenant_memberships_list'
+HatchetSdkRest.autoload :UserTenantPublic, 'hatchet-sdk-rest/models/user_tenant_public'
+HatchetSdkRest.autoload :V1CELDebugRequest, 'hatchet-sdk-rest/models/v1_cel_debug_request'
+HatchetSdkRest.autoload :V1CELDebugResponse, 'hatchet-sdk-rest/models/v1_cel_debug_response'
+HatchetSdkRest.autoload :V1CELDebugResponseStatus, 'hatchet-sdk-rest/models/v1_cel_debug_response_status'
+HatchetSdkRest.autoload :V1CancelTaskRequest, 'hatchet-sdk-rest/models/v1_cancel_task_request'
+HatchetSdkRest.autoload :V1CancelledTasks, 'hatchet-sdk-rest/models/v1_cancelled_tasks'
+HatchetSdkRest.autoload :V1CreateFilterRequest, 'hatchet-sdk-rest/models/v1_create_filter_request'
+HatchetSdkRest.autoload :V1CreateWebhookRequest, 'hatchet-sdk-rest/models/v1_create_webhook_request'
+HatchetSdkRest.autoload :V1CreateWebhookRequestAPIKey, 'hatchet-sdk-rest/models/v1_create_webhook_request_api_key'
+HatchetSdkRest.autoload :V1CreateWebhookRequestBase, 'hatchet-sdk-rest/models/v1_create_webhook_request_base'
+HatchetSdkRest.autoload :V1CreateWebhookRequestBasicAuth, 'hatchet-sdk-rest/models/v1_create_webhook_request_basic_auth'
+HatchetSdkRest.autoload :V1CreateWebhookRequestHMAC, 'hatchet-sdk-rest/models/v1_create_webhook_request_hmac'
+HatchetSdkRest.autoload :V1DagChildren, 'hatchet-sdk-rest/models/v1_dag_children'
+HatchetSdkRest.autoload :V1Event, 'hatchet-sdk-rest/models/v1_event'
+HatchetSdkRest.autoload :V1EventList, 'hatchet-sdk-rest/models/v1_event_list'
+HatchetSdkRest.autoload :V1EventTriggeredRun, 'hatchet-sdk-rest/models/v1_event_triggered_run'
+HatchetSdkRest.autoload :V1EventWorkflowRunSummary, 'hatchet-sdk-rest/models/v1_event_workflow_run_summary'
+HatchetSdkRest.autoload :V1Filter, 'hatchet-sdk-rest/models/v1_filter'
+HatchetSdkRest.autoload :V1FilterList, 'hatchet-sdk-rest/models/v1_filter_list'
+HatchetSdkRest.autoload :V1LogLine, 'hatchet-sdk-rest/models/v1_log_line'
+HatchetSdkRest.autoload :V1LogLineLevel, 'hatchet-sdk-rest/models/v1_log_line_level'
+HatchetSdkRest.autoload :V1LogLineList, 'hatchet-sdk-rest/models/v1_log_line_list'
+HatchetSdkRest.autoload :V1LogLineOrderByDirection, 'hatchet-sdk-rest/models/v1_log_line_order_by_direction'
+HatchetSdkRest.autoload :V1ReplayTaskRequest, 'hatchet-sdk-rest/models/v1_replay_task_request'
+HatchetSdkRest.autoload :V1ReplayedTasks, 'hatchet-sdk-rest/models/v1_replayed_tasks'
+HatchetSdkRest.autoload :V1TaskEvent, 'hatchet-sdk-rest/models/v1_task_event'
+HatchetSdkRest.autoload :V1TaskEventList, 'hatchet-sdk-rest/models/v1_task_event_list'
+HatchetSdkRest.autoload :V1TaskEventType, 'hatchet-sdk-rest/models/v1_task_event_type'
+HatchetSdkRest.autoload :V1TaskFilter, 'hatchet-sdk-rest/models/v1_task_filter'
+HatchetSdkRest.autoload :V1TaskPointMetric, 'hatchet-sdk-rest/models/v1_task_point_metric'
+HatchetSdkRest.autoload :V1TaskPointMetrics, 'hatchet-sdk-rest/models/v1_task_point_metrics'
+HatchetSdkRest.autoload :V1TaskRunMetric, 'hatchet-sdk-rest/models/v1_task_run_metric'
+HatchetSdkRest.autoload :V1TaskRunStatus, 'hatchet-sdk-rest/models/v1_task_run_status'
+HatchetSdkRest.autoload :V1TaskStatus, 'hatchet-sdk-rest/models/v1_task_status'
+HatchetSdkRest.autoload :V1TaskSummary, 'hatchet-sdk-rest/models/v1_task_summary'
+HatchetSdkRest.autoload :V1TaskSummaryList, 'hatchet-sdk-rest/models/v1_task_summary_list'
+HatchetSdkRest.autoload :V1TaskTiming, 'hatchet-sdk-rest/models/v1_task_timing'
+HatchetSdkRest.autoload :V1TaskTimingList, 'hatchet-sdk-rest/models/v1_task_timing_list'
+HatchetSdkRest.autoload :V1TriggerWorkflowRunRequest, 'hatchet-sdk-rest/models/v1_trigger_workflow_run_request'
+HatchetSdkRest.autoload :V1UpdateFilterRequest, 'hatchet-sdk-rest/models/v1_update_filter_request'
+HatchetSdkRest.autoload :V1UpdateWebhookRequest, 'hatchet-sdk-rest/models/v1_update_webhook_request'
+HatchetSdkRest.autoload :V1Webhook, 'hatchet-sdk-rest/models/v1_webhook'
+HatchetSdkRest.autoload :V1WebhookAPIKeyAuth, 'hatchet-sdk-rest/models/v1_webhook_api_key_auth'
+HatchetSdkRest.autoload :V1WebhookAuthType, 'hatchet-sdk-rest/models/v1_webhook_auth_type'
+HatchetSdkRest.autoload :V1WebhookBasicAuth, 'hatchet-sdk-rest/models/v1_webhook_basic_auth'
+HatchetSdkRest.autoload :V1WebhookHMACAlgorithm, 'hatchet-sdk-rest/models/v1_webhook_hmac_algorithm'
+HatchetSdkRest.autoload :V1WebhookHMACAuth, 'hatchet-sdk-rest/models/v1_webhook_hmac_auth'
+HatchetSdkRest.autoload :V1WebhookHMACEncoding, 'hatchet-sdk-rest/models/v1_webhook_hmac_encoding'
+HatchetSdkRest.autoload :V1WebhookList, 'hatchet-sdk-rest/models/v1_webhook_list'
+HatchetSdkRest.autoload :V1WebhookSourceName, 'hatchet-sdk-rest/models/v1_webhook_source_name'
+HatchetSdkRest.autoload :V1WorkflowRun, 'hatchet-sdk-rest/models/v1_workflow_run'
+HatchetSdkRest.autoload :V1WorkflowRunDetails, 'hatchet-sdk-rest/models/v1_workflow_run_details'
+HatchetSdkRest.autoload :V1WorkflowRunDisplayName, 'hatchet-sdk-rest/models/v1_workflow_run_display_name'
+HatchetSdkRest.autoload :V1WorkflowRunDisplayNameList, 'hatchet-sdk-rest/models/v1_workflow_run_display_name_list'
+HatchetSdkRest.autoload :V1WorkflowType, 'hatchet-sdk-rest/models/v1_workflow_type'
+HatchetSdkRest.autoload :WebhookWorker, 'hatchet-sdk-rest/models/webhook_worker'
+HatchetSdkRest.autoload :WebhookWorkerCreateRequest, 'hatchet-sdk-rest/models/webhook_worker_create_request'
+HatchetSdkRest.autoload :WebhookWorkerCreateResponse, 'hatchet-sdk-rest/models/webhook_worker_create_response'
+HatchetSdkRest.autoload :WebhookWorkerCreated, 'hatchet-sdk-rest/models/webhook_worker_created'
+HatchetSdkRest.autoload :WebhookWorkerListResponse, 'hatchet-sdk-rest/models/webhook_worker_list_response'
+HatchetSdkRest.autoload :WebhookWorkerRequest, 'hatchet-sdk-rest/models/webhook_worker_request'
+HatchetSdkRest.autoload :WebhookWorkerRequestListResponse, 'hatchet-sdk-rest/models/webhook_worker_request_list_response'
+HatchetSdkRest.autoload :WebhookWorkerRequestMethod, 'hatchet-sdk-rest/models/webhook_worker_request_method'
+HatchetSdkRest.autoload :Worker, 'hatchet-sdk-rest/models/worker'
+HatchetSdkRest.autoload :WorkerLabel, 'hatchet-sdk-rest/models/worker_label'
+HatchetSdkRest.autoload :WorkerList, 'hatchet-sdk-rest/models/worker_list'
+HatchetSdkRest.autoload :WorkerRuntimeInfo, 'hatchet-sdk-rest/models/worker_runtime_info'
+HatchetSdkRest.autoload :WorkerRuntimeSDKs, 'hatchet-sdk-rest/models/worker_runtime_sdks'
+HatchetSdkRest.autoload :WorkerSlotConfig, 'hatchet-sdk-rest/models/worker_slot_config'
+HatchetSdkRest.autoload :WorkerType, 'hatchet-sdk-rest/models/worker_type'
+HatchetSdkRest.autoload :Workflow, 'hatchet-sdk-rest/models/workflow'
+HatchetSdkRest.autoload :WorkflowConcurrency, 'hatchet-sdk-rest/models/workflow_concurrency'
+HatchetSdkRest.autoload :WorkflowKind, 'hatchet-sdk-rest/models/workflow_kind'
+HatchetSdkRest.autoload :WorkflowList, 'hatchet-sdk-rest/models/workflow_list'
+HatchetSdkRest.autoload :WorkflowMetrics, 'hatchet-sdk-rest/models/workflow_metrics'
+HatchetSdkRest.autoload :WorkflowRun, 'hatchet-sdk-rest/models/workflow_run'
+HatchetSdkRest.autoload :WorkflowRunList, 'hatchet-sdk-rest/models/workflow_run_list'
+HatchetSdkRest.autoload :WorkflowRunOrderByDirection, 'hatchet-sdk-rest/models/workflow_run_order_by_direction'
+HatchetSdkRest.autoload :WorkflowRunOrderByField, 'hatchet-sdk-rest/models/workflow_run_order_by_field'
+HatchetSdkRest.autoload :WorkflowRunShape, 'hatchet-sdk-rest/models/workflow_run_shape'
+HatchetSdkRest.autoload :WorkflowRunShapeItemForWorkflowRunDetails, 'hatchet-sdk-rest/models/workflow_run_shape_item_for_workflow_run_details'
+HatchetSdkRest.autoload :WorkflowRunStatus, 'hatchet-sdk-rest/models/workflow_run_status'
+HatchetSdkRest.autoload :WorkflowRunTriggeredBy, 'hatchet-sdk-rest/models/workflow_run_triggered_by'
+HatchetSdkRest.autoload :WorkflowRunsCancelRequest, 'hatchet-sdk-rest/models/workflow_runs_cancel_request'
+HatchetSdkRest.autoload :WorkflowRunsMetrics, 'hatchet-sdk-rest/models/workflow_runs_metrics'
+HatchetSdkRest.autoload :WorkflowRunsMetricsCounts, 'hatchet-sdk-rest/models/workflow_runs_metrics_counts'
+HatchetSdkRest.autoload :WorkflowTag, 'hatchet-sdk-rest/models/workflow_tag'
+HatchetSdkRest.autoload :WorkflowTriggerCronRef, 'hatchet-sdk-rest/models/workflow_trigger_cron_ref'
+HatchetSdkRest.autoload :WorkflowTriggerEventRef, 'hatchet-sdk-rest/models/workflow_trigger_event_ref'
+HatchetSdkRest.autoload :WorkflowTriggers, 'hatchet-sdk-rest/models/workflow_triggers'
+HatchetSdkRest.autoload :WorkflowUpdateRequest, 'hatchet-sdk-rest/models/workflow_update_request'
+HatchetSdkRest.autoload :WorkflowVersion, 'hatchet-sdk-rest/models/workflow_version'
+HatchetSdkRest.autoload :WorkflowVersionDefinition, 'hatchet-sdk-rest/models/workflow_version_definition'
+HatchetSdkRest.autoload :WorkflowVersionMeta, 'hatchet-sdk-rest/models/workflow_version_meta'
+HatchetSdkRest.autoload :WorkflowWorkersCount, 'hatchet-sdk-rest/models/workflow_workers_count'
+
+# APIs
+HatchetSdkRest.autoload :APITokenApi, 'hatchet-sdk-rest/api/api_token_api'
+HatchetSdkRest.autoload :CELApi, 'hatchet-sdk-rest/api/cel_api'
+HatchetSdkRest.autoload :DefaultApi, 'hatchet-sdk-rest/api/default_api'
+HatchetSdkRest.autoload :EventApi, 'hatchet-sdk-rest/api/event_api'
+HatchetSdkRest.autoload :FilterApi, 'hatchet-sdk-rest/api/filter_api'
+HatchetSdkRest.autoload :GithubApi, 'hatchet-sdk-rest/api/github_api'
+HatchetSdkRest.autoload :HealthcheckApi, 'hatchet-sdk-rest/api/healthcheck_api'
+HatchetSdkRest.autoload :LogApi, 'hatchet-sdk-rest/api/log_api'
+HatchetSdkRest.autoload :MetadataApi, 'hatchet-sdk-rest/api/metadata_api'
+HatchetSdkRest.autoload :RateLimitsApi, 'hatchet-sdk-rest/api/rate_limits_api'
+HatchetSdkRest.autoload :SNSApi, 'hatchet-sdk-rest/api/sns_api'
+HatchetSdkRest.autoload :SlackApi, 'hatchet-sdk-rest/api/slack_api'
+HatchetSdkRest.autoload :StepRunApi, 'hatchet-sdk-rest/api/step_run_api'
+HatchetSdkRest.autoload :TaskApi, 'hatchet-sdk-rest/api/task_api'
+HatchetSdkRest.autoload :TenantApi, 'hatchet-sdk-rest/api/tenant_api'
+HatchetSdkRest.autoload :UserApi, 'hatchet-sdk-rest/api/user_api'
+HatchetSdkRest.autoload :WebhookApi, 'hatchet-sdk-rest/api/webhook_api'
+HatchetSdkRest.autoload :WorkerApi, 'hatchet-sdk-rest/api/worker_api'
+HatchetSdkRest.autoload :WorkflowApi, 'hatchet-sdk-rest/api/workflow_api'
+HatchetSdkRest.autoload :WorkflowRunApi, 'hatchet-sdk-rest/api/workflow_run_api'
+HatchetSdkRest.autoload :WorkflowRunsApi, 'hatchet-sdk-rest/api/workflow_runs_api'
+
+module HatchetSdkRest
+ class << self
+ # Customize default settings for the SDK using block.
+ # HatchetSdkRest.configure do |config|
+ # config.username = "xxx"
+ # config.password = "xxx"
+ # end
+ # If no block given, return the default Configuration object.
+ def configure
+ if block_given?
+ yield(Configuration.default)
+ else
+ Configuration.default
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/api_token_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/api_token_api.rb
new file mode 100644
index 000000000..f306b2614
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/api_token_api.rb
@@ -0,0 +1,242 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class APITokenApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create API Token
+ # Create an API token for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [CreateAPITokenRequest] :create_api_token_request
+ # @return [CreateAPITokenResponse]
+ def api_token_create(tenant, opts = {})
+ data, _status_code, _headers = api_token_create_with_http_info(tenant, opts)
+ data
+ end
+
+ # Create API Token
+ # Create an API token for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [CreateAPITokenRequest] :create_api_token_request
+ # @return [Array<(CreateAPITokenResponse, Integer, Hash)>] CreateAPITokenResponse data, response status code and response headers
+ def api_token_create_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: APITokenApi.api_token_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling APITokenApi.api_token_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling APITokenApi.api_token_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling APITokenApi.api_token_create, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/api-tokens'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'create_api_token_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'CreateAPITokenResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"APITokenApi.api_token_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: APITokenApi#api_token_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List API Tokens
+ # List API tokens for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [ListAPITokensResponse]
+ def api_token_list(tenant, opts = {})
+ data, _status_code, _headers = api_token_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List API Tokens
+ # List API tokens for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ListAPITokensResponse, Integer, Hash)>] ListAPITokensResponse data, response status code and response headers
+ def api_token_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: APITokenApi.api_token_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling APITokenApi.api_token_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling APITokenApi.api_token_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling APITokenApi.api_token_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/api-tokens'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ListAPITokensResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"APITokenApi.api_token_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: APITokenApi#api_token_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Revoke API Token
+ # Revoke an API token for a tenant
+ # @param api_token [String] The API token
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def api_token_update_revoke(api_token, opts = {})
+ api_token_update_revoke_with_http_info(api_token, opts)
+ nil
+ end
+
+ # Revoke API Token
+ # Revoke an API token for a tenant
+ # @param api_token [String] The API token
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def api_token_update_revoke_with_http_info(api_token, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: APITokenApi.api_token_update_revoke ...'
+ end
+ # verify the required parameter 'api_token' is set
+ if @api_client.config.client_side_validation && api_token.nil?
+ fail ArgumentError, "Missing the required parameter 'api_token' when calling APITokenApi.api_token_update_revoke"
+ end
+ if @api_client.config.client_side_validation && api_token.to_s.length > 36
+ fail ArgumentError, 'invalid value for "api_token" when calling APITokenApi.api_token_update_revoke, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && api_token.to_s.length < 36
+ fail ArgumentError, 'invalid value for "api_token" when calling APITokenApi.api_token_update_revoke, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/api-tokens/{api-token}'.sub('{' + 'api-token' + '}', CGI.escape(api_token.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"APITokenApi.api_token_update_revoke",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: APITokenApi#api_token_update_revoke\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/cel_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/cel_api.rb
new file mode 100644
index 000000000..838f193e8
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/cel_api.rb
@@ -0,0 +1,104 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class CELApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Debug a CEL expression
+ # Evaluate a CEL expression against provided input data.
+ # @param tenant [String] The tenant id
+ # @param v1_cel_debug_request [V1CELDebugRequest] The inputs to test the CEL expression against
+ # @param [Hash] opts the optional parameters
+ # @return [V1CELDebugResponse]
+ def v1_cel_debug(tenant, v1_cel_debug_request, opts = {})
+ data, _status_code, _headers = v1_cel_debug_with_http_info(tenant, v1_cel_debug_request, opts)
+ data
+ end
+
+ # Debug a CEL expression
+ # Evaluate a CEL expression against provided input data.
+ # @param tenant [String] The tenant id
+ # @param v1_cel_debug_request [V1CELDebugRequest] The inputs to test the CEL expression against
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1CELDebugResponse, Integer, Hash)>] V1CELDebugResponse data, response status code and response headers
+ def v1_cel_debug_with_http_info(tenant, v1_cel_debug_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: CELApi.v1_cel_debug ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling CELApi.v1_cel_debug"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling CELApi.v1_cel_debug, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling CELApi.v1_cel_debug, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_cel_debug_request' is set
+ if @api_client.config.client_side_validation && v1_cel_debug_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_cel_debug_request' when calling CELApi.v1_cel_debug"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/cel/debug'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_cel_debug_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1CELDebugResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"CELApi.v1_cel_debug",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: CELApi#v1_cel_debug\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/default_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/default_api.rb
new file mode 100644
index 000000000..a99c7bbb4
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/default_api.rb
@@ -0,0 +1,622 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class DefaultApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # We return the version for the currently running server
+ # Get the version of the server
+ # @param [Hash] opts the optional parameters
+ # @return [InfoGetVersion200Response]
+ def info_get_version(opts = {})
+ data, _status_code, _headers = info_get_version_with_http_info(opts)
+ data
+ end
+
+ # We return the version for the currently running server
+ # Get the version of the server
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(InfoGetVersion200Response, Integer, Hash)>] InfoGetVersion200Response data, response status code and response headers
+ def info_get_version_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.info_get_version ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/version'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'InfoGetVersion200Response'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.info_get_version",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#info_get_version\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Detailed Health Probe For the Instance
+ # Triggers a workflow to check the status of the instance
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def monitoring_post_run_probe(tenant, opts = {})
+ monitoring_post_run_probe_with_http_info(tenant, opts)
+ nil
+ end
+
+ # Detailed Health Probe For the Instance
+ # Triggers a workflow to check the status of the instance
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def monitoring_post_run_probe_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.monitoring_post_run_probe ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling DefaultApi.monitoring_post_run_probe"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.monitoring_post_run_probe, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.monitoring_post_run_probe, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/monitoring/{tenant}/probe'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.monitoring_post_run_probe",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#monitoring_post_run_probe\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete invite
+ # Deletes a tenant invite
+ # @param tenant [String] The tenant id
+ # @param tenant_invite [String] The tenant invite id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantInvite]
+ def tenant_invite_delete(tenant, tenant_invite, opts = {})
+ data, _status_code, _headers = tenant_invite_delete_with_http_info(tenant, tenant_invite, opts)
+ data
+ end
+
+ # Delete invite
+ # Deletes a tenant invite
+ # @param tenant [String] The tenant id
+ # @param tenant_invite [String] The tenant invite id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantInvite, Integer, Hash)>] TenantInvite data, response status code and response headers
+ def tenant_invite_delete_with_http_info(tenant, tenant_invite, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.tenant_invite_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling DefaultApi.tenant_invite_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.tenant_invite_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.tenant_invite_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'tenant_invite' is set
+ if @api_client.config.client_side_validation && tenant_invite.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant_invite' when calling DefaultApi.tenant_invite_delete"
+ end
+ if @api_client.config.client_side_validation && tenant_invite.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant_invite" when calling DefaultApi.tenant_invite_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant_invite.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant_invite" when calling DefaultApi.tenant_invite_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/invites/{tenant-invite}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'tenant-invite' + '}', CGI.escape(tenant_invite.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantInvite'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.tenant_invite_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#tenant_invite_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update invite
+ # Updates a tenant invite
+ # @param tenant [String] The tenant id
+ # @param tenant_invite [String] The tenant invite id
+ # @param update_tenant_invite_request [UpdateTenantInviteRequest] The tenant invite to update
+ # @param [Hash] opts the optional parameters
+ # @return [TenantInvite]
+ def tenant_invite_update(tenant, tenant_invite, update_tenant_invite_request, opts = {})
+ data, _status_code, _headers = tenant_invite_update_with_http_info(tenant, tenant_invite, update_tenant_invite_request, opts)
+ data
+ end
+
+ # Update invite
+ # Updates a tenant invite
+ # @param tenant [String] The tenant id
+ # @param tenant_invite [String] The tenant invite id
+ # @param update_tenant_invite_request [UpdateTenantInviteRequest] The tenant invite to update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantInvite, Integer, Hash)>] TenantInvite data, response status code and response headers
+ def tenant_invite_update_with_http_info(tenant, tenant_invite, update_tenant_invite_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.tenant_invite_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling DefaultApi.tenant_invite_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.tenant_invite_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.tenant_invite_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'tenant_invite' is set
+ if @api_client.config.client_side_validation && tenant_invite.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant_invite' when calling DefaultApi.tenant_invite_update"
+ end
+ if @api_client.config.client_side_validation && tenant_invite.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant_invite" when calling DefaultApi.tenant_invite_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant_invite.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant_invite" when calling DefaultApi.tenant_invite_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_tenant_invite_request' is set
+ if @api_client.config.client_side_validation && update_tenant_invite_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_tenant_invite_request' when calling DefaultApi.tenant_invite_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/invites/{tenant-invite}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'tenant-invite' + '}', CGI.escape(tenant_invite.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_tenant_invite_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantInvite'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.tenant_invite_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#tenant_invite_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Create a webhook
+ # Creates a webhook
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [WebhookWorkerCreateRequest] :webhook_worker_create_request
+ # @return [WebhookWorkerCreated]
+ def webhook_create(tenant, opts = {})
+ data, _status_code, _headers = webhook_create_with_http_info(tenant, opts)
+ data
+ end
+
+ # Create a webhook
+ # Creates a webhook via POST /api/v1/tenants/{tenant}/webhook-workers.
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @option opts [WebhookWorkerCreateRequest] :webhook_worker_create_request
+ # @return [Array<(WebhookWorkerCreated, Integer, Hash)>] WebhookWorkerCreated data, response status code and response headers
+ def webhook_create_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.webhook_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling DefaultApi.webhook_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.webhook_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.webhook_create, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the tenant id into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/webhook-workers'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type': JSON request body
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body (model): serialized create request, or a debug override
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'webhook_worker_create_request'])
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'WebhookWorkerCreated'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.webhook_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#webhook_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete a webhook
+ # Deletes a webhook. Returns nil; use #webhook_delete_with_http_info for status code and headers.
+ # @param webhook [String] The webhook id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def webhook_delete(webhook, opts = {})
+ webhook_delete_with_http_info(webhook, opts)
+ nil
+ end
+
+ # Delete a webhook
+ # Deletes a webhook via DELETE /api/v1/webhook-workers/{webhook}.
+ # @param webhook [String] The webhook id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def webhook_delete_with_http_info(webhook, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.webhook_delete ...'
+ end
+ # verify the required parameter 'webhook' is set
+ if @api_client.config.client_side_validation && webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'webhook' when calling DefaultApi.webhook_delete"
+ end
+ if @api_client.config.client_side_validation && webhook.to_s.length > 36
+ fail ArgumentError, 'invalid value for "webhook" when calling DefaultApi.webhook_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && webhook.to_s.length < 36
+ fail ArgumentError, 'invalid value for "webhook" when calling DefaultApi.webhook_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the webhook id into the path template
+ local_var_path = '/api/v1/webhook-workers/{webhook}'.sub('{' + 'webhook' + '}', CGI.escape(webhook.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: no response body expected (debug override only)
+ return_type = opts[:debug_return_type]
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.webhook_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#webhook_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List webhooks
+ # Lists all webhooks. Returns only the response body; use #webhook_list_with_http_info for status code and headers.
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [WebhookWorkerListResponse]
+ def webhook_list(tenant, opts = {})
+ data, _status_code, _headers = webhook_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List webhooks
+ # Lists all webhooks via GET /api/v1/tenants/{tenant}/webhook-workers.
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WebhookWorkerListResponse, Integer, Hash)>] WebhookWorkerListResponse data, response status code and response headers
+ def webhook_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.webhook_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling DefaultApi.webhook_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.webhook_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling DefaultApi.webhook_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the tenant id into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/webhook-workers'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'WebhookWorkerListResponse'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.webhook_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#webhook_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List webhook requests
+ # Lists all requests for a webhook. Returns only the response body; use #webhook_requests_list_with_http_info for status code and headers.
+ # @param webhook [String] The webhook id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [WebhookWorkerRequestListResponse]
+ def webhook_requests_list(webhook, opts = {})
+ data, _status_code, _headers = webhook_requests_list_with_http_info(webhook, opts)
+ data
+ end
+
+ # List webhook requests
+ # Lists all requests for a webhook via GET /api/v1/webhook-workers/{webhook}/requests.
+ # @param webhook [String] The webhook id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WebhookWorkerRequestListResponse, Integer, Hash)>] WebhookWorkerRequestListResponse data, response status code and response headers
+ def webhook_requests_list_with_http_info(webhook, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: DefaultApi.webhook_requests_list ...'
+ end
+ # verify the required parameter 'webhook' is set
+ if @api_client.config.client_side_validation && webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'webhook' when calling DefaultApi.webhook_requests_list"
+ end
+ if @api_client.config.client_side_validation && webhook.to_s.length > 36
+ fail ArgumentError, 'invalid value for "webhook" when calling DefaultApi.webhook_requests_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && webhook.to_s.length < 36
+ fail ArgumentError, 'invalid value for "webhook" when calling DefaultApi.webhook_requests_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the webhook id into the path template
+ local_var_path = '/api/v1/webhook-workers/{webhook}/requests'.sub('{' + 'webhook' + '}', CGI.escape(webhook.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'WebhookWorkerRequestListResponse'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"DefaultApi.webhook_requests_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: DefaultApi#webhook_requests_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/event_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/event_api.rb
new file mode 100644
index 000000000..b6ac11e7d
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/event_api.rb
@@ -0,0 +1,1006 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class EventApi
+ attr_accessor :api_client
+
+ # @param api_client [ApiClient] client used to issue all HTTP requests (defaults to ApiClient.default)
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create event
+ # Creates a new event. Returns only the response body; use #event_create_with_http_info for status code and headers.
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param create_event_request [CreateEventRequest] The event to create
+ # @param [Hash] opts the optional parameters
+ # @return [Event]
+ def event_create(tenant, create_event_request, opts = {})
+ data, _status_code, _headers = event_create_with_http_info(tenant, create_event_request, opts)
+ data
+ end
+
+ # Create event
+ # Creates a new event via POST /api/v1/tenants/{tenant}/events.
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param create_event_request [CreateEventRequest] The event to create (required)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Event, Integer, Hash)>] Event data, response status code and response headers
+ def event_create_with_http_info(tenant, create_event_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'create_event_request' is set
+ if @api_client.config.client_side_validation && create_event_request.nil?
+ fail ArgumentError, "Missing the required parameter 'create_event_request' when calling EventApi.event_create"
+ end
+ # resource path: URL-escape the tenant id into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/events'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type': JSON request body
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body (model): serialized create request, or a debug override
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(create_event_request)
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'Event'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Bulk Create events
+ # Bulk creates new events. Returns only the response body; use #event_create_bulk_with_http_info for status code and headers.
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param bulk_create_event_request [BulkCreateEventRequest] The events to create
+ # @param [Hash] opts the optional parameters
+ # @return [Events]
+ def event_create_bulk(tenant, bulk_create_event_request, opts = {})
+ data, _status_code, _headers = event_create_bulk_with_http_info(tenant, bulk_create_event_request, opts)
+ data
+ end
+
+ # Bulk Create events
+ # Bulk creates new events via POST /api/v1/tenants/{tenant}/events/bulk.
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param bulk_create_event_request [BulkCreateEventRequest] The events to create (required)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Events, Integer, Hash)>] Events data, response status code and response headers
+ def event_create_bulk_with_http_info(tenant, bulk_create_event_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_create_bulk ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_create_bulk"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_create_bulk, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_create_bulk, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'bulk_create_event_request' is set
+ if @api_client.config.client_side_validation && bulk_create_event_request.nil?
+ fail ArgumentError, "Missing the required parameter 'bulk_create_event_request' when calling EventApi.event_create_bulk"
+ end
+ # resource path: URL-escape the tenant id into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/events/bulk'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type': JSON request body
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body (model): serialized bulk create request, or a debug override
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(bulk_create_event_request)
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'Events'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_create_bulk",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_create_bulk\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get event data
+ # Get the data for an event. Returns only the response body; use #event_data_get_with_http_info for status code and headers.
+ # @param event [String] The event id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [EventData]
+ def event_data_get(event, opts = {})
+ data, _status_code, _headers = event_data_get_with_http_info(event, opts)
+ data
+ end
+
+ # Get event data
+ # Get the data for an event via GET /api/v1/events/{event}/data.
+ # @param event [String] The event id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventData, Integer, Hash)>] EventData data, response status code and response headers
+ def event_data_get_with_http_info(event, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_data_get ...'
+ end
+ # verify the required parameter 'event' is set
+ if @api_client.config.client_side_validation && event.nil?
+ fail ArgumentError, "Missing the required parameter 'event' when calling EventApi.event_data_get"
+ end
+ if @api_client.config.client_side_validation && event.to_s.length > 36
+ fail ArgumentError, 'invalid value for "event" when calling EventApi.event_data_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && event.to_s.length < 36
+ fail ArgumentError, 'invalid value for "event" when calling EventApi.event_data_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the event id into the path template
+ local_var_path = '/api/v1/events/{event}/data'.sub('{' + 'event' + '}', CGI.escape(event.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'EventData'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_data_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_data_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get event data
+ # Get the data for an event, scoped to a tenant. Returns only the response body; use #event_data_get_with_tenant_with_http_info for status code and headers.
+ # @param event_with_tenant [String] The event id (36-character UUID)
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [EventData]
+ def event_data_get_with_tenant(event_with_tenant, tenant, opts = {})
+ data, _status_code, _headers = event_data_get_with_tenant_with_http_info(event_with_tenant, tenant, opts)
+ data
+ end
+
+ # Get event data
+ # Get the data for an event via GET /api/v1/tenants/{tenant}/events/{event-with-tenant}/data.
+ # @param event_with_tenant [String] The event id (36-character UUID; length enforced below)
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventData, Integer, Hash)>] EventData data, response status code and response headers
+ def event_data_get_with_tenant_with_http_info(event_with_tenant, tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_data_get_with_tenant ...'
+ end
+ # verify the required parameter 'event_with_tenant' is set
+ if @api_client.config.client_side_validation && event_with_tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'event_with_tenant' when calling EventApi.event_data_get_with_tenant"
+ end
+ if @api_client.config.client_side_validation && event_with_tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "event_with_tenant" when calling EventApi.event_data_get_with_tenant, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && event_with_tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "event_with_tenant" when calling EventApi.event_data_get_with_tenant, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_data_get_with_tenant"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_data_get_with_tenant, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_data_get_with_tenant, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape both ids into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/events/{event-with-tenant}/data'.sub('{' + 'event-with-tenant' + '}', CGI.escape(event_with_tenant.to_s)).sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'EventData'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_data_get_with_tenant",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_data_get_with_tenant\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get an event
+ # Get an event. Returns only the response body; use #event_get_with_http_info for status code and headers.
+ # @param event [String] The event id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [Event]
+ def event_get(event, opts = {})
+ data, _status_code, _headers = event_get_with_http_info(event, opts)
+ data
+ end
+
+ # Get an event
+ # Get an event via GET /api/v1/events/{event}.
+ # @param event [String] The event id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Event, Integer, Hash)>] Event data, response status code and response headers
+ def event_get_with_http_info(event, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_get ...'
+ end
+ # verify the required parameter 'event' is set
+ if @api_client.config.client_side_validation && event.nil?
+ fail ArgumentError, "Missing the required parameter 'event' when calling EventApi.event_get"
+ end
+ if @api_client.config.client_side_validation && event.to_s.length > 36
+ fail ArgumentError, 'invalid value for "event" when calling EventApi.event_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && event.to_s.length < 36
+ fail ArgumentError, 'invalid value for "event" when calling EventApi.event_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the event id into the path template
+ local_var_path = '/api/v1/events/{event}'.sub('{' + 'event' + '}', CGI.escape(event.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'Event'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List event keys
+ # Lists all event keys for a tenant. Returns only the response body; use #event_key_list_with_http_info for status code and headers.
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @return [EventKeyList]
+ def event_key_list(tenant, opts = {})
+ data, _status_code, _headers = event_key_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List event keys
+ # Lists all event keys for a tenant via GET /api/v1/tenants/{tenant}/events/keys.
+ # @param tenant [String] The tenant id (36-character UUID; length enforced below)
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventKeyList, Integer, Hash)>] EventKeyList data, response status code and response headers
+ def event_key_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_key_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_key_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_key_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_key_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path: URL-escape the tenant id into the path template
+ local_var_path = '/api/v1/tenants/{tenant}/events/keys'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters: none defined; callers may inject via opts[:query_params]
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept': JSON response, unless the caller already set one
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters: none defined for this endpoint
+ form_params = opts[:form_params] || {}
+
+ # http body: none for this request (debug override only)
+ post_body = opts[:debug_body]
+
+ # return_type: response is deserialized into this model
+ return_type = opts[:debug_return_type] || 'EventKeyList'
+
+ # auth_names: either cookie or bearer-token auth satisfies this endpoint
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_key_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_key_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List events
+ # Lists all events for a tenant. Returns only the response body; use #event_list_with_http_info for status code and headers.
+ # @param tenant [String] The tenant id (36-character UUID)
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :keys A list of keys to filter by
+ # @option opts [Array] :workflows A list of workflow IDs to filter by
+ # @option opts [Array] :statuses A list of workflow run statuses to filter by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [EventOrderByField] :order_by_field What to order by
+ # @option opts [EventOrderByDirection] :order_by_direction The order direction
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Array] :event_ids A list of event ids to filter by
+ # @return [EventList]
+ def event_list(tenant, opts = {})
+ data, _status_code, _headers = event_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List events
+ # Lists all events for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :keys A list of keys to filter by
+ # @option opts [Array] :workflows A list of workflow IDs to filter by
+ # @option opts [Array] :statuses A list of workflow run statuses to filter by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [EventOrderByField] :order_by_field What to order by
+ # @option opts [EventOrderByDirection] :order_by_direction The order direction
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Array] :event_ids A list of event ids to filter by
+ # @return [Array<(EventList, Integer, Hash)>] EventList data, response status code and response headers
+ def event_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/events'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'keys'] = @api_client.build_collection_param(opts[:'keys'], :multi) if !opts[:'keys'].nil?
+ query_params[:'workflows'] = @api_client.build_collection_param(opts[:'workflows'], :multi) if !opts[:'workflows'].nil?
+ query_params[:'statuses'] = @api_client.build_collection_param(opts[:'statuses'], :multi) if !opts[:'statuses'].nil?
+ query_params[:'search'] = opts[:'search'] if !opts[:'search'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'eventIds'] = @api_client.build_collection_param(opts[:'event_ids'], :multi) if !opts[:'event_ids'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'EventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+    # Cancel events
+    # Cancels all runs for a list of events.
+    # @param tenant [String] The tenant id
+    # @param cancel_event_request [CancelEventRequest] The event ids to cancel
+ # @param [Hash] opts the optional parameters
+ # @return [EventUpdateCancel200Response]
+ def event_update_cancel(tenant, cancel_event_request, opts = {})
+ data, _status_code, _headers = event_update_cancel_with_http_info(tenant, cancel_event_request, opts)
+ data
+ end
+
+    # Cancel events
+    # Cancels all runs for a list of events.
+    # @param tenant [String] The tenant id
+    # @param cancel_event_request [CancelEventRequest] The event ids to cancel
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventUpdateCancel200Response, Integer, Hash)>] EventUpdateCancel200Response data, response status code and response headers
+ def event_update_cancel_with_http_info(tenant, cancel_event_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_update_cancel ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_update_cancel"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_update_cancel, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_update_cancel, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'cancel_event_request' is set
+ if @api_client.config.client_side_validation && cancel_event_request.nil?
+ fail ArgumentError, "Missing the required parameter 'cancel_event_request' when calling EventApi.event_update_cancel"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/events/cancel'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(cancel_event_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'EventUpdateCancel200Response'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_update_cancel",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_update_cancel\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Replay events
+ # Replays a list of events.
+ # @param tenant [String] The tenant id
+ # @param replay_event_request [ReplayEventRequest] The event ids to replay
+ # @param [Hash] opts the optional parameters
+ # @return [EventList]
+ def event_update_replay(tenant, replay_event_request, opts = {})
+ data, _status_code, _headers = event_update_replay_with_http_info(tenant, replay_event_request, opts)
+ data
+ end
+
+ # Replay events
+ # Replays a list of events.
+ # @param tenant [String] The tenant id
+ # @param replay_event_request [ReplayEventRequest] The event ids to replay
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventList, Integer, Hash)>] EventList data, response status code and response headers
+ def event_update_replay_with_http_info(tenant, replay_event_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.event_update_replay ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.event_update_replay"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_update_replay, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.event_update_replay, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'replay_event_request' is set
+ if @api_client.config.client_side_validation && replay_event_request.nil?
+ fail ArgumentError, "Missing the required parameter 'replay_event_request' when calling EventApi.event_update_replay"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/events/replay'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(replay_event_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'EventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.event_update_replay",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#event_update_replay\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+    # Get an event
+    # Get an event by its id
+ # @param tenant [String] The tenant id
+ # @param v1_event [String] The event id
+ # @param [Hash] opts the optional parameters
+ # @return [V1Event]
+ def v1_event_get(tenant, v1_event, opts = {})
+ data, _status_code, _headers = v1_event_get_with_http_info(tenant, v1_event, opts)
+ data
+ end
+
+    # Get an event
+    # Get an event by its id
+ # @param tenant [String] The tenant id
+ # @param v1_event [String] The event id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Event, Integer, Hash)>] V1Event data, response status code and response headers
+ def v1_event_get_with_http_info(tenant, v1_event, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.v1_event_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.v1_event_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_event' is set
+ if @api_client.config.client_side_validation && v1_event.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_event' when calling EventApi.v1_event_get"
+ end
+ if @api_client.config.client_side_validation && v1_event.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_event" when calling EventApi.v1_event_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_event.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_event" when calling EventApi.v1_event_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/events/{v1-event}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-event' + '}', CGI.escape(v1_event.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Event'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.v1_event_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#v1_event_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List event keys
+ # Lists all event keys for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [EventKeyList]
+ def v1_event_key_list(tenant, opts = {})
+ data, _status_code, _headers = v1_event_key_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List event keys
+ # Lists all event keys for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventKeyList, Integer, Hash)>] EventKeyList data, response status code and response headers
+ def v1_event_key_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.v1_event_key_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.v1_event_key_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_key_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_key_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/events/keys'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'EventKeyList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.v1_event_key_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#v1_event_key_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List events
+ # Lists all events for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :keys A list of keys to filter by
+ # @option opts [Time] :since Consider events that occurred after this time
+ # @option opts [Time] :_until Consider events that occurred before this time
+ # @option opts [Array] :workflow_ids Filter to events that are associated with a specific workflow run
+ # @option opts [Array] :workflow_run_statuses Filter to events that are associated with workflow runs matching a certain status
+ # @option opts [Array] :event_ids Filter to specific events by their ids
+ # @option opts [Array] :additional_metadata Filter by additional metadata on the events
+ # @option opts [Array] :scopes The scopes to filter by
+ # @return [V1EventList]
+ def v1_event_list(tenant, opts = {})
+ data, _status_code, _headers = v1_event_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List events
+ # Lists all events for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :keys A list of keys to filter by
+ # @option opts [Time] :since Consider events that occurred after this time
+ # @option opts [Time] :_until Consider events that occurred before this time
+ # @option opts [Array] :workflow_ids Filter to events that are associated with a specific workflow run
+ # @option opts [Array] :workflow_run_statuses Filter to events that are associated with workflow runs matching a certain status
+ # @option opts [Array] :event_ids Filter to specific events by their ids
+ # @option opts [Array] :additional_metadata Filter by additional metadata on the events
+ # @option opts [Array] :scopes The scopes to filter by
+ # @return [Array<(V1EventList, Integer, Hash)>] V1EventList data, response status code and response headers
+ def v1_event_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: EventApi.v1_event_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling EventApi.v1_event_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling EventApi.v1_event_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/events'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'keys'] = @api_client.build_collection_param(opts[:'keys'], :multi) if !opts[:'keys'].nil?
+ query_params[:'since'] = opts[:'since'] if !opts[:'since'].nil?
+ query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?
+ query_params[:'workflowIds'] = @api_client.build_collection_param(opts[:'workflow_ids'], :multi) if !opts[:'workflow_ids'].nil?
+ query_params[:'workflowRunStatuses'] = @api_client.build_collection_param(opts[:'workflow_run_statuses'], :multi) if !opts[:'workflow_run_statuses'].nil?
+ query_params[:'eventIds'] = @api_client.build_collection_param(opts[:'event_ids'], :multi) if !opts[:'event_ids'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'scopes'] = @api_client.build_collection_param(opts[:'scopes'], :multi) if !opts[:'scopes'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1EventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"EventApi.v1_event_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: EventApi#v1_event_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/filter_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/filter_api.rb
new file mode 100644
index 000000000..7c3e02357
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/filter_api.rb
@@ -0,0 +1,449 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class FilterApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create a filter
+ # Create a new filter
+ # @param tenant [String] The tenant id
+ # @param v1_create_filter_request [V1CreateFilterRequest] The input to the filter creation
+ # @param [Hash] opts the optional parameters
+ # @return [V1Filter]
+ def v1_filter_create(tenant, v1_create_filter_request, opts = {})
+ data, _status_code, _headers = v1_filter_create_with_http_info(tenant, v1_create_filter_request, opts)
+ data
+ end
+
+ # Create a filter
+ # Create a new filter
+ # @param tenant [String] The tenant id
+ # @param v1_create_filter_request [V1CreateFilterRequest] The input to the filter creation
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Filter, Integer, Hash)>] V1Filter data, response status code and response headers
+ def v1_filter_create_with_http_info(tenant, v1_create_filter_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: FilterApi.v1_filter_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling FilterApi.v1_filter_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_create_filter_request' is set
+ if @api_client.config.client_side_validation && v1_create_filter_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_create_filter_request' when calling FilterApi.v1_filter_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/filters'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_create_filter_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Filter'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"FilterApi.v1_filter_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: FilterApi#v1_filter_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete a filter
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id to delete
+ # @param [Hash] opts the optional parameters
+ # @return [V1Filter]
+ def v1_filter_delete(tenant, v1_filter, opts = {})
+ data, _status_code, _headers = v1_filter_delete_with_http_info(tenant, v1_filter, opts)
+ data
+ end
+
+ # Delete a filter
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id to delete
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Filter, Integer, Hash)>] V1Filter data, response status code and response headers
+ def v1_filter_delete_with_http_info(tenant, v1_filter, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: FilterApi.v1_filter_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling FilterApi.v1_filter_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_filter' is set
+ if @api_client.config.client_side_validation && v1_filter.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_filter' when calling FilterApi.v1_filter_delete"
+ end
+ if @api_client.config.client_side_validation && v1_filter.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_filter.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/filters/{v1-filter}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-filter' + '}', CGI.escape(v1_filter.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Filter'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"FilterApi.v1_filter_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: FilterApi#v1_filter_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get a filter
+ # Get a filter by its id
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id
+ # @param [Hash] opts the optional parameters
+ # @return [V1Filter]
+ def v1_filter_get(tenant, v1_filter, opts = {})
+ data, _status_code, _headers = v1_filter_get_with_http_info(tenant, v1_filter, opts)
+ data
+ end
+
+ # Get a filter
+ # Get a filter by its id
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Filter, Integer, Hash)>] V1Filter data, response status code and response headers
+ def v1_filter_get_with_http_info(tenant, v1_filter, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: FilterApi.v1_filter_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling FilterApi.v1_filter_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_filter' is set
+ if @api_client.config.client_side_validation && v1_filter.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_filter' when calling FilterApi.v1_filter_get"
+ end
+ if @api_client.config.client_side_validation && v1_filter.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_filter.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/filters/{v1-filter}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-filter' + '}', CGI.escape(v1_filter.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Filter'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"FilterApi.v1_filter_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: FilterApi#v1_filter_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List filters
+ # Lists all filters for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :workflow_ids The workflow ids to filter by
+ # @option opts [Array] :scopes The scopes to subset candidate filters by
+ # @return [V1FilterList]
+ def v1_filter_list(tenant, opts = {})
+ data, _status_code, _headers = v1_filter_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List filters
+ # Lists all filters for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :workflow_ids The workflow ids to filter by
+ # @option opts [Array] :scopes The scopes to subset candidate filters by
+ # @return [Array<(V1FilterList, Integer, Hash)>] V1FilterList data, response status code and response headers
+ def v1_filter_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: FilterApi.v1_filter_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling FilterApi.v1_filter_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/filters'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'workflowIds'] = @api_client.build_collection_param(opts[:'workflow_ids'], :multi) if !opts[:'workflow_ids'].nil?
+ query_params[:'scopes'] = @api_client.build_collection_param(opts[:'scopes'], :multi) if !opts[:'scopes'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1FilterList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"FilterApi.v1_filter_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: FilterApi#v1_filter_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update a filter
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id to update
+ # @param v1_update_filter_request [V1UpdateFilterRequest] The input to the filter update
+ # @param [Hash] opts the optional parameters
+ # @return [V1Filter]
+ def v1_filter_update(tenant, v1_filter, v1_update_filter_request, opts = {})
+ data, _status_code, _headers = v1_filter_update_with_http_info(tenant, v1_filter, v1_update_filter_request, opts)
+ data
+ end
+
+ # Update a filter
+ # @param tenant [String] The tenant id
+ # @param v1_filter [String] The filter id to update
+ # @param v1_update_filter_request [V1UpdateFilterRequest] The input to the filter update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Filter, Integer, Hash)>] V1Filter data, response status code and response headers
+ def v1_filter_update_with_http_info(tenant, v1_filter, v1_update_filter_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: FilterApi.v1_filter_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling FilterApi.v1_filter_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling FilterApi.v1_filter_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_filter' is set
+ if @api_client.config.client_side_validation && v1_filter.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_filter' when calling FilterApi.v1_filter_update"
+ end
+ if @api_client.config.client_side_validation && v1_filter.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_filter.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_filter" when calling FilterApi.v1_filter_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_update_filter_request' is set
+ if @api_client.config.client_side_validation && v1_update_filter_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_update_filter_request' when calling FilterApi.v1_filter_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/filters/{v1-filter}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-filter' + '}', CGI.escape(v1_filter.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_update_filter_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Filter'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"FilterApi.v1_filter_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: FilterApi#v1_filter_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/github_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/github_api.rb
new file mode 100644
index 000000000..2ed96b651
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/github_api.rb
@@ -0,0 +1,107 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class GithubApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Github app tenant webhook
+ # SNS event
+ # @param tenant [String] The tenant id
+ # @param event [String] The event key
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def sns_update(tenant, event, opts = {})
+ sns_update_with_http_info(tenant, event, opts)
+ nil
+ end
+
+ # Github app tenant webhook
+ # SNS event
+ # @param tenant [String] The tenant id
+ # @param event [String] The event key
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def sns_update_with_http_info(tenant, event, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: GithubApi.sns_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling GithubApi.sns_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling GithubApi.sns_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling GithubApi.sns_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'event' is set
+ if @api_client.config.client_side_validation && event.nil?
+ fail ArgumentError, "Missing the required parameter 'event' when calling GithubApi.sns_update"
+ end
+ if @api_client.config.client_side_validation && event.to_s.length > 255
+ fail ArgumentError, 'invalid value for "event" when calling GithubApi.sns_update, the character length must be smaller than or equal to 255.'
+ end
+
+ if @api_client.config.client_side_validation && event.to_s.length < 1
+ fail ArgumentError, 'invalid value for "event" when calling GithubApi.sns_update, the character length must be greater than or equal to 1.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/sns/{tenant}/{event}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'event' + '}', CGI.escape(event.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"GithubApi.sns_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: GithubApi#sns_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/healthcheck_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/healthcheck_api.rb
new file mode 100644
index 000000000..316396be9
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/healthcheck_api.rb
@@ -0,0 +1,136 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class HealthcheckApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Get liveness
+ # Gets the liveness status
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def liveness_get(opts = {})
+ liveness_get_with_http_info(opts)
+ nil
+ end
+
+ # Get liveness
+ # Gets the liveness status
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def liveness_get_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: HealthcheckApi.liveness_get ...'
+ end
+ # resource path
+ local_var_path = '/api/live'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"HealthcheckApi.liveness_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: HealthcheckApi#liveness_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get readiness
+ # Gets the readiness status
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def readiness_get(opts = {})
+ readiness_get_with_http_info(opts)
+ nil
+ end
+
+ # Get readiness
+ # Gets the readiness status
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def readiness_get_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: HealthcheckApi.readiness_get ...'
+ end
+ # resource path
+ local_var_path = '/api/ready'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"HealthcheckApi.readiness_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: HealthcheckApi#readiness_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/log_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/log_api.rb
new file mode 100644
index 000000000..5c68c9645
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/log_api.rb
@@ -0,0 +1,203 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class LogApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # List log lines
+ # Lists log lines for a step run.
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :levels A list of levels to filter by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [LogLineOrderByField] :order_by_field What to order by
+ # @option opts [LogLineOrderByDirection] :order_by_direction The order direction
+ # @return [LogLineList]
+ def log_line_list(step_run, opts = {})
+ data, _status_code, _headers = log_line_list_with_http_info(step_run, opts)
+ data
+ end
+
+ # List log lines
+ # Lists log lines for a step run.
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :levels A list of levels to filter by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [LogLineOrderByField] :order_by_field What to order by
+ # @option opts [LogLineOrderByDirection] :order_by_direction The order direction
+ # @return [Array<(LogLineList, Integer, Hash)>] LogLineList data, response status code and response headers
+ def log_line_list_with_http_info(step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: LogApi.log_line_list ...'
+ end
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling LogApi.log_line_list"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling LogApi.log_line_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling LogApi.log_line_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/step-runs/{step-run}/logs'.sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'levels'] = @api_client.build_collection_param(opts[:'levels'], :multi) if !opts[:'levels'].nil?
+ query_params[:'search'] = opts[:'search'] if !opts[:'search'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'LogLineList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"LogApi.log_line_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: LogApi#log_line_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List log lines
+ # Lists log lines for a task
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Time] :since The start time to get logs for
+ # @option opts [Time] :_until The end time to get logs for
+ # @option opts [String] :search A full-text search query to filter for
+ # @option opts [Array] :levels The log level(s) to include
+ # @option opts [V1LogLineOrderByDirection] :order_by_direction The direction to order by
+ # @option opts [Integer] :attempt The attempt number to filter for
+ # @return [V1LogLineList]
+ def v1_log_line_list(task, opts = {})
+ data, _status_code, _headers = v1_log_line_list_with_http_info(task, opts)
+ data
+ end
+
+ # List log lines
+ # Lists log lines for a task
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Time] :since The start time to get logs for
+ # @option opts [Time] :_until The end time to get logs for
+ # @option opts [String] :search A full-text search query to filter for
+ # @option opts [Array] :levels The log level(s) to include
+ # @option opts [V1LogLineOrderByDirection] :order_by_direction The direction to order by
+ # @option opts [Integer] :attempt The attempt number to filter for
+ # @return [Array<(V1LogLineList, Integer, Hash)>] V1LogLineList data, response status code and response headers
+ def v1_log_line_list_with_http_info(task, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: LogApi.v1_log_line_list ...'
+ end
+ # verify the required parameter 'task' is set
+ if @api_client.config.client_side_validation && task.nil?
+ fail ArgumentError, "Missing the required parameter 'task' when calling LogApi.v1_log_line_list"
+ end
+ if @api_client.config.client_side_validation && task.to_s.length > 36
+ fail ArgumentError, 'invalid value for "task" when calling LogApi.v1_log_line_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && task.to_s.length < 36
+ fail ArgumentError, 'invalid value for "task" when calling LogApi.v1_log_line_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tasks/{task}/logs'.sub('{' + 'task' + '}', CGI.escape(task.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'since'] = opts[:'since'] if !opts[:'since'].nil?
+ query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?
+ query_params[:'search'] = opts[:'search'] if !opts[:'search'].nil?
+ query_params[:'levels'] = @api_client.build_collection_param(opts[:'levels'], :multi) if !opts[:'levels'].nil?
+ query_params[:'order_by_direction'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+ query_params[:'attempt'] = opts[:'attempt'] if !opts[:'attempt'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1LogLineList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"LogApi.v1_log_line_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: LogApi#v1_log_line_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/metadata_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/metadata_api.rb
new file mode 100644
index 000000000..1a1f4bdaa
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/metadata_api.rb
@@ -0,0 +1,193 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class MetadataApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Get cloud metadata
+ # Gets metadata for the Hatchet cloud instance
+ # @param [Hash] opts the optional parameters
+ # @return [APIErrors]
+ def cloud_metadata_get(opts = {})
+ data, _status_code, _headers = cloud_metadata_get_with_http_info(opts)
+ data
+ end
+
+ # Get cloud metadata
+ # Gets metadata for the Hatchet cloud instance
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(APIErrors, Integer, Hash)>] APIErrors data, response status code and response headers
+ def cloud_metadata_get_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: MetadataApi.cloud_metadata_get ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/cloud/metadata'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'APIErrors'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"MetadataApi.cloud_metadata_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: MetadataApi#cloud_metadata_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get metadata
+ # Gets metadata for the Hatchet instance
+ # @param [Hash] opts the optional parameters
+ # @return [APIMeta]
+ def metadata_get(opts = {})
+ data, _status_code, _headers = metadata_get_with_http_info(opts)
+ data
+ end
+
+ # Get metadata
+ # Gets metadata for the Hatchet instance
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(APIMeta, Integer, Hash)>] APIMeta data, response status code and response headers
+ def metadata_get_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: MetadataApi.metadata_get ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/meta'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'APIMeta'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"MetadataApi.metadata_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: MetadataApi#metadata_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List integrations
+ # List all integrations
+ # @param [Hash] opts the optional parameters
+ # @return [Array]
+ def metadata_list_integrations(opts = {})
+ data, _status_code, _headers = metadata_list_integrations_with_http_info(opts)
+ data
+ end
+
+ # List integrations
+ # List all integrations
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Array, Integer, Hash)>] Array data, response status code and response headers
+ def metadata_list_integrations_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: MetadataApi.metadata_list_integrations ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/meta/integrations'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Array'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"MetadataApi.metadata_list_integrations",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: MetadataApi#metadata_list_integrations\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/rate_limits_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/rate_limits_api.rb
new file mode 100644
index 000000000..b575e92ce
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/rate_limits_api.rb
@@ -0,0 +1,108 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class RateLimitsApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # List rate limits
+ # Lists all rate limits for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [RateLimitOrderByField] :order_by_field What to order by
+ # @option opts [RateLimitOrderByDirection] :order_by_direction The order direction
+ # @return [RateLimitList]
+ def rate_limit_list(tenant, opts = {})
+ data, _status_code, _headers = rate_limit_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List rate limits
+ # Lists all rate limits for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :search The search query to filter for
+ # @option opts [RateLimitOrderByField] :order_by_field What to order by
+ # @option opts [RateLimitOrderByDirection] :order_by_direction The order direction
+ # @return [Array<(RateLimitList, Integer, Hash)>] RateLimitList data, response status code and response headers
+ def rate_limit_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: RateLimitsApi.rate_limit_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling RateLimitsApi.rate_limit_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling RateLimitsApi.rate_limit_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling RateLimitsApi.rate_limit_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/rate-limits'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'search'] = opts[:'search'] if !opts[:'search'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'RateLimitList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"RateLimitsApi.rate_limit_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: RateLimitsApi#rate_limit_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/slack_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/slack_api.rb
new file mode 100644
index 000000000..7e9df2b55
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/slack_api.rb
@@ -0,0 +1,164 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class SlackApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Delete Slack webhook
+ # Delete Slack webhook
+ # @param slack [String] The Slack webhook id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def slack_webhook_delete(slack, opts = {})
+ slack_webhook_delete_with_http_info(slack, opts)
+ nil
+ end
+
+ # Delete Slack webhook
+ # Delete Slack webhook
+ # @param slack [String] The Slack webhook id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def slack_webhook_delete_with_http_info(slack, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: SlackApi.slack_webhook_delete ...'
+ end
+ # verify the required parameter 'slack' is set
+ if @api_client.config.client_side_validation && slack.nil?
+ fail ArgumentError, "Missing the required parameter 'slack' when calling SlackApi.slack_webhook_delete"
+ end
+ if @api_client.config.client_side_validation && slack.to_s.length > 36
+ fail ArgumentError, 'invalid value for "slack" when calling SlackApi.slack_webhook_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && slack.to_s.length < 36
+ fail ArgumentError, 'invalid value for "slack" when calling SlackApi.slack_webhook_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/slack/{slack}'.sub('{' + 'slack' + '}', CGI.escape(slack.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"SlackApi.slack_webhook_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: SlackApi#slack_webhook_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List Slack integrations
+ # List Slack webhooks
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [ListSlackWebhooks]
+ def slack_webhook_list(tenant, opts = {})
+ data, _status_code, _headers = slack_webhook_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List Slack integrations
+ # List Slack webhooks
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ListSlackWebhooks, Integer, Hash)>] ListSlackWebhooks data, response status code and response headers
+ def slack_webhook_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: SlackApi.slack_webhook_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling SlackApi.slack_webhook_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SlackApi.slack_webhook_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SlackApi.slack_webhook_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/slack'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ListSlackWebhooks'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"SlackApi.slack_webhook_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: SlackApi#slack_webhook_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/sns_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/sns_api.rb
new file mode 100644
index 000000000..14132ce1c
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/sns_api.rb
@@ -0,0 +1,242 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class SNSApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create SNS integration
+ # Create SNS integration
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [CreateSNSIntegrationRequest] :create_sns_integration_request
+ # @return [SNSIntegration]
+ def sns_create(tenant, opts = {})
+ data, _status_code, _headers = sns_create_with_http_info(tenant, opts)
+ data
+ end
+
+ # Create SNS integration
+ # Create SNS integration
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [CreateSNSIntegrationRequest] :create_sns_integration_request
+ # @return [Array<(SNSIntegration, Integer, Hash)>] SNSIntegration data, response status code and response headers
+ def sns_create_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: SNSApi.sns_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling SNSApi.sns_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SNSApi.sns_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SNSApi.sns_create, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/sns'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'create_sns_integration_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'SNSIntegration'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"SNSApi.sns_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: SNSApi#sns_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete SNS integration
+ # Delete SNS integration
+ # @param sns [String] The SNS integration id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def sns_delete(sns, opts = {})
+ sns_delete_with_http_info(sns, opts)
+ nil
+ end
+
+ # Delete SNS integration
+ # Delete SNS integration
+ # @param sns [String] The SNS integration id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def sns_delete_with_http_info(sns, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: SNSApi.sns_delete ...'
+ end
+ # verify the required parameter 'sns' is set
+ if @api_client.config.client_side_validation && sns.nil?
+ fail ArgumentError, "Missing the required parameter 'sns' when calling SNSApi.sns_delete"
+ end
+ if @api_client.config.client_side_validation && sns.to_s.length > 36
+ fail ArgumentError, 'invalid value for "sns" when calling SNSApi.sns_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && sns.to_s.length < 36
+ fail ArgumentError, 'invalid value for "sns" when calling SNSApi.sns_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/sns/{sns}'.sub('{' + 'sns' + '}', CGI.escape(sns.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"SNSApi.sns_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: SNSApi#sns_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List SNS integrations
+ # List SNS integrations
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [ListSNSIntegrations]
+ def sns_list(tenant, opts = {})
+ data, _status_code, _headers = sns_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List SNS integrations
+ # List SNS integrations
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ListSNSIntegrations, Integer, Hash)>] ListSNSIntegrations data, response status code and response headers
+ def sns_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: SNSApi.sns_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling SNSApi.sns_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SNSApi.sns_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling SNSApi.sns_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/sns'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ListSNSIntegrations'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"SNSApi.sns_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: SNSApi#sns_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/step_run_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/step_run_api.rb
new file mode 100644
index 000000000..4407f26f8
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/step_run_api.rb
@@ -0,0 +1,615 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class StepRunApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Get step run
+ # Get a step run by id
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [StepRun]
+ def step_run_get(tenant, step_run, opts = {})
+ data, _status_code, _headers = step_run_get_with_http_info(tenant, step_run, opts)
+ data
+ end
+
+ # Get step run
+ # Get a step run by id
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(StepRun, Integer, Hash)>] StepRun data, response status code and response headers
+ def step_run_get_with_http_info(tenant, step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling StepRunApi.step_run_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_get"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/step-runs/{step-run}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRun'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get step run schema
+ # Get the schema for a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [Object]
+ def step_run_get_schema(tenant, step_run, opts = {})
+ data, _status_code, _headers = step_run_get_schema_with_http_info(tenant, step_run, opts)
+ data
+ end
+
+ # Get step run schema
+ # Get the schema for a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Object, Integer, Hash)>] Object data, response status code and response headers
+ def step_run_get_schema_with_http_info(tenant, step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_get_schema ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling StepRunApi.step_run_get_schema"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_get_schema, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_get_schema, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_get_schema"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_get_schema, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_get_schema, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/step-runs/{step-run}/schema'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Object'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_get_schema",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_get_schema\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List archives for step run
+ # List archives for a step run
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [StepRunArchiveList]
+ def step_run_list_archives(step_run, opts = {})
+ data, _status_code, _headers = step_run_list_archives_with_http_info(step_run, opts)
+ data
+ end
+
+ # List archives for step run
+ # List archives for a step run
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [Array<(StepRunArchiveList, Integer, Hash)>] StepRunArchiveList data, response status code and response headers
+ def step_run_list_archives_with_http_info(step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_list_archives ...'
+ end
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_list_archives"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_list_archives, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_list_archives, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/step-runs/{step-run}/archives'.sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRunArchiveList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_list_archives",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_list_archives\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List events for step run
+ # List events for a step run
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [StepRunEventList]
+ def step_run_list_events(step_run, opts = {})
+ data, _status_code, _headers = step_run_list_events_with_http_info(step_run, opts)
+ data
+ end
+
+ # List events for step run
+ # List events for a step run
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [Array<(StepRunEventList, Integer, Hash)>] StepRunEventList data, response status code and response headers
+ def step_run_list_events_with_http_info(step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_list_events ...'
+ end
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_list_events"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_list_events, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_list_events, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/step-runs/{step-run}/events'.sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRunEventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_list_events",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_list_events\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Attempts to cancel a step run
+ # Attempts to cancel a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [StepRun]
+ def step_run_update_cancel(tenant, step_run, opts = {})
+ data, _status_code, _headers = step_run_update_cancel_with_http_info(tenant, step_run, opts)
+ data
+ end
+
+ # Attempts to cancel a step run
+ # Attempts to cancel a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(StepRun, Integer, Hash)>] StepRun data, response status code and response headers
+ def step_run_update_cancel_with_http_info(tenant, step_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_update_cancel ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling StepRunApi.step_run_update_cancel"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_update_cancel, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_update_cancel, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_update_cancel"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_update_cancel, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_update_cancel, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/step-runs/{step-run}/cancel'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRun'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_update_cancel",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_update_cancel\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Rerun step run
+ # Reruns a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param rerun_step_run_request [RerunStepRunRequest] The input to the rerun
+ # @param [Hash] opts the optional parameters
+ # @return [StepRun]
+ def step_run_update_rerun(tenant, step_run, rerun_step_run_request, opts = {})
+ data, _status_code, _headers = step_run_update_rerun_with_http_info(tenant, step_run, rerun_step_run_request, opts)
+ data
+ end
+
+ # Rerun step run
+ # Reruns a step run
+ # @param tenant [String] The tenant id
+ # @param step_run [String] The step run id
+ # @param rerun_step_run_request [RerunStepRunRequest] The input to the rerun
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(StepRun, Integer, Hash)>] StepRun data, response status code and response headers
+ def step_run_update_rerun_with_http_info(tenant, step_run, rerun_step_run_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.step_run_update_rerun ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling StepRunApi.step_run_update_rerun"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_update_rerun, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.step_run_update_rerun, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'step_run' is set
+ if @api_client.config.client_side_validation && step_run.nil?
+ fail ArgumentError, "Missing the required parameter 'step_run' when calling StepRunApi.step_run_update_rerun"
+ end
+ if @api_client.config.client_side_validation && step_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_update_rerun, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && step_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "step_run" when calling StepRunApi.step_run_update_rerun, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'rerun_step_run_request' is set
+ if @api_client.config.client_side_validation && rerun_step_run_request.nil?
+ fail ArgumentError, "Missing the required parameter 'rerun_step_run_request' when calling StepRunApi.step_run_update_rerun"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/step-runs/{step-run}/rerun'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'step-run' + '}', CGI.escape(step_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(rerun_step_run_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRun'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.step_run_update_rerun",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#step_run_update_rerun\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List events for all step runs for a workflow run
+ # List events for all step runs for a workflow run
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :last_id Last ID of the last event
+ # @return [StepRunEventList]
+ def workflow_run_list_step_run_events(tenant, workflow_run, opts = {})
+ data, _status_code, _headers = workflow_run_list_step_run_events_with_http_info(tenant, workflow_run, opts)
+ data
+ end
+
+ # List events for all step runs for a workflow run
+ # List events for all step runs for a workflow run
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :last_id Last ID of the last event
+ # @return [Array<(StepRunEventList, Integer, Hash)>] StepRunEventList data, response status code and response headers
+ def workflow_run_list_step_run_events_with_http_info(tenant, workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: StepRunApi.workflow_run_list_step_run_events ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling StepRunApi.workflow_run_list_step_run_events"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.workflow_run_list_step_run_events, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling StepRunApi.workflow_run_list_step_run_events, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_run' is set
+ if @api_client.config.client_side_validation && workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_run' when calling StepRunApi.workflow_run_list_step_run_events"
+ end
+ if @api_client.config.client_side_validation && workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling StepRunApi.workflow_run_list_step_run_events, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling StepRunApi.workflow_run_list_step_run_events, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/step-run-events'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow-run' + '}', CGI.escape(workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'lastId'] = opts[:'last_id'] if !opts[:'last_id'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'StepRunEventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"StepRunApi.workflow_run_list_step_run_events",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: StepRunApi#workflow_run_list_step_run_events\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/task_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/task_api.rb
new file mode 100644
index 000000000..f8097885e
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/task_api.rb
@@ -0,0 +1,602 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class TaskApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # List tasks
+ # Lists all tasks that belong a specific list of dags
+ # @param dag_ids [Array] The external id of the DAG
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array]
+ def v1_dag_list_tasks(dag_ids, tenant, opts = {})
+ data, _status_code, _headers = v1_dag_list_tasks_with_http_info(dag_ids, tenant, opts)
+ data
+ end
+
+ # List tasks
+ # Lists all tasks that belong a specific list of dags
+ # @param dag_ids [Array] The external id of the DAG
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Array, Integer, Hash)>] Array data, response status code and response headers
+ def v1_dag_list_tasks_with_http_info(dag_ids, tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_dag_list_tasks ...'
+ end
+ # verify the required parameter 'dag_ids' is set
+ if @api_client.config.client_side_validation && dag_ids.nil?
+ fail ArgumentError, "Missing the required parameter 'dag_ids' when calling TaskApi.v1_dag_list_tasks"
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TaskApi.v1_dag_list_tasks"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_dag_list_tasks, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_dag_list_tasks, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/dags/tasks'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'dag_ids'] = @api_client.build_collection_param(dag_ids, :multi)
+ query_params[:'tenant'] = tenant
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Array'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_dag_list_tasks",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_dag_list_tasks\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Cancel tasks
+ # Cancel tasks
+ # @param tenant [String] The tenant id
+ # @param v1_cancel_task_request [V1CancelTaskRequest] The tasks to cancel
+ # @param [Hash] opts the optional parameters
+ # @return [V1CancelledTasks]
+ def v1_task_cancel(tenant, v1_cancel_task_request, opts = {})
+ data, _status_code, _headers = v1_task_cancel_with_http_info(tenant, v1_cancel_task_request, opts)
+ data
+ end
+
+ # Cancel tasks
+ # Cancel tasks
+ # @param tenant [String] The tenant id
+ # @param v1_cancel_task_request [V1CancelTaskRequest] The tasks to cancel
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1CancelledTasks, Integer, Hash)>] V1CancelledTasks data, response status code and response headers
+ def v1_task_cancel_with_http_info(tenant, v1_cancel_task_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_cancel ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TaskApi.v1_task_cancel"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_cancel, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_cancel, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_cancel_task_request' is set
+ if @api_client.config.client_side_validation && v1_cancel_task_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_cancel_task_request' when calling TaskApi.v1_task_cancel"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/tasks/cancel'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_cancel_task_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1CancelledTasks'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_cancel",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_cancel\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List events for a task
+ # List events for a task
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [V1TaskEventList]
+ def v1_task_event_list(task, opts = {})
+ data, _status_code, _headers = v1_task_event_list_with_http_info(task, opts)
+ data
+ end
+
+ # List events for a task
+ # List events for a task
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [Array<(V1TaskEventList, Integer, Hash)>] V1TaskEventList data, response status code and response headers
+ def v1_task_event_list_with_http_info(task, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_event_list ...'
+ end
+ # verify the required parameter 'task' is set
+ if @api_client.config.client_side_validation && task.nil?
+ fail ArgumentError, "Missing the required parameter 'task' when calling TaskApi.v1_task_event_list"
+ end
+ if @api_client.config.client_side_validation && task.to_s.length > 36
+ fail ArgumentError, 'invalid value for "task" when calling TaskApi.v1_task_event_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && task.to_s.length < 36
+ fail ArgumentError, 'invalid value for "task" when calling TaskApi.v1_task_event_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tasks/{task}/task-events'.sub('{' + 'task' + '}', CGI.escape(task.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskEventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_event_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_event_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get a task
+ # Get a task by id
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :attempt The attempt number
+ # @return [V1TaskSummary]
+ def v1_task_get(task, opts = {})
+ data, _status_code, _headers = v1_task_get_with_http_info(task, opts)
+ data
+ end
+
+ # Get a task
+ # Get a task by id
+ # @param task [String] The task id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :attempt The attempt number
+ # @return [Array<(V1TaskSummary, Integer, Hash)>] V1TaskSummary data, response status code and response headers
+ def v1_task_get_with_http_info(task, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_get ...'
+ end
+ # verify the required parameter 'task' is set
+ if @api_client.config.client_side_validation && task.nil?
+ fail ArgumentError, "Missing the required parameter 'task' when calling TaskApi.v1_task_get"
+ end
+ if @api_client.config.client_side_validation && task.to_s.length > 36
+ fail ArgumentError, 'invalid value for "task" when calling TaskApi.v1_task_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && task.to_s.length < 36
+ fail ArgumentError, 'invalid value for "task" when calling TaskApi.v1_task_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tasks/{task}'.sub('{' + 'task' + '}', CGI.escape(task.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'attempt'] = opts[:'attempt'] if !opts[:'attempt'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskSummary'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get task point metrics
+ # Get a minute by minute breakdown of task metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Time] :created_after The time after the task was created
+ # @option opts [Time] :finished_before The time before the task was completed
+ # @return [V1TaskPointMetrics]
+ def v1_task_get_point_metrics(tenant, opts = {})
+ data, _status_code, _headers = v1_task_get_point_metrics_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get task point metrics
+ # Get a minute by minute breakdown of task metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Time] :created_after The time after the task was created
+ # @option opts [Time] :finished_before The time before the task was completed
+ # @return [Array<(V1TaskPointMetrics, Integer, Hash)>] V1TaskPointMetrics data, response status code and response headers
+ def v1_task_get_point_metrics_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_get_point_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TaskApi.v1_task_get_point_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_get_point_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_get_point_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/task-point-metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'createdAfter'] = opts[:'created_after'] if !opts[:'created_after'].nil?
+ query_params[:'finishedBefore'] = opts[:'finished_before'] if !opts[:'finished_before'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskPointMetrics'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_get_point_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_get_point_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get task metrics
+ # Get a summary of task run metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param since [Time] The start time to get metrics for
+ # @param [Hash] opts the optional parameters
+ # @option opts [Time] :_until The end time to get metrics for
+ # @option opts [Array] :workflow_ids The workflow id to find runs for
+ # @option opts [String] :parent_task_external_id The parent task's external id
+ # @option opts [String] :triggering_event_external_id The id of the event that triggered the task
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @return [Array]
+ def v1_task_list_status_metrics(tenant, since, opts = {})
+ data, _status_code, _headers = v1_task_list_status_metrics_with_http_info(tenant, since, opts)
+ data
+ end
+
+ # Get task metrics
+ # Get a summary of task run metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param since [Time] The start time to get metrics for
+ # @param [Hash] opts the optional parameters
+ # @option opts [Time] :_until The end time to get metrics for
+ # @option opts [Array] :workflow_ids The workflow id to find runs for
+ # @option opts [String] :parent_task_external_id The parent task's external id
+ # @option opts [String] :triggering_event_external_id The id of the event that triggered the task
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @return [Array<(Array, Integer, Hash)>] Array data, response status code and response headers
+ def v1_task_list_status_metrics_with_http_info(tenant, since, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_list_status_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TaskApi.v1_task_list_status_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_list_status_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_list_status_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'since' is set
+ if @api_client.config.client_side_validation && since.nil?
+ fail ArgumentError, "Missing the required parameter 'since' when calling TaskApi.v1_task_list_status_metrics"
+ end
+ if @api_client.config.client_side_validation && !opts[:'parent_task_external_id'].nil? && opts[:'parent_task_external_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_task_external_id"]" when calling TaskApi.v1_task_list_status_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_task_external_id'].nil? && opts[:'parent_task_external_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_task_external_id"]" when calling TaskApi.v1_task_list_status_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'triggering_event_external_id'].nil? && opts[:'triggering_event_external_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"triggering_event_external_id"]" when calling TaskApi.v1_task_list_status_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'triggering_event_external_id'].nil? && opts[:'triggering_event_external_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"triggering_event_external_id"]" when calling TaskApi.v1_task_list_status_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/task-metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'since'] = since
+ query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?
+ query_params[:'workflow_ids'] = @api_client.build_collection_param(opts[:'workflow_ids'], :multi) if !opts[:'workflow_ids'].nil?
+ query_params[:'parent_task_external_id'] = opts[:'parent_task_external_id'] if !opts[:'parent_task_external_id'].nil?
+ query_params[:'triggering_event_external_id'] = opts[:'triggering_event_external_id'] if !opts[:'triggering_event_external_id'].nil?
+ query_params[:'additional_metadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Array'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_list_status_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_list_status_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Replay tasks
+ # Replay tasks
+ # @param tenant [String] The tenant id
+ # @param v1_replay_task_request [V1ReplayTaskRequest] The tasks to replay
+ # @param [Hash] opts the optional parameters
+ # @return [V1ReplayedTasks]
+ def v1_task_replay(tenant, v1_replay_task_request, opts = {})
+ data, _status_code, _headers = v1_task_replay_with_http_info(tenant, v1_replay_task_request, opts)
+ data
+ end
+
+ # Replay tasks
+ # Replay tasks
+ # @param tenant [String] The tenant id
+ # @param v1_replay_task_request [V1ReplayTaskRequest] The tasks to replay
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1ReplayedTasks, Integer, Hash)>] V1ReplayedTasks data, response status code and response headers
+ def v1_task_replay_with_http_info(tenant, v1_replay_task_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TaskApi.v1_task_replay ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TaskApi.v1_task_replay"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_replay, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TaskApi.v1_task_replay, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_replay_task_request' is set
+ if @api_client.config.client_side_validation && v1_replay_task_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_replay_task_request' when calling TaskApi.v1_task_replay"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/tasks/replay'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_replay_task_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1ReplayedTasks'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TaskApi.v1_task_replay",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TaskApi#v1_task_replay\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/tenant_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/tenant_api.rb
new file mode 100644
index 000000000..c76fa43c7
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/tenant_api.rb
@@ -0,0 +1,1494 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class TenantApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create tenant alert email group
+ # Creates a new tenant alert email group
+ # @param tenant [String] The tenant id
+ # @param create_tenant_alert_email_group_request [CreateTenantAlertEmailGroupRequest] The tenant alert email group to create
+ # @param [Hash] opts the optional parameters
+ # @return [TenantAlertEmailGroup]
+ def alert_email_group_create(tenant, create_tenant_alert_email_group_request, opts = {})
+ data, _status_code, _headers = alert_email_group_create_with_http_info(tenant, create_tenant_alert_email_group_request, opts)
+ data
+ end
+
+ # Create tenant alert email group
+ # Creates a new tenant alert email group
+ # @param tenant [String] The tenant id
+ # @param create_tenant_alert_email_group_request [CreateTenantAlertEmailGroupRequest] The tenant alert email group to create
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantAlertEmailGroup, Integer, Hash)>] TenantAlertEmailGroup data, response status code and response headers
+ def alert_email_group_create_with_http_info(tenant, create_tenant_alert_email_group_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.alert_email_group_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.alert_email_group_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.alert_email_group_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.alert_email_group_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'create_tenant_alert_email_group_request' is set
+ if @api_client.config.client_side_validation && create_tenant_alert_email_group_request.nil?
+ fail ArgumentError, "Missing the required parameter 'create_tenant_alert_email_group_request' when calling TenantApi.alert_email_group_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/alerting-email-groups'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(create_tenant_alert_email_group_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantAlertEmailGroup'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.alert_email_group_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#alert_email_group_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete tenant alert email group
+ # Deletes a tenant alert email group
+ # @param alert_email_group [String] The tenant alert email group id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def alert_email_group_delete(alert_email_group, opts = {})
+ alert_email_group_delete_with_http_info(alert_email_group, opts)
+ nil
+ end
+
+ # Delete tenant alert email group
+ # Deletes a tenant alert email group
+ # @param alert_email_group [String] The tenant alert email group id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def alert_email_group_delete_with_http_info(alert_email_group, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.alert_email_group_delete ...'
+ end
+ # verify the required parameter 'alert_email_group' is set
+ if @api_client.config.client_side_validation && alert_email_group.nil?
+ fail ArgumentError, "Missing the required parameter 'alert_email_group' when calling TenantApi.alert_email_group_delete"
+ end
+ if @api_client.config.client_side_validation && alert_email_group.to_s.length > 36
+ fail ArgumentError, 'invalid value for "alert_email_group" when calling TenantApi.alert_email_group_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && alert_email_group.to_s.length < 36
+ fail ArgumentError, 'invalid value for "alert_email_group" when calling TenantApi.alert_email_group_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/alerting-email-groups/{alert-email-group}'.sub('{' + 'alert-email-group' + '}', CGI.escape(alert_email_group.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.alert_email_group_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#alert_email_group_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List tenant alert email groups
+ # Gets a list of tenant alert email groups
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantAlertEmailGroupList]
+ def alert_email_group_list(tenant, opts = {})
+ data, _status_code, _headers = alert_email_group_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List tenant alert email groups
+ # Gets a list of tenant alert email groups
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantAlertEmailGroupList, Integer, Hash)>] TenantAlertEmailGroupList data, response status code and response headers
+ def alert_email_group_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.alert_email_group_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.alert_email_group_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.alert_email_group_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.alert_email_group_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/alerting-email-groups'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantAlertEmailGroupList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.alert_email_group_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#alert_email_group_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update tenant alert email group
+ # Updates a tenant alert email group
+ # @param alert_email_group [String] The tenant alert email group id
+ # @param update_tenant_alert_email_group_request [UpdateTenantAlertEmailGroupRequest] The tenant alert email group to update
+ # @param [Hash] opts the optional parameters
+ # @return [TenantAlertEmailGroup]
+ def alert_email_group_update(alert_email_group, update_tenant_alert_email_group_request, opts = {})
+ data, _status_code, _headers = alert_email_group_update_with_http_info(alert_email_group, update_tenant_alert_email_group_request, opts)
+ data
+ end
+
+ # Update tenant alert email group
+ # Updates a tenant alert email group
+ # @param alert_email_group [String] The tenant alert email group id
+ # @param update_tenant_alert_email_group_request [UpdateTenantAlertEmailGroupRequest] The tenant alert email group to update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantAlertEmailGroup, Integer, Hash)>] TenantAlertEmailGroup data, response status code and response headers
+ def alert_email_group_update_with_http_info(alert_email_group, update_tenant_alert_email_group_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.alert_email_group_update ...'
+ end
+ # verify the required parameter 'alert_email_group' is set
+ if @api_client.config.client_side_validation && alert_email_group.nil?
+ fail ArgumentError, "Missing the required parameter 'alert_email_group' when calling TenantApi.alert_email_group_update"
+ end
+ if @api_client.config.client_side_validation && alert_email_group.to_s.length > 36
+ fail ArgumentError, 'invalid value for "alert_email_group" when calling TenantApi.alert_email_group_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && alert_email_group.to_s.length < 36
+ fail ArgumentError, 'invalid value for "alert_email_group" when calling TenantApi.alert_email_group_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_tenant_alert_email_group_request' is set
+ if @api_client.config.client_side_validation && update_tenant_alert_email_group_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_tenant_alert_email_group_request' when calling TenantApi.alert_email_group_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/alerting-email-groups/{alert-email-group}'.sub('{' + 'alert-email-group' + '}', CGI.escape(alert_email_group.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_tenant_alert_email_group_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantAlertEmailGroup'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.alert_email_group_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#alert_email_group_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get tenant alerting settings
+ # Gets the alerting settings for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantAlertingSettings]
+ def tenant_alerting_settings_get(tenant, opts = {})
+ data, _status_code, _headers = tenant_alerting_settings_get_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get tenant alerting settings
+ # Gets the alerting settings for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantAlertingSettings, Integer, Hash)>] TenantAlertingSettings data, response status code and response headers
+ def tenant_alerting_settings_get_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_alerting_settings_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_alerting_settings_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_alerting_settings_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_alerting_settings_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/alerting/settings'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantAlertingSettings'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_alerting_settings_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_alerting_settings_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Create tenant
+ # Creates a new tenant
+ # @param create_tenant_request [CreateTenantRequest] The tenant to create
+ # @param [Hash] opts the optional parameters
+ # @return [Tenant]
+ def tenant_create(create_tenant_request, opts = {})
+ data, _status_code, _headers = tenant_create_with_http_info(create_tenant_request, opts)
+ data
+ end
+
+ # Create tenant
+ # Creates a new tenant
+ # @param create_tenant_request [CreateTenantRequest] The tenant to create
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Tenant, Integer, Hash)>] Tenant data, response status code and response headers
+ def tenant_create_with_http_info(create_tenant_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_create ...'
+ end
+ # verify the required parameter 'create_tenant_request' is set
+ if @api_client.config.client_side_validation && create_tenant_request.nil?
+ fail ArgumentError, "Missing the required parameter 'create_tenant_request' when calling TenantApi.tenant_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(create_tenant_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Tenant'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get tenant
+ # Get the details of a tenant
+ # @param tenant [String] The tenant id to get details for
+ # @param [Hash] opts the optional parameters
+ # @return [Tenant]
+ def tenant_get(tenant, opts = {})
+ data, _status_code, _headers = tenant_get_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get tenant
+ # Get the details of a tenant
+ # @param tenant [String] The tenant id to get details for
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Tenant, Integer, Hash)>] Tenant data, response status code and response headers
+ def tenant_get_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Tenant'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get prometheus metrics
+ # Get the prometheus metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [String]
+ def tenant_get_prometheus_metrics(tenant, opts = {})
+ data, _status_code, _headers = tenant_get_prometheus_metrics_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get prometheus metrics
+ # Get the prometheus metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(String, Integer, Hash)>] String data, response status code and response headers
+ def tenant_get_prometheus_metrics_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_get_prometheus_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_get_prometheus_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_prometheus_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_prometheus_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/prometheus-metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['text/plain', 'application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'String'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_get_prometheus_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_get_prometheus_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get step run metrics
+ # Get the queue metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantStepRunQueueMetrics]
+ def tenant_get_step_run_queue_metrics(tenant, opts = {})
+ data, _status_code, _headers = tenant_get_step_run_queue_metrics_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get step run metrics
+ # Get the queue metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantStepRunQueueMetrics, Integer, Hash)>] TenantStepRunQueueMetrics data, response status code and response headers
+ def tenant_get_step_run_queue_metrics_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_get_step_run_queue_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_get_step_run_queue_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_step_run_queue_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_step_run_queue_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/step-run-queue-metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantStepRunQueueMetrics'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_get_step_run_queue_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_get_step_run_queue_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get task stats for tenant
+ # Get task stats for tenant
+ # @param tenant [String] The tenant ID
+ # @param [Hash] opts the optional parameters
+ # @return [Hash]
+ def tenant_get_task_stats(tenant, opts = {})
+ data, _status_code, _headers = tenant_get_task_stats_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get task stats for tenant
+ # Get task stats for tenant
+ # @param tenant [String] The tenant ID
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Hash, Integer, Hash)>] Hash data, response status code and response headers
+ def tenant_get_task_stats_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_get_task_stats ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_get_task_stats"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_task_stats, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_get_task_stats, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/task-stats'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Hash'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_get_task_stats",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_get_task_stats\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Accept tenant invite
+ # Accepts a tenant invite
+ # @param [Hash] opts the optional parameters
+ # @option opts [AcceptInviteRequest] :accept_invite_request
+ # @return [nil]
+ def tenant_invite_accept(opts = {})
+ tenant_invite_accept_with_http_info(opts)
+ nil
+ end
+
+ # Accept tenant invite
+ # Accepts a tenant invite
+ # @param [Hash] opts the optional parameters
+ # @option opts [AcceptInviteRequest] :accept_invite_request
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def tenant_invite_accept_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_invite_accept ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/invites/accept'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'accept_invite_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_invite_accept",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_invite_accept\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Create tenant invite
+ # Creates a new tenant invite
+ # @param tenant [String] The tenant id
+ # @param create_tenant_invite_request [CreateTenantInviteRequest] The tenant invite to create
+ # @param [Hash] opts the optional parameters
+ # @return [TenantInvite]
+ def tenant_invite_create(tenant, create_tenant_invite_request, opts = {})
+ data, _status_code, _headers = tenant_invite_create_with_http_info(tenant, create_tenant_invite_request, opts)
+ data
+ end
+
+ # Create tenant invite
+ # Creates a new tenant invite
+ # @param tenant [String] The tenant id
+ # @param create_tenant_invite_request [CreateTenantInviteRequest] The tenant invite to create
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantInvite, Integer, Hash)>] TenantInvite data, response status code and response headers
+ def tenant_invite_create_with_http_info(tenant, create_tenant_invite_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_invite_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_invite_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_invite_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_invite_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'create_tenant_invite_request' is set
+ if @api_client.config.client_side_validation && create_tenant_invite_request.nil?
+ fail ArgumentError, "Missing the required parameter 'create_tenant_invite_request' when calling TenantApi.tenant_invite_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/invites'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(create_tenant_invite_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantInvite'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_invite_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_invite_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List tenant invites
+ # Gets a list of tenant invites
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantInviteList]
+ def tenant_invite_list(tenant, opts = {})
+ data, _status_code, _headers = tenant_invite_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List tenant invites
+ # Gets a list of tenant invites
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantInviteList, Integer, Hash)>] TenantInviteList data, response status code and response headers
+ def tenant_invite_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_invite_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_invite_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_invite_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_invite_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/invites'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantInviteList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_invite_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_invite_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Reject tenant invite
+ # Rejects a tenant invite
+ # @param [Hash] opts the optional parameters
+ # @option opts [RejectInviteRequest] :reject_invite_request
+ # @return [nil]
+ def tenant_invite_reject(opts = {})
+ tenant_invite_reject_with_http_info(opts)
+ nil
+ end
+
+ # Reject tenant invite
+ # Rejects a tenant invite
+ # @param [Hash] opts the optional parameters
+ # @option opts [RejectInviteRequest] :reject_invite_request
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def tenant_invite_reject_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_invite_reject ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/invites/reject'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'reject_invite_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_invite_reject",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_invite_reject\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete a tenant member
+ # Delete a member from a tenant
+ # @param tenant [String] The tenant id
+ # @param member [String] The tenant member id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantMember]
+ def tenant_member_delete(tenant, member, opts = {})
+ data, _status_code, _headers = tenant_member_delete_with_http_info(tenant, member, opts)
+ data
+ end
+
+ # Delete a tenant member
+ # Delete a member from a tenant
+ # @param tenant [String] The tenant id
+ # @param member [String] The tenant member id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantMember, Integer, Hash)>] TenantMember data, response status code and response headers
+ def tenant_member_delete_with_http_info(tenant, member, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_member_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_member_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'member' is set
+ if @api_client.config.client_side_validation && member.nil?
+ fail ArgumentError, "Missing the required parameter 'member' when calling TenantApi.tenant_member_delete"
+ end
+ if @api_client.config.client_side_validation && member.to_s.length > 36
+ fail ArgumentError, 'invalid value for "member" when calling TenantApi.tenant_member_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && member.to_s.length < 36
+ fail ArgumentError, 'invalid value for "member" when calling TenantApi.tenant_member_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/members/{member}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'member' + '}', CGI.escape(member.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantMember'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_member_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_member_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List tenant members
+ # Gets a list of tenant members
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantMemberList]
+ def tenant_member_list(tenant, opts = {})
+ data, _status_code, _headers = tenant_member_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List tenant members
+ # Gets a list of tenant members
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantMemberList, Integer, Hash)>] TenantMemberList data, response status code and response headers
+ def tenant_member_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_member_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_member_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/members'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantMemberList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_member_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_member_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update a tenant member
+ # Update a tenant member
+ # @param tenant [String] The tenant id
+ # @param member [String] The tenant member id
+ # @param update_tenant_member_request [UpdateTenantMemberRequest] The tenant member properties to update
+ # @param [Hash] opts the optional parameters
+ # @return [TenantMember]
+ def tenant_member_update(tenant, member, update_tenant_member_request, opts = {})
+ data, _status_code, _headers = tenant_member_update_with_http_info(tenant, member, update_tenant_member_request, opts)
+ data
+ end
+
+ # Update a tenant member
+ # Update a tenant member
+ # @param tenant [String] The tenant id
+ # @param member [String] The tenant member id
+ # @param update_tenant_member_request [UpdateTenantMemberRequest] The tenant member properties to update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantMember, Integer, Hash)>] TenantMember data, response status code and response headers
+ def tenant_member_update_with_http_info(tenant, member, update_tenant_member_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_member_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_member_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_member_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'member' is set
+ if @api_client.config.client_side_validation && member.nil?
+ fail ArgumentError, "Missing the required parameter 'member' when calling TenantApi.tenant_member_update"
+ end
+ if @api_client.config.client_side_validation && member.to_s.length > 36
+ fail ArgumentError, 'invalid value for "member" when calling TenantApi.tenant_member_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && member.to_s.length < 36
+ fail ArgumentError, 'invalid value for "member" when calling TenantApi.tenant_member_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_tenant_member_request' is set
+ if @api_client.config.client_side_validation && update_tenant_member_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_tenant_member_request' when calling TenantApi.tenant_member_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/members/{member}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'member' + '}', CGI.escape(member.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_tenant_member_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantMember'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_member_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_member_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get tenant resource policy
+ # Gets the resource policy for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [TenantResourcePolicy]
+ def tenant_resource_policy_get(tenant, opts = {})
+ data, _status_code, _headers = tenant_resource_policy_get_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get tenant resource policy
+ # Gets the resource policy for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantResourcePolicy, Integer, Hash)>] TenantResourcePolicy data, response status code and response headers
+ def tenant_resource_policy_get_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_resource_policy_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_resource_policy_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_resource_policy_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_resource_policy_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/resource-policy'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantResourcePolicy'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_resource_policy_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_resource_policy_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update tenant
+ # Update an existing tenant
+ # @param tenant [String] The tenant id
+ # @param update_tenant_request [UpdateTenantRequest] The tenant properties to update
+ # @param [Hash] opts the optional parameters
+ # @return [Tenant]
+ def tenant_update(tenant, update_tenant_request, opts = {})
+ data, _status_code, _headers = tenant_update_with_http_info(tenant, update_tenant_request, opts)
+ data
+ end
+
+ # Update tenant
+ # Update an existing tenant
+ # @param tenant [String] The tenant id
+ # @param update_tenant_request [UpdateTenantRequest] The tenant properties to update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Tenant, Integer, Hash)>] Tenant data, response status code and response headers
+ def tenant_update_with_http_info(tenant, update_tenant_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.tenant_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling TenantApi.tenant_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling TenantApi.tenant_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_tenant_request' is set
+ if @api_client.config.client_side_validation && update_tenant_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_tenant_request' when calling TenantApi.tenant_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_tenant_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Tenant'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.tenant_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#tenant_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List tenant invites
+ # Lists all tenant invites for the current user
+ # @param [Hash] opts the optional parameters
+ # @return [TenantInviteList]
+ def user_list_tenant_invites(opts = {})
+ data, _status_code, _headers = user_list_tenant_invites_with_http_info(opts)
+ data
+ end
+
+ # List tenant invites
+ # Lists all tenant invites for the current user
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(TenantInviteList, Integer, Hash)>] TenantInviteList data, response status code and response headers
+ def user_list_tenant_invites_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: TenantApi.user_list_tenant_invites ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/invites'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantInviteList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"TenantApi.user_list_tenant_invites",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: TenantApi#user_list_tenant_invites\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/user_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/user_api.rb
new file mode 100644
index 000000000..a2ca3f8ae
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/user_api.rb
@@ -0,0 +1,729 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class UserApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # List tenant memberships
+ # Lists all tenant memberships for the current user
+ # @param [Hash] opts the optional parameters
+ # @return [UserTenantMembershipsList]
+ def tenant_memberships_list(opts = {})
+ data, _status_code, _headers = tenant_memberships_list_with_http_info(opts)
+ data
+ end
+
+ # List tenant memberships
+ # Lists all tenant memberships for the current user
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(UserTenantMembershipsList, Integer, Hash)>] UserTenantMembershipsList data, response status code and response headers
+ def tenant_memberships_list_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.tenant_memberships_list ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/memberships'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'UserTenantMembershipsList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.tenant_memberships_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#tenant_memberships_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Register user
+ # Registers a user.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserRegisterRequest] :user_register_request
+ # @return [User]
+ def user_create(opts = {})
+ data, _status_code, _headers = user_create_with_http_info(opts)
+ data
+ end
+
+ # Register user
+ # Registers a user.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserRegisterRequest] :user_register_request
+ # @return [Array<(User, Integer, Hash)>] User data, response status code and response headers
+ def user_create_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_create ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/register'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'user_register_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'User'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get current user
+ # Gets the current user
+ # @param [Hash] opts the optional parameters
+ # @return [User]
+ def user_get_current(opts = {})
+ data, _status_code, _headers = user_get_current_with_http_info(opts)
+ data
+ end
+
+ # Get current user
+ # Gets the current user
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(User, Integer, Hash)>] User data, response status code and response headers
+ def user_get_current_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_get_current ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/current'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'User'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_get_current",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_get_current\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_github_oauth_callback(opts = {})
+ user_update_github_oauth_callback_with_http_info(opts)
+ nil
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_github_oauth_callback_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_github_oauth_callback ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/github/callback'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_github_oauth_callback",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_github_oauth_callback\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_github_oauth_start(opts = {})
+ user_update_github_oauth_start_with_http_info(opts)
+ nil
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_github_oauth_start_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_github_oauth_start ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/github/start'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_github_oauth_start",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_github_oauth_start\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_google_oauth_callback(opts = {})
+ user_update_google_oauth_callback_with_http_info(opts)
+ nil
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_google_oauth_callback_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_google_oauth_callback ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/google/callback'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_google_oauth_callback",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_google_oauth_callback\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_google_oauth_start(opts = {})
+ user_update_google_oauth_start_with_http_info(opts)
+ nil
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_google_oauth_start_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_google_oauth_start ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/google/start'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_google_oauth_start",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_google_oauth_start\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Login user
+ # Logs in a user.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserLoginRequest] :user_login_request
+ # @return [User]
+ def user_update_login(opts = {})
+ data, _status_code, _headers = user_update_login_with_http_info(opts)
+ data
+ end
+
+ # Login user
+ # Logs in a user.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserLoginRequest] :user_login_request
+ # @return [Array<(User, Integer, Hash)>] User data, response status code and response headers
+ def user_update_login_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_login ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/login'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'user_login_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'User'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_login",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_login\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Logout user
+ # Logs out a user.
+ # @param [Hash] opts the optional parameters
+ # @return [User]
+ def user_update_logout(opts = {})
+ data, _status_code, _headers = user_update_logout_with_http_info(opts)
+ data
+ end
+
+ # Logout user
+ # Logs out a user.
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(User, Integer, Hash)>] User data, response status code and response headers
+ def user_update_logout_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_logout ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/logout'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'User'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_logout",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_logout\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Change user password
+ # Updates a user's password.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserChangePasswordRequest] :user_change_password_request
+ # @return [User]
+ def user_update_password(opts = {})
+ data, _status_code, _headers = user_update_password_with_http_info(opts)
+ data
+ end
+
+ # Change user password
+ # Update a user password.
+ # @param [Hash] opts the optional parameters
+ # @option opts [UserChangePasswordRequest] :user_change_password_request
+ # @return [Array<(User, Integer, Hash)>] User data, response status code and response headers
+ def user_update_password_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_password ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/password'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(opts[:'user_change_password_request'])
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'User'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_password",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_password\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_slack_oauth_callback(opts = {})
+ user_update_slack_oauth_callback_with_http_info(opts)
+ nil
+ end
+
+ # Complete OAuth flow
+ # Completes the OAuth flow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_slack_oauth_callback_with_http_info(opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_slack_oauth_callback ...'
+ end
+ # resource path
+ local_var_path = '/api/v1/users/slack/callback'
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_slack_oauth_callback",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_slack_oauth_callback\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def user_update_slack_oauth_start(tenant, opts = {})
+ user_update_slack_oauth_start_with_http_info(tenant, opts)
+ nil
+ end
+
+ # Start OAuth flow
+ # Starts the OAuth flow
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def user_update_slack_oauth_start_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: UserApi.user_update_slack_oauth_start ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling UserApi.user_update_slack_oauth_start"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling UserApi.user_update_slack_oauth_start, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling UserApi.user_update_slack_oauth_start, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/slack/start'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth']
+
+ new_options = opts.merge(
+ :operation => :"UserApi.user_update_slack_oauth_start",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: UserApi#user_update_slack_oauth_start\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/webhook_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/webhook_api.rb
new file mode 100644
index 000000000..9aa002046
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/webhook_api.rb
@@ -0,0 +1,504 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class WebhookApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create a webhook
+ # Create a new webhook
+ # @param tenant [String] The tenant id
+ # @param v1_create_webhook_request [V1CreateWebhookRequest] The input to the webhook creation
+ # @param [Hash] opts the optional parameters
+ # @return [V1Webhook]
+ def v1_webhook_create(tenant, v1_create_webhook_request, opts = {})
+ data, _status_code, _headers = v1_webhook_create_with_http_info(tenant, v1_create_webhook_request, opts)
+ data
+ end
+
+ # Create a webhook
+ # Create a new webhook
+ # @param tenant [String] The tenant id
+ # @param v1_create_webhook_request [V1CreateWebhookRequest] The input to the webhook creation
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Webhook, Integer, Hash)>] V1Webhook data, response status code and response headers
+ def v1_webhook_create_with_http_info(tenant, v1_create_webhook_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_create_webhook_request' is set
+ if @api_client.config.client_side_validation && v1_create_webhook_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_create_webhook_request' when calling WebhookApi.v1_webhook_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_create_webhook_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Webhook'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete a webhook
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The name of the webhook to delete
+ # @param [Hash] opts the optional parameters
+ # @return [V1Webhook]
+ def v1_webhook_delete(tenant, v1_webhook, opts = {})
+ data, _status_code, _headers = v1_webhook_delete_with_http_info(tenant, v1_webhook, opts)
+ data
+ end
+
+ # Delete a webhook
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The name of the webhook to delete
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Webhook, Integer, Hash)>] V1Webhook data, response status code and response headers
+ def v1_webhook_delete_with_http_info(tenant, v1_webhook, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_webhook' is set
+ if @api_client.config.client_side_validation && v1_webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_webhook' when calling WebhookApi.v1_webhook_delete"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-webhook' + '}', CGI.escape(v1_webhook.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Webhook'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get a webhook
+ # Get a webhook by its name
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param [Hash] opts the optional parameters
+ # @return [V1Webhook]
+ def v1_webhook_get(tenant, v1_webhook, opts = {})
+ data, _status_code, _headers = v1_webhook_get_with_http_info(tenant, v1_webhook, opts)
+ data
+ end
+
+ # Get a webhook
+ # Get a webhook by its name
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Webhook, Integer, Hash)>] V1Webhook data, response status code and response headers
+ def v1_webhook_get_with_http_info(tenant, v1_webhook, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_webhook' is set
+ if @api_client.config.client_side_validation && v1_webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_webhook' when calling WebhookApi.v1_webhook_get"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-webhook' + '}', CGI.escape(v1_webhook.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Webhook'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List webhooks
+ # Lists all webhook for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :source_names The source names to filter by
+ # @option opts [Array] :webhook_names The webhook names to filter by
+ # @return [V1WebhookList]
+ def v1_webhook_list(tenant, opts = {})
+ data, _status_code, _headers = v1_webhook_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # List webhooks
+ # Lists all webhook for a tenant.
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :source_names The source names to filter by
+ # @option opts [Array] :webhook_names The webhook names to filter by
+ # @return [Array<(V1WebhookList, Integer, Hash)>] V1WebhookList data, response status code and response headers
+ def v1_webhook_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'sourceNames'] = @api_client.build_collection_param(opts[:'source_names'], :multi) if !opts[:'source_names'].nil?
+ query_params[:'webhookNames'] = @api_client.build_collection_param(opts[:'webhook_names'], :multi) if !opts[:'webhook_names'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1WebhookList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Post a webhook message
+ # Post an incoming webhook message
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param [Hash] opts the optional parameters
+ # @return [Hash]
+ def v1_webhook_receive(tenant, v1_webhook, opts = {})
+ data, _status_code, _headers = v1_webhook_receive_with_http_info(tenant, v1_webhook, opts)
+ data
+ end
+
+ # Post a webhook message
+ # Post an incoming webhook message
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Hash, Integer, Hash)>] Hash data, response status code and response headers
+ def v1_webhook_receive_with_http_info(tenant, v1_webhook, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_receive ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_receive"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_receive, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_receive, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_webhook' is set
+ if @api_client.config.client_side_validation && v1_webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_webhook' when calling WebhookApi.v1_webhook_receive"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-webhook' + '}', CGI.escape(v1_webhook.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Hash'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || []
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_receive",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_receive\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update a webhook
+ # Update a webhook
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param v1_update_webhook_request [V1UpdateWebhookRequest] The input to the webhook creation
+ # @param [Hash] opts the optional parameters
+ # @return [V1Webhook]
+ def v1_webhook_update(tenant, v1_webhook, v1_update_webhook_request, opts = {})
+ data, _status_code, _headers = v1_webhook_update_with_http_info(tenant, v1_webhook, v1_update_webhook_request, opts)
+ data
+ end
+
+ # Update a webhook
+ # Update a webhook
+ # @param tenant [String] The tenant id
+ # @param v1_webhook [String] The webhook name
+ # @param v1_update_webhook_request [V1UpdateWebhookRequest] The input to the webhook creation
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1Webhook, Integer, Hash)>] V1Webhook data, response status code and response headers
+ def v1_webhook_update_with_http_info(tenant, v1_webhook, v1_update_webhook_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WebhookApi.v1_webhook_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WebhookApi.v1_webhook_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WebhookApi.v1_webhook_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_webhook' is set
+ if @api_client.config.client_side_validation && v1_webhook.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_webhook' when calling WebhookApi.v1_webhook_update"
+ end
+ # verify the required parameter 'v1_update_webhook_request' is set
+ if @api_client.config.client_side_validation && v1_update_webhook_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_update_webhook_request' when calling WebhookApi.v1_webhook_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/webhooks/{v1-webhook}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'v1-webhook' + '}', CGI.escape(v1_webhook.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_update_webhook_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1Webhook'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WebhookApi.v1_webhook_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WebhookApi#v1_webhook_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/worker_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/worker_api.rb
new file mode 100644
index 000000000..9ba86d2ae
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/worker_api.rb
@@ -0,0 +1,246 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class WorkerApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Get worker
+ # Get a worker
+ # @param worker [String] The worker id
+ # @param [Hash] opts the optional parameters
+ # @return [Worker]
+ def worker_get(worker, opts = {})
+ data, _status_code, _headers = worker_get_with_http_info(worker, opts)
+ data
+ end
+
+ # Get worker
+ # Get a worker
+ # @param worker [String] The worker id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Worker, Integer, Hash)>] Worker data, response status code and response headers
+ def worker_get_with_http_info(worker, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkerApi.worker_get ...'
+ end
+ # verify the required parameter 'worker' is set
+ if @api_client.config.client_side_validation && worker.nil?
+ fail ArgumentError, "Missing the required parameter 'worker' when calling WorkerApi.worker_get"
+ end
+ if @api_client.config.client_side_validation && worker.to_s.length > 36
+ fail ArgumentError, 'invalid value for "worker" when calling WorkerApi.worker_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && worker.to_s.length < 36
+ fail ArgumentError, 'invalid value for "worker" when calling WorkerApi.worker_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workers/{worker}'.sub('{' + 'worker' + '}', CGI.escape(worker.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Worker'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkerApi.worker_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkerApi#worker_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workers
+ # Get all workers for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [WorkerList]
+ def worker_list(tenant, opts = {})
+ data, _status_code, _headers = worker_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get workers
+ # Get all workers for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WorkerList, Integer, Hash)>] WorkerList data, response status code and response headers
+ def worker_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkerApi.worker_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkerApi.worker_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkerApi.worker_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkerApi.worker_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/worker'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkerList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkerApi.worker_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkerApi#worker_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update worker
+ # Update a worker
+ # @param worker [String] The worker id
+ # @param update_worker_request [UpdateWorkerRequest] The worker update
+ # @param [Hash] opts the optional parameters
+ # @return [Worker]
+ def worker_update(worker, update_worker_request, opts = {})
+ data, _status_code, _headers = worker_update_with_http_info(worker, update_worker_request, opts)
+ data
+ end
+
+ # Update worker
+ # Update a worker
+ # @param worker [String] The worker id
+ # @param update_worker_request [UpdateWorkerRequest] The worker update
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Worker, Integer, Hash)>] Worker data, response status code and response headers
+ def worker_update_with_http_info(worker, update_worker_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkerApi.worker_update ...'
+ end
+ # verify the required parameter 'worker' is set
+ if @api_client.config.client_side_validation && worker.nil?
+ fail ArgumentError, "Missing the required parameter 'worker' when calling WorkerApi.worker_update"
+ end
+ if @api_client.config.client_side_validation && worker.to_s.length > 36
+ fail ArgumentError, 'invalid value for "worker" when calling WorkerApi.worker_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && worker.to_s.length < 36
+ fail ArgumentError, 'invalid value for "worker" when calling WorkerApi.worker_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_worker_request' is set
+ if @api_client.config.client_side_validation && update_worker_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_worker_request' when calling WorkerApi.worker_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/workers/{worker}'.sub('{' + 'worker' + '}', CGI.escape(worker.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_worker_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Worker'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkerApi.worker_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkerApi#worker_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_api.rb
new file mode 100644
index 000000000..8b17e2676
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_api.rb
@@ -0,0 +1,2010 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class WorkflowApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Get cron job workflows
+ # Get all cron job workflow triggers for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :workflow_name The workflow name to get runs for.
+ # @option opts [String] :cron_name The cron name to get runs for.
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [CronWorkflowsOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @return [CronWorkflowsList]
+ def cron_workflow_list(tenant, opts = {})
+ data, _status_code, _headers = cron_workflow_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get cron job workflows
+ # Get all cron job workflow triggers for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :workflow_name The workflow name to get runs for.
+ # @option opts [String] :cron_name The cron name to get runs for.
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [CronWorkflowsOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @return [Array<(CronWorkflowsList, Integer, Hash)>] CronWorkflowsList data, response status code and response headers
+ def cron_workflow_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.cron_workflow_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.cron_workflow_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.cron_workflow_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.cron_workflow_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.cron_workflow_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.cron_workflow_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/crons'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'workflowId'] = opts[:'workflow_id'] if !opts[:'workflow_id'].nil?
+ query_params[:'workflowName'] = opts[:'workflow_name'] if !opts[:'workflow_name'].nil?
+ query_params[:'cronName'] = opts[:'cron_name'] if !opts[:'cron_name'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'CronWorkflowsList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.cron_workflow_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#cron_workflow_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow metrics
+ # Get the queue metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Array] :workflows A list of workflow IDs to filter by
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @return [TenantQueueMetrics]
+ def tenant_get_queue_metrics(tenant, opts = {})
+ data, _status_code, _headers = tenant_get_queue_metrics_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get workflow metrics
+ # Get the queue metrics for the tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Array] :workflows A list of workflow IDs to filter by
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @return [Array<(TenantQueueMetrics, Integer, Hash)>] TenantQueueMetrics data, response status code and response headers
+ def tenant_get_queue_metrics_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.tenant_get_queue_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.tenant_get_queue_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.tenant_get_queue_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.tenant_get_queue_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/queue-metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'workflows'] = @api_client.build_collection_param(opts[:'workflows'], :multi) if !opts[:'workflows'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'TenantQueueMetrics'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.tenant_get_queue_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#tenant_get_queue_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete cron job workflow run
+ # Delete a cron job workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def workflow_cron_delete(tenant, cron_workflow, opts = {})
+ workflow_cron_delete_with_http_info(tenant, cron_workflow, opts)
+ nil
+ end
+
+ # Delete cron job workflow run
+ # Delete a cron job workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def workflow_cron_delete_with_http_info(tenant, cron_workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_cron_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_cron_delete"
+ end
+ # NOTE(review): ids are required to be exactly 36 characters — presumably UUIDs; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'cron_workflow' is set
+ if @api_client.config.client_side_validation && cron_workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'cron_workflow' when calling WorkflowApi.workflow_cron_delete"
+ end
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'cron-workflow' + '}', CGI.escape(cron_workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_cron_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_cron_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get cron job workflow run
+ # Get a cron job workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param [Hash] opts the optional parameters
+ # @return [CronWorkflows]
+ def workflow_cron_get(tenant, cron_workflow, opts = {})
+ data, _status_code, _headers = workflow_cron_get_with_http_info(tenant, cron_workflow, opts)
+ data
+ end
+
+ # Get cron job workflow run
+ # Get a cron job workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(CronWorkflows, Integer, Hash)>] CronWorkflows data, response status code and response headers
+ def workflow_cron_get_with_http_info(tenant, cron_workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_cron_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_cron_get"
+ end
+ # NOTE(review): ids are required to be exactly 36 characters — presumably UUIDs; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'cron_workflow' is set
+ if @api_client.config.client_side_validation && cron_workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'cron_workflow' when calling WorkflowApi.workflow_cron_get"
+ end
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'cron-workflow' + '}', CGI.escape(cron_workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'CronWorkflows'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_cron_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_cron_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update cron job workflow run
+ # Update a cron workflow for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param update_cron_workflow_trigger_request [UpdateCronWorkflowTriggerRequest] The input for updates
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def workflow_cron_update(tenant, cron_workflow, update_cron_workflow_trigger_request, opts = {})
+ workflow_cron_update_with_http_info(tenant, cron_workflow, update_cron_workflow_trigger_request, opts)
+ nil
+ end
+
+ # Update cron job workflow run
+ # Update a cron workflow for a tenant
+ # @param tenant [String] The tenant id
+ # @param cron_workflow [String] The cron job id
+ # @param update_cron_workflow_trigger_request [UpdateCronWorkflowTriggerRequest] The input for updates
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def workflow_cron_update_with_http_info(tenant, cron_workflow, update_cron_workflow_trigger_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_cron_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_cron_update"
+ end
+ # NOTE(review): ids are required to be exactly 36 characters — presumably UUIDs; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_cron_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'cron_workflow' is set
+ if @api_client.config.client_side_validation && cron_workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'cron_workflow' when calling WorkflowApi.workflow_cron_update"
+ end
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && cron_workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "cron_workflow" when calling WorkflowApi.workflow_cron_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_cron_workflow_trigger_request' is set
+ if @api_client.config.client_side_validation && update_cron_workflow_trigger_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_cron_workflow_trigger_request' when calling WorkflowApi.workflow_cron_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/crons/{cron-workflow}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'cron-workflow' + '}', CGI.escape(cron_workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ # serialized request model unless a debug body is explicitly supplied
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_cron_workflow_trigger_request)
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_cron_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_cron_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete workflow
+ # Delete a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def workflow_delete(workflow, opts = {})
+ workflow_delete_with_http_info(workflow, opts)
+ nil
+ end
+
+ # Delete workflow
+ # Delete a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def workflow_delete_with_http_info(workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_delete ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_delete"
+ end
+ # NOTE(review): the id is required to be exactly 36 characters — presumably a UUID; confirm against the API spec
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow
+ # Get a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Workflow]
+ def workflow_get(workflow, opts = {})
+ data, _status_code, _headers = workflow_get_with_http_info(workflow, opts)
+ data
+ end
+
+ # Get workflow
+ # Get a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Workflow, Integer, Hash)>] Workflow data, response status code and response headers
+ def workflow_get_with_http_info(workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_get ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_get"
+ end
+ # NOTE(review): the id is required to be exactly 36 characters — presumably a UUID; confirm against the API spec
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Workflow'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow metrics
+ # Get the metrics for a workflow version
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @option opts [WorkflowRunStatus] :status A status of workflow run statuses to filter by
+ # @option opts [String] :group_key A group key to filter metrics by
+ # @return [WorkflowMetrics]
+ def workflow_get_metrics(workflow, opts = {})
+ data, _status_code, _headers = workflow_get_metrics_with_http_info(workflow, opts)
+ data
+ end
+
+ # Get workflow metrics
+ # Get the metrics for a workflow version
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @option opts [WorkflowRunStatus] :status A status of workflow run statuses to filter by
+ # @option opts [String] :group_key A group key to filter metrics by
+ # @return [Array<(WorkflowMetrics, Integer, Hash)>] WorkflowMetrics data, response status code and response headers
+ def workflow_get_metrics_with_http_info(workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_get_metrics ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_get_metrics"
+ end
+ # NOTE(review): the id is required to be exactly 36 characters — presumably a UUID; confirm against the API spec
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}/metrics'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ # optional filters are only sent when supplied by the caller
+ query_params = opts[:query_params] || {}
+ query_params[:'status'] = opts[:'status'] if !opts[:'status'].nil?
+ query_params[:'groupKey'] = opts[:'group_key'] if !opts[:'group_key'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowMetrics'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_get_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_get_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow worker count
+ # Get a count of the workers available for workflow
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [WorkflowWorkersCount]
+ def workflow_get_workers_count(tenant, workflow, opts = {})
+ data, _status_code, _headers = workflow_get_workers_count_with_http_info(tenant, workflow, opts)
+ data
+ end
+
+ # Get workflow worker count
+ # Get a count of the workers available for workflow
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WorkflowWorkersCount, Integer, Hash)>] WorkflowWorkersCount data, response status code and response headers
+ def workflow_get_workers_count_with_http_info(tenant, workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_get_workers_count ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_get_workers_count"
+ end
+ # NOTE(review): ids are required to be exactly 36 characters — presumably UUIDs; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_get_workers_count, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_get_workers_count, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_get_workers_count"
+ end
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get_workers_count, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_get_workers_count, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/{workflow}/worker-count'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowWorkersCount'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_get_workers_count",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_get_workers_count\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflows
+ # Get all workflows for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip (default to 0)
+ # @option opts [Integer] :limit The number to limit by (default to 50)
+ # @option opts [String] :name Search by name
+ # @return [WorkflowList]
+ def workflow_list(tenant, opts = {})
+ data, _status_code, _headers = workflow_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get workflows
+ # Get all workflows for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip (default to 0)
+ # @option opts [Integer] :limit The number to limit by (default to 50)
+ # @option opts [String] :name Search by name
+ # @return [Array<(WorkflowList, Integer, Hash)>] WorkflowList data, response status code and response headers
+ def workflow_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_list"
+ end
+ # NOTE(review): the id is required to be exactly 36 characters — presumably a UUID; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ # pagination and search filters are only sent when supplied by the caller
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'name'] = opts[:'name'] if !opts[:'name'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow run
+ # Get a workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [WorkflowRun]
+ def workflow_run_get(tenant, workflow_run, opts = {})
+ data, _status_code, _headers = workflow_run_get_with_http_info(tenant, workflow_run, opts)
+ data
+ end
+
+ # Get workflow run
+ # Get a workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WorkflowRun, Integer, Hash)>] WorkflowRun data, response status code and response headers
+ def workflow_run_get_with_http_info(tenant, workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_run_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_run_get"
+ end
+ # NOTE(review): ids are required to be exactly 36 characters — presumably UUIDs; confirm against the API spec
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_run' is set
+ if @api_client.config.client_side_validation && workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_run' when calling WorkflowApi.workflow_run_get"
+ end
+ if @api_client.config.client_side_validation && workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowApi.workflow_run_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowApi.workflow_run_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow-run' + '}', CGI.escape(workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowRun'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ # Keys built above take precedence over identically-named keys in opts
+ # (Hash#merge: the argument hash wins on conflict)
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_run_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_run_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow runs metrics
+ # Get a summary of workflow run metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :event_id The event id to get runs for.
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Time] :created_after The time after the workflow run was created
+ # @option opts [Time] :created_before The time before the workflow run was created
+ # @return [WorkflowRunsMetrics]
+ def workflow_run_get_metrics(tenant, opts = {})
+ data, _status_code, _headers = workflow_run_get_metrics_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get workflow runs metrics
+ # Get a summary of workflow run metrics for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :event_id The event id to get runs for.
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Time] :created_after The time after the workflow run was created
+ # @option opts [Time] :created_before The time before the workflow run was created
+ # @return [Array<(WorkflowRunsMetrics, Integer, Hash)>] WorkflowRunsMetrics data, response status code and response headers
+ def workflow_run_get_metrics_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_run_get_metrics ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_run_get_metrics"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'event_id'].nil? && opts[:'event_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"event_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'event_id'].nil? && opts[:'event_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"event_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_run_get_metrics, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/runs/metrics'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'eventId'] = opts[:'event_id'] if !opts[:'event_id'].nil?
+ query_params[:'workflowId'] = opts[:'workflow_id'] if !opts[:'workflow_id'].nil?
+ query_params[:'parentWorkflowRunId'] = opts[:'parent_workflow_run_id'] if !opts[:'parent_workflow_run_id'].nil?
+ query_params[:'parentStepRunId'] = opts[:'parent_step_run_id'] if !opts[:'parent_step_run_id'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'createdAfter'] = opts[:'created_after'] if !opts[:'created_after'].nil?
+ query_params[:'createdBefore'] = opts[:'created_before'] if !opts[:'created_before'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowRunsMetrics'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_run_get_metrics",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_run_get_metrics\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow run
+ # Get a workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [WorkflowRunShape]
+ def workflow_run_get_shape(tenant, workflow_run, opts = {})
+ data, _status_code, _headers = workflow_run_get_shape_with_http_info(tenant, workflow_run, opts)
+ data
+ end
+
+ # Get workflow run
+ # Get a workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(WorkflowRunShape, Integer, Hash)>] WorkflowRunShape data, response status code and response headers
+ def workflow_run_get_shape_with_http_info(tenant, workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_run_get_shape ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_run_get_shape"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get_shape, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_get_shape, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_run' is set
+ if @api_client.config.client_side_validation && workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_run' when calling WorkflowApi.workflow_run_get_shape"
+ end
+ if @api_client.config.client_side_validation && workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowApi.workflow_run_get_shape, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowApi.workflow_run_get_shape, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/shape'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow-run' + '}', CGI.escape(workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowRunShape'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_run_get_shape",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_run_get_shape\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow runs
+ # Get all workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :event_id The event id to get runs for.
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :statuses A list of workflow run statuses to filter by
+ # @option opts [Array] :kinds A list of workflow kinds to filter by
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Time] :created_after The time after the workflow run was created
+ # @option opts [Time] :created_before The time before the workflow run was created
+ # @option opts [Time] :finished_after The time after the workflow run was finished
+ # @option opts [Time] :finished_before The time before the workflow run was finished
+ # @option opts [WorkflowRunOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @return [WorkflowRunList]
+ def workflow_run_list(tenant, opts = {})
+ data, _status_code, _headers = workflow_run_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get workflow runs
+ # Get all workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [String] :event_id The event id to get runs for.
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :statuses A list of workflow run statuses to filter by
+ # @option opts [Array] :kinds A list of workflow kinds to filter by
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Time] :created_after The time after the workflow run was created
+ # @option opts [Time] :created_before The time before the workflow run was created
+ # @option opts [Time] :finished_after The time after the workflow run was finished
+ # @option opts [Time] :finished_before The time before the workflow run was finished
+ # @option opts [WorkflowRunOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @return [Array<(WorkflowRunList, Integer, Hash)>] WorkflowRunList data, response status code and response headers
+ def workflow_run_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_run_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_run_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'event_id'].nil? && opts[:'event_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"event_id"]" when calling WorkflowApi.workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'event_id'].nil? && opts[:'event_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"event_id"]" when calling WorkflowApi.workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/runs'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'eventId'] = opts[:'event_id'] if !opts[:'event_id'].nil?
+ query_params[:'workflowId'] = opts[:'workflow_id'] if !opts[:'workflow_id'].nil?
+ query_params[:'parentWorkflowRunId'] = opts[:'parent_workflow_run_id'] if !opts[:'parent_workflow_run_id'].nil?
+ query_params[:'parentStepRunId'] = opts[:'parent_step_run_id'] if !opts[:'parent_step_run_id'].nil?
+ query_params[:'statuses'] = @api_client.build_collection_param(opts[:'statuses'], :multi) if !opts[:'statuses'].nil?
+ query_params[:'kinds'] = @api_client.build_collection_param(opts[:'kinds'], :multi) if !opts[:'kinds'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'createdAfter'] = opts[:'created_after'] if !opts[:'created_after'].nil?
+ query_params[:'createdBefore'] = opts[:'created_before'] if !opts[:'created_before'].nil?
+ query_params[:'finishedAfter'] = opts[:'finished_after'] if !opts[:'finished_after'].nil?
+ query_params[:'finishedBefore'] = opts[:'finished_before'] if !opts[:'finished_before'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowRunList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_run_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_run_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Bulk delete scheduled workflow runs
+ # Bulk delete scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflows_bulk_delete_request [ScheduledWorkflowsBulkDeleteRequest] The input to bulk delete scheduled workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [ScheduledWorkflowsBulkDeleteResponse]
+ def workflow_scheduled_bulk_delete(tenant, scheduled_workflows_bulk_delete_request, opts = {})
+ data, _status_code, _headers = workflow_scheduled_bulk_delete_with_http_info(tenant, scheduled_workflows_bulk_delete_request, opts)
+ data
+ end
+
+ # Bulk delete scheduled workflow runs
+ # Bulk delete scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflows_bulk_delete_request [ScheduledWorkflowsBulkDeleteRequest] The input to bulk delete scheduled workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ScheduledWorkflowsBulkDeleteResponse, Integer, Hash)>] ScheduledWorkflowsBulkDeleteResponse data, response status code and response headers
+ def workflow_scheduled_bulk_delete_with_http_info(tenant, scheduled_workflows_bulk_delete_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_bulk_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_bulk_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_bulk_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_bulk_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'scheduled_workflows_bulk_delete_request' is set
+ if @api_client.config.client_side_validation && scheduled_workflows_bulk_delete_request.nil?
+ fail ArgumentError, "Missing the required parameter 'scheduled_workflows_bulk_delete_request' when calling WorkflowApi.workflow_scheduled_bulk_delete"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled/bulk-delete'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(scheduled_workflows_bulk_delete_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflowsBulkDeleteResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_bulk_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_bulk_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Bulk update scheduled workflow runs
+ # Bulk update (reschedule) scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflows_bulk_update_request [ScheduledWorkflowsBulkUpdateRequest] The input to bulk update scheduled workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [ScheduledWorkflowsBulkUpdateResponse]
+ def workflow_scheduled_bulk_update(tenant, scheduled_workflows_bulk_update_request, opts = {})
+ data, _status_code, _headers = workflow_scheduled_bulk_update_with_http_info(tenant, scheduled_workflows_bulk_update_request, opts)
+ data
+ end
+
+ # Bulk update scheduled workflow runs
+ # Bulk update (reschedule) scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflows_bulk_update_request [ScheduledWorkflowsBulkUpdateRequest] The input to bulk update scheduled workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ScheduledWorkflowsBulkUpdateResponse, Integer, Hash)>] ScheduledWorkflowsBulkUpdateResponse data, response status code and response headers
+ def workflow_scheduled_bulk_update_with_http_info(tenant, scheduled_workflows_bulk_update_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_bulk_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_bulk_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_bulk_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_bulk_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'scheduled_workflows_bulk_update_request' is set
+ if @api_client.config.client_side_validation && scheduled_workflows_bulk_update_request.nil?
+ fail ArgumentError, "Missing the required parameter 'scheduled_workflows_bulk_update_request' when calling WorkflowApi.workflow_scheduled_bulk_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled/bulk-update'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(scheduled_workflows_bulk_update_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflowsBulkUpdateResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_bulk_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_bulk_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Delete scheduled workflow run
+ # Delete a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [nil]
+ def workflow_scheduled_delete(tenant, scheduled_workflow_run, opts = {})
+ workflow_scheduled_delete_with_http_info(tenant, scheduled_workflow_run, opts)
+ nil
+ end
+
+ # Delete scheduled workflow run
+ # Delete a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers
+ def workflow_scheduled_delete_with_http_info(tenant, scheduled_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_delete ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_delete"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'scheduled_workflow_run' is set
+ if @api_client.config.client_side_validation && scheduled_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'scheduled_workflow_run' when calling WorkflowApi.workflow_scheduled_delete"
+ end
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_delete, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_delete, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'scheduled-workflow-run' + '}', CGI.escape(scheduled_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type]
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_delete",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_delete\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get scheduled workflow run
+ # Get a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [ScheduledWorkflows]
+ def workflow_scheduled_get(tenant, scheduled_workflow_run, opts = {})
+ data, _status_code, _headers = workflow_scheduled_get_with_http_info(tenant, scheduled_workflow_run, opts)
+ data
+ end
+
+ # Get scheduled workflow run
+ # Get a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ScheduledWorkflows, Integer, Hash)>] ScheduledWorkflows data, response status code and response headers
+ def workflow_scheduled_get_with_http_info(tenant, scheduled_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_get ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_get"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_get, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'scheduled_workflow_run' is set
+ if @api_client.config.client_side_validation && scheduled_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'scheduled_workflow_run' when calling WorkflowApi.workflow_scheduled_get"
+ end
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'scheduled-workflow-run' + '}', CGI.escape(scheduled_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflows'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get scheduled workflow runs
+ # Get all scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [ScheduledWorkflowsOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Array] :statuses A list of scheduled run statuses to filter by
+ # @return [ScheduledWorkflowsList]
+ def workflow_scheduled_list(tenant, opts = {})
+ data, _status_code, _headers = workflow_scheduled_list_with_http_info(tenant, opts)
+ data
+ end
+
+ # Get scheduled workflow runs
+ # Get all scheduled workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [ScheduledWorkflowsOrderByField] :order_by_field The order by field
+ # @option opts [WorkflowRunOrderByDirection] :order_by_direction The order by direction
+ # @option opts [String] :workflow_id The workflow id to get runs for.
+ # @option opts [String] :parent_workflow_run_id The parent workflow run id
+ # @option opts [String] :parent_step_run_id The parent step run id
+ # @option opts [Array] :additional_metadata A list of metadata key value pairs to filter by
+ # @option opts [Array] :statuses A list of scheduled run statuses to filter by
+ # @return [Array<(ScheduledWorkflowsList, Integer, Hash)>] ScheduledWorkflowsList data, response status code and response headers
+ def workflow_scheduled_list_with_http_info(tenant, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_list"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'workflow_id'].nil? && opts[:'workflow_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"workflow_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_workflow_run_id'].nil? && opts[:'parent_workflow_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_workflow_run_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_step_run_id'].nil? && opts[:'parent_step_run_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_step_run_id"]" when calling WorkflowApi.workflow_scheduled_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ query_params[:'orderByField'] = opts[:'order_by_field'] if !opts[:'order_by_field'].nil?
+ query_params[:'orderByDirection'] = opts[:'order_by_direction'] if !opts[:'order_by_direction'].nil?
+ query_params[:'workflowId'] = opts[:'workflow_id'] if !opts[:'workflow_id'].nil?
+ query_params[:'parentWorkflowRunId'] = opts[:'parent_workflow_run_id'] if !opts[:'parent_workflow_run_id'].nil?
+ query_params[:'parentStepRunId'] = opts[:'parent_step_run_id'] if !opts[:'parent_step_run_id'].nil?
+ query_params[:'additionalMetadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'statuses'] = @api_client.build_collection_param(opts[:'statuses'], :multi) if !opts[:'statuses'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflowsList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update scheduled workflow run
+ # Update (reschedule) a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param update_scheduled_workflow_run_request [UpdateScheduledWorkflowRunRequest] The input to reschedule the workflow run
+ # @param [Hash] opts the optional parameters
+ # @return [ScheduledWorkflows]
+ def workflow_scheduled_update(tenant, scheduled_workflow_run, update_scheduled_workflow_run_request, opts = {})
+ data, _status_code, _headers = workflow_scheduled_update_with_http_info(tenant, scheduled_workflow_run, update_scheduled_workflow_run_request, opts)
+ data
+ end
+
+ # Update scheduled workflow run
+ # Update (reschedule) a scheduled workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param scheduled_workflow_run [String] The scheduled workflow id
+ # @param update_scheduled_workflow_run_request [UpdateScheduledWorkflowRunRequest] The input to reschedule the workflow run
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ScheduledWorkflows, Integer, Hash)>] ScheduledWorkflows data, response status code and response headers
+ def workflow_scheduled_update_with_http_info(tenant, scheduled_workflow_run, update_scheduled_workflow_run_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_scheduled_update ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowApi.workflow_scheduled_update"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowApi.workflow_scheduled_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'scheduled_workflow_run' is set
+ if @api_client.config.client_side_validation && scheduled_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'scheduled_workflow_run' when calling WorkflowApi.workflow_scheduled_update"
+ end
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && scheduled_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "scheduled_workflow_run" when calling WorkflowApi.workflow_scheduled_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'update_scheduled_workflow_run_request' is set
+ if @api_client.config.client_side_validation && update_scheduled_workflow_run_request.nil?
+ fail ArgumentError, "Missing the required parameter 'update_scheduled_workflow_run_request' when calling WorkflowApi.workflow_scheduled_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/scheduled/{scheduled-workflow-run}'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'scheduled-workflow-run' + '}', CGI.escape(scheduled_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(update_scheduled_workflow_run_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflows'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_scheduled_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_scheduled_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Update workflow
+ # Update a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param workflow_update_request [WorkflowUpdateRequest] The input to update the workflow
+ # @param [Hash] opts the optional parameters
+ # @return [Workflow]
+ def workflow_update(workflow, workflow_update_request, opts = {})
+ data, _status_code, _headers = workflow_update_with_http_info(workflow, workflow_update_request, opts)
+ data
+ end
+
+ # Update workflow
+ # Update a workflow for a tenant
+ # @param workflow [String] The workflow id
+ # @param workflow_update_request [WorkflowUpdateRequest] The input to update the workflow
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Workflow, Integer, Hash)>] Workflow data, response status code and response headers
+ def workflow_update_with_http_info(workflow, workflow_update_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_update ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_update"
+ end
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_update, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_update, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_update_request' is set
+ if @api_client.config.client_side_validation && workflow_update_request.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_update_request' when calling WorkflowApi.workflow_update"
+ end
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(workflow_update_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Workflow'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_update",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:PATCH, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_update\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow version
+ # Get a workflow version for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :version The workflow version. If not supplied, the latest version is fetched.
+ # @return [WorkflowVersion]
+ def workflow_version_get(workflow, opts = {})
+ data, _status_code, _headers = workflow_version_get_with_http_info(workflow, opts)
+ data
+ end
+
+ # Get workflow version
+ # Get a workflow version for a tenant
+ # @param workflow [String] The workflow id
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :version The workflow version. If not supplied, the latest version is fetched.
+ # @return [Array<(WorkflowVersion, Integer, Hash)>] WorkflowVersion data, response status code and response headers
+ def workflow_version_get_with_http_info(workflow, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowApi.workflow_version_get ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowApi.workflow_version_get"
+ end
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_version_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowApi.workflow_version_get, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'version'].nil? && opts[:'version'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"version"]" when calling WorkflowApi.workflow_version_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'version'].nil? && opts[:'version'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"version"]" when calling WorkflowApi.workflow_version_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}/versions'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'version'] = opts[:'version'] if !opts[:'version'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowVersion'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowApi.workflow_version_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowApi#workflow_version_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_run_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_run_api.rb
new file mode 100644
index 000000000..d6a19b437
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_run_api.rb
@@ -0,0 +1,540 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class WorkflowRunApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ @api_client = api_client
+ end
+ # Create cron job workflow trigger
+ # Create a new cron job workflow trigger for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow name
+ # @param create_cron_workflow_trigger_request [CreateCronWorkflowTriggerRequest] The input to the cron job workflow trigger
+ # @param [Hash] opts the optional parameters
+ # @return [CronWorkflows]
+ def cron_workflow_trigger_create(tenant, workflow, create_cron_workflow_trigger_request, opts = {})
+ data, _status_code, _headers = cron_workflow_trigger_create_with_http_info(tenant, workflow, create_cron_workflow_trigger_request, opts)
+ data
+ end
+
+ # Create cron job workflow trigger
+ # Create a new cron job workflow trigger for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow name
+ # @param create_cron_workflow_trigger_request [CreateCronWorkflowTriggerRequest] The input to the cron job workflow trigger
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(CronWorkflows, Integer, Hash)>] CronWorkflows data, response status code and response headers
+ def cron_workflow_trigger_create_with_http_info(tenant, workflow, create_cron_workflow_trigger_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.cron_workflow_trigger_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunApi.cron_workflow_trigger_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.cron_workflow_trigger_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.cron_workflow_trigger_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowRunApi.cron_workflow_trigger_create"
+ end
+ # verify the required parameter 'create_cron_workflow_trigger_request' is set
+ if @api_client.config.client_side_validation && create_cron_workflow_trigger_request.nil?
+ fail ArgumentError, "Missing the required parameter 'create_cron_workflow_trigger_request' when calling WorkflowRunApi.cron_workflow_trigger_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/{workflow}/crons'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(create_cron_workflow_trigger_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'CronWorkflows'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.cron_workflow_trigger_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#cron_workflow_trigger_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Trigger workflow run
+ # Schedule a new workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow name
+ # @param schedule_workflow_run_request [ScheduleWorkflowRunRequest] The input to the scheduled workflow run
+ # @param [Hash] opts the optional parameters
+ # @return [ScheduledWorkflows]
+ def scheduled_workflow_run_create(tenant, workflow, schedule_workflow_run_request, opts = {})
+ data, _status_code, _headers = scheduled_workflow_run_create_with_http_info(tenant, workflow, schedule_workflow_run_request, opts)
+ data
+ end
+
+ # Trigger workflow run
+ # Schedule a new workflow run for a tenant
+ # @param tenant [String] The tenant id
+ # @param workflow [String] The workflow name
+ # @param schedule_workflow_run_request [ScheduleWorkflowRunRequest] The input to the scheduled workflow run
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ScheduledWorkflows, Integer, Hash)>] ScheduledWorkflows data, response status code and response headers
+ def scheduled_workflow_run_create_with_http_info(tenant, workflow, schedule_workflow_run_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.scheduled_workflow_run_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunApi.scheduled_workflow_run_create"
+ end
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.scheduled_workflow_run_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.scheduled_workflow_run_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowRunApi.scheduled_workflow_run_create"
+ end
+ # verify the required parameter 'schedule_workflow_run_request' is set
+ if @api_client.config.client_side_validation && schedule_workflow_run_request.nil?
+ fail ArgumentError, "Missing the required parameter 'schedule_workflow_run_request' when calling WorkflowRunApi.scheduled_workflow_run_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/{workflow}/scheduled'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(schedule_workflow_run_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ScheduledWorkflows'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.scheduled_workflow_run_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#scheduled_workflow_run_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Cancel workflow runs
+ # Cancel a batch of workflow runs
+ # @param tenant [String] The tenant id
+ # @param workflow_runs_cancel_request [WorkflowRunsCancelRequest] The input to cancel the workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [EventUpdateCancel200Response]
+ def workflow_run_cancel(tenant, workflow_runs_cancel_request, opts = {})
+ data, _status_code, _headers = workflow_run_cancel_with_http_info(tenant, workflow_runs_cancel_request, opts)
+ data
+ end
+
+ # Cancel workflow runs
+ # Cancel a batch of workflow runs
+ # @param tenant [String] The tenant id
+ # @param workflow_runs_cancel_request [WorkflowRunsCancelRequest] The input to cancel the workflow runs
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(EventUpdateCancel200Response, Integer, Hash)>] EventUpdateCancel200Response data, response status code and response headers
+ def workflow_run_cancel_with_http_info(tenant, workflow_runs_cancel_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.workflow_run_cancel ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunApi.workflow_run_cancel"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character tenant id (UUID-sized).
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_cancel, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_cancel, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_runs_cancel_request' is set
+ if @api_client.config.client_side_validation && workflow_runs_cancel_request.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_runs_cancel_request' when calling WorkflowRunApi.workflow_run_cancel"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflows/cancel'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(workflow_runs_cancel_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'EventUpdateCancel200Response'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.workflow_run_cancel",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # POST /api/v1/tenants/{tenant}/workflows/cancel
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#workflow_run_cancel\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Trigger workflow run
+ # Trigger a new workflow run for a tenant
+ # @param workflow [String] The workflow id
+ # @param trigger_workflow_run_request [TriggerWorkflowRunRequest] The input to the workflow run
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :version The workflow version. If not supplied, the latest version is fetched.
+ # @return [WorkflowRun]
+ def workflow_run_create(workflow, trigger_workflow_run_request, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = workflow_run_create_with_http_info(workflow, trigger_workflow_run_request, opts)
+ data
+ end
+
+ # Trigger workflow run
+ # Trigger a new workflow run for a tenant
+ # @param workflow [String] The workflow id
+ # @param trigger_workflow_run_request [TriggerWorkflowRunRequest] The input to the workflow run
+ # @param [Hash] opts the optional parameters
+ # @option opts [String] :version The workflow version. If not supplied, the latest version is fetched.
+ # @return [Array<(WorkflowRun, Integer, Hash)>] WorkflowRun data, response status code and response headers
+ def workflow_run_create_with_http_info(workflow, trigger_workflow_run_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.workflow_run_create ...'
+ end
+ # verify the required parameter 'workflow' is set
+ if @api_client.config.client_side_validation && workflow.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow' when calling WorkflowRunApi.workflow_run_create"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character workflow id (UUID-sized).
+ if @api_client.config.client_side_validation && workflow.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowRunApi.workflow_run_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow" when calling WorkflowRunApi.workflow_run_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'trigger_workflow_run_request' is set
+ if @api_client.config.client_side_validation && trigger_workflow_run_request.nil?
+ fail ArgumentError, "Missing the required parameter 'trigger_workflow_run_request' when calling WorkflowRunApi.workflow_run_create"
+ end
+ # The optional :version, when present, must also be exactly 36 characters.
+ if @api_client.config.client_side_validation && !opts[:'version'].nil? && opts[:'version'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"version"]" when calling WorkflowRunApi.workflow_run_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'version'].nil? && opts[:'version'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"version"]" when calling WorkflowRunApi.workflow_run_create, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/workflows/{workflow}/trigger'.sub('{' + 'workflow' + '}', CGI.escape(workflow.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'version'] = opts[:'version'] if !opts[:'version'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(trigger_workflow_run_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'WorkflowRun'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.workflow_run_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # POST /api/v1/workflows/{workflow}/trigger
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#workflow_run_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow run input
+ # Get the input for a workflow run.
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [Hash]
+ def workflow_run_get_input(tenant, workflow_run, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = workflow_run_get_input_with_http_info(tenant, workflow_run, opts)
+ data
+ end
+
+ # Get workflow run input
+ # Get the input for a workflow run.
+ # @param tenant [String] The tenant id
+ # @param workflow_run [String] The workflow run id
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(Hash, Integer, Hash)>] Hash data, response status code and response headers
+ def workflow_run_get_input_with_http_info(tenant, workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.workflow_run_get_input ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunApi.workflow_run_get_input"
+ end
+ # The paired >36 / <36 checks enforce exact 36-character (UUID-sized) ids.
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_get_input, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_get_input, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'workflow_run' is set
+ if @api_client.config.client_side_validation && workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'workflow_run' when calling WorkflowRunApi.workflow_run_get_input"
+ end
+ if @api_client.config.client_side_validation && workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowRunApi.workflow_run_get_input, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "workflow_run" when calling WorkflowRunApi.workflow_run_get_input, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/input'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s)).sub('{' + 'workflow-run' + '}', CGI.escape(workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Hash'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.workflow_run_get_input",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # GET /api/v1/tenants/{tenant}/workflow-runs/{workflow-run}/input
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#workflow_run_get_input\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Replay workflow runs
+ # Replays a list of workflow runs.
+ # @param tenant [String] The tenant id
+ # @param replay_workflow_runs_request [ReplayWorkflowRunsRequest] The workflow run ids to replay
+ # @param [Hash] opts the optional parameters
+ # @return [ReplayWorkflowRunsResponse]
+ def workflow_run_update_replay(tenant, replay_workflow_runs_request, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = workflow_run_update_replay_with_http_info(tenant, replay_workflow_runs_request, opts)
+ data
+ end
+
+ # Replay workflow runs
+ # Replays a list of workflow runs.
+ # @param tenant [String] The tenant id
+ # @param replay_workflow_runs_request [ReplayWorkflowRunsRequest] The workflow run ids to replay
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(ReplayWorkflowRunsResponse, Integer, Hash)>] ReplayWorkflowRunsResponse data, response status code and response headers
+ def workflow_run_update_replay_with_http_info(tenant, replay_workflow_runs_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunApi.workflow_run_update_replay ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunApi.workflow_run_update_replay"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character tenant id (UUID-sized).
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_update_replay, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunApi.workflow_run_update_replay, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'replay_workflow_runs_request' is set
+ if @api_client.config.client_side_validation && replay_workflow_runs_request.nil?
+ fail ArgumentError, "Missing the required parameter 'replay_workflow_runs_request' when calling WorkflowRunApi.workflow_run_update_replay"
+ end
+ # resource path
+ local_var_path = '/api/v1/tenants/{tenant}/workflow-runs/replay'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(replay_workflow_runs_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'ReplayWorkflowRunsResponse'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunApi.workflow_run_update_replay",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # POST /api/v1/tenants/{tenant}/workflow-runs/replay
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunApi#workflow_run_update_replay\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_runs_api.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_runs_api.rb
new file mode 100644
index 000000000..de76c60f7
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api/workflow_runs_api.rb
@@ -0,0 +1,704 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'cgi'
+
+module HatchetSdkRest
+ class WorkflowRunsApi
+ attr_accessor :api_client
+
+ def initialize(api_client = ApiClient.default)
+ # Store the shared ApiClient (defaults to the library-wide singleton).
+ @api_client = api_client
+ end
+ # Create workflow run
+ # Trigger a new workflow run
+ # @param tenant [String] The tenant id
+ # @param v1_trigger_workflow_run_request [V1TriggerWorkflowRunRequest] The workflow run to create
+ # @param [Hash] opts the optional parameters
+ # @return [V1WorkflowRunDetails]
+ def v1_workflow_run_create(tenant, v1_trigger_workflow_run_request, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = v1_workflow_run_create_with_http_info(tenant, v1_trigger_workflow_run_request, opts)
+ data
+ end
+
+ # Create workflow run
+ # Trigger a new workflow run
+ # @param tenant [String] The tenant id
+ # @param v1_trigger_workflow_run_request [V1TriggerWorkflowRunRequest] The workflow run to create
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1WorkflowRunDetails, Integer, Hash)>] V1WorkflowRunDetails data, response status code and response headers
+ def v1_workflow_run_create_with_http_info(tenant, v1_trigger_workflow_run_request, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_create ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunsApi.v1_workflow_run_create"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character tenant id (UUID-sized).
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_create, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_create, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'v1_trigger_workflow_run_request' is set
+ if @api_client.config.client_side_validation && v1_trigger_workflow_run_request.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_trigger_workflow_run_request' when calling WorkflowRunsApi.v1_workflow_run_create"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/workflow-runs/trigger'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+ # HTTP header 'Content-Type'
+ content_type = @api_client.select_header_content_type(['application/json'])
+ if !content_type.nil?
+ header_params['Content-Type'] = content_type
+ end
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body] || @api_client.object_to_http_body(v1_trigger_workflow_run_request)
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1WorkflowRunDetails'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_create",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # POST /api/v1/stable/tenants/{tenant}/workflow-runs/trigger
+ data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_create\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List workflow runs
+ # Lists displayable names of workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param external_ids [Array] The external ids of the workflow runs to get display names for
+ # @param [Hash] opts the optional parameters
+ # @return [V1WorkflowRunDisplayNameList]
+ def v1_workflow_run_display_names_list(tenant, external_ids, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = v1_workflow_run_display_names_list_with_http_info(tenant, external_ids, opts)
+ data
+ end
+
+ # List workflow runs
+ # Lists displayable names of workflow runs for a tenant
+ # @param tenant [String] The tenant id
+ # @param external_ids [Array] The external ids of the workflow runs to get display names for
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1WorkflowRunDisplayNameList, Integer, Hash)>] V1WorkflowRunDisplayNameList data, response status code and response headers
+ def v1_workflow_run_display_names_list_with_http_info(tenant, external_ids, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_display_names_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunsApi.v1_workflow_run_display_names_list"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character tenant id (UUID-sized).
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_display_names_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_display_names_list, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'external_ids' is set
+ if @api_client.config.client_side_validation && external_ids.nil?
+ fail ArgumentError, "Missing the required parameter 'external_ids' when calling WorkflowRunsApi.v1_workflow_run_display_names_list"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/workflow-runs/display-names'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ # external_ids is serialized as a repeated (:multi) query parameter.
+ query_params = opts[:query_params] || {}
+ query_params[:'external_ids'] = @api_client.build_collection_param(external_ids, :multi)
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1WorkflowRunDisplayNameList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_display_names_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # GET /api/v1/stable/tenants/{tenant}/workflow-runs/display-names
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_display_names_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List workflow run external ids
+ # Lists external ids for workflow runs matching filters
+ # @param tenant [String] The tenant id
+ # @param since [Time] The earliest date to filter by
+ # @param [Hash] opts the optional parameters
+ # @option opts [Array] :statuses A list of statuses to filter by
+ # @option opts [Time] :_until The latest date to filter by
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @option opts [Array] :workflow_ids The workflow ids to find runs for
+ # @return [Array]
+ def v1_workflow_run_external_ids_list(tenant, since, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = v1_workflow_run_external_ids_list_with_http_info(tenant, since, opts)
+ data
+ end
+
+ # List workflow run external ids
+ # Lists external ids for workflow runs matching filters
+ # @param tenant [String] The tenant id
+ # @param since [Time] The earliest date to filter by
+ # @param [Hash] opts the optional parameters
+ # @option opts [Array] :statuses A list of statuses to filter by
+ # @option opts [Time] :_until The latest date to filter by
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @option opts [Array] :workflow_ids The workflow ids to find runs for
+ # @return [Array<(Array, Integer, Hash)>] Array data, response status code and response headers
+ def v1_workflow_run_external_ids_list_with_http_info(tenant, since, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_external_ids_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunsApi.v1_workflow_run_external_ids_list"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character tenant id (UUID-sized).
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_external_ids_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_external_ids_list, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'since' is set
+ if @api_client.config.client_side_validation && since.nil?
+ fail ArgumentError, "Missing the required parameter 'since' when calling WorkflowRunsApi.v1_workflow_run_external_ids_list"
+ end
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/workflow-runs/external-ids'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ # Note: the Ruby option key :_until maps to the wire parameter 'until'
+ # ('until' is a Ruby keyword, so the generator prefixes it with '_').
+ query_params = opts[:query_params] || {}
+ query_params[:'since'] = since
+ query_params[:'statuses'] = @api_client.build_collection_param(opts[:'statuses'], :multi) if !opts[:'statuses'].nil?
+ query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?
+ query_params[:'additional_metadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'workflow_ids'] = @api_client.build_collection_param(opts[:'workflow_ids'], :multi) if !opts[:'workflow_ids'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'Array'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_external_ids_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # GET /api/v1/stable/tenants/{tenant}/workflow-runs/external-ids
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_external_ids_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List tasks
+ # Get a workflow run and its metadata to display on the \"detail\" page
+ # @param v1_workflow_run [String] The workflow run id to get
+ # @param [Hash] opts the optional parameters
+ # @return [V1WorkflowRunDetails]
+ def v1_workflow_run_get(v1_workflow_run, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = v1_workflow_run_get_with_http_info(v1_workflow_run, opts)
+ data
+ end
+
+ # List tasks
+ # Get a workflow run and its metadata to display on the \"detail\" page
+ # @param v1_workflow_run [String] The workflow run id to get
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1WorkflowRunDetails, Integer, Hash)>] V1WorkflowRunDetails data, response status code and response headers
+ def v1_workflow_run_get_with_http_info(v1_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_get ...'
+ end
+ # verify the required parameter 'v1_workflow_run' is set
+ if @api_client.config.client_side_validation && v1_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_workflow_run' when calling WorkflowRunsApi.v1_workflow_run_get"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character run id (UUID-sized).
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/workflow-runs/{v1-workflow-run}'.sub('{' + 'v1-workflow-run' + '}', CGI.escape(v1_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1WorkflowRunDetails'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_get",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # GET /api/v1/stable/workflow-runs/{v1-workflow-run}
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_get\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # Get workflow run status
+ # Get the status of a workflow run.
+ # @param v1_workflow_run [String] The workflow run id to get the status for
+ # @param [Hash] opts the optional parameters
+ # @return [V1TaskStatus]
+ def v1_workflow_run_get_status(v1_workflow_run, opts = {})
+ # Convenience wrapper: delegate to the raw variant and discard status code and headers.
+ data, _status_code, _headers = v1_workflow_run_get_status_with_http_info(v1_workflow_run, opts)
+ data
+ end
+
+ # Get workflow run status
+ # Get the status of a workflow run.
+ # @param v1_workflow_run [String] The workflow run id to get the status for
+ # @param [Hash] opts the optional parameters
+ # @return [Array<(V1TaskStatus, Integer, Hash)>] V1TaskStatus data, response status code and response headers
+ def v1_workflow_run_get_status_with_http_info(v1_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_get_status ...'
+ end
+ # verify the required parameter 'v1_workflow_run' is set
+ if @api_client.config.client_side_validation && v1_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_workflow_run' when calling WorkflowRunsApi.v1_workflow_run_get_status"
+ end
+ # The paired >36 / <36 checks enforce an exact 36-character run id (UUID-sized).
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get_status, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get_status, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/workflow-runs/{v1-workflow-run}/status'.sub('{' + 'v1-workflow-run' + '}', CGI.escape(v1_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskStatus'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_get_status",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ # GET /api/v1/stable/workflow-runs/{v1-workflow-run}/status
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_get_status\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List timings for a workflow run
+ # Get the timings for a workflow run
+ # @param v1_workflow_run [String] The workflow run id to get
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :depth The depth to retrieve children
+ # @return [V1TaskTimingList]
+ def v1_workflow_run_get_timings(v1_workflow_run, opts = {})
+ data, _status_code, _headers = v1_workflow_run_get_timings_with_http_info(v1_workflow_run, opts)
+ data
+ end
+
+ # List timings for a workflow run
+ # Get the timings for a workflow run
+ # @param v1_workflow_run [String] The workflow run id to get
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :depth The depth to retrieve children
+ # @return [Array<(V1TaskTimingList, Integer, Hash)>] V1TaskTimingList data, response status code and response headers
+ def v1_workflow_run_get_timings_with_http_info(v1_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_get_timings ...'
+ end
+ # verify the required parameter 'v1_workflow_run' is set
+ if @api_client.config.client_side_validation && v1_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_workflow_run' when calling WorkflowRunsApi.v1_workflow_run_get_timings"
+ end
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get_timings, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_get_timings, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/workflow-runs/{v1-workflow-run}/task-timings'.sub('{' + 'v1-workflow-run' + '}', CGI.escape(v1_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'depth'] = opts[:'depth'] if !opts[:'depth'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskTimingList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_get_timings",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_get_timings\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List workflow runs
+ # Lists workflow runs for a tenant.
+ # @param tenant [String] The tenant id
+ # @param since [Time] The earliest date to filter by
+ # @param only_tasks [Boolean] Whether to include DAGs or only to include tasks
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :statuses A list of statuses to filter by
+ # @option opts [Time] :_until The latest date to filter by
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @option opts [Array] :workflow_ids The workflow ids to find runs for
+ # @option opts [String] :worker_id The worker id to filter by
+ # @option opts [String] :parent_task_external_id The parent task external id to filter by
+ # @option opts [String] :triggering_event_external_id The external id of the event that triggered the workflow run
+ # @option opts [Boolean] :include_payloads A flag for whether or not to include the input and output payloads in the response. Defaults to `true` if unset.
+ # @return [V1TaskSummaryList]
+ def v1_workflow_run_list(tenant, since, only_tasks, opts = {})
+ # thin convenience wrapper: drops status code and headers from the _with_http_info variant
+ data, _status_code, _headers = v1_workflow_run_list_with_http_info(tenant, since, only_tasks, opts)
+ data
+ end
+
+ # List workflow runs
+ # Lists workflow runs for a tenant.
+ # @param tenant [String] The tenant id
+ # @param since [Time] The earliest date to filter by
+ # @param only_tasks [Boolean] Whether to include DAGs or only to include tasks
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @option opts [Array] :statuses A list of statuses to filter by
+ # @option opts [Time] :_until The latest date to filter by (named with a leading underscore because `until` is a Ruby keyword)
+ # @option opts [Array] :additional_metadata Additional metadata k-v pairs to filter by
+ # @option opts [Array] :workflow_ids The workflow ids to find runs for
+ # @option opts [String] :worker_id The worker id to filter by
+ # @option opts [String] :parent_task_external_id The parent task external id to filter by
+ # @option opts [String] :triggering_event_external_id The external id of the event that triggered the workflow run
+ # @option opts [Boolean] :include_payloads A flag for whether or not to include the input and output payloads in the response. Defaults to `true` if unset.
+ # @return [Array<(V1TaskSummaryList, Integer, Hash)>] V1TaskSummaryList data, response status code and response headers
+ def v1_workflow_run_list_with_http_info(tenant, since, only_tasks, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_list ...'
+ end
+ # verify the required parameter 'tenant' is set
+ if @api_client.config.client_side_validation && tenant.nil?
+ fail ArgumentError, "Missing the required parameter 'tenant' when calling WorkflowRunsApi.v1_workflow_run_list"
+ end
+ # all id parameters below must be exactly 36 characters (presumably UUIDs; confirm against API spec)
+ if @api_client.config.client_side_validation && tenant.to_s.length > 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && tenant.to_s.length < 36
+ fail ArgumentError, 'invalid value for "tenant" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ # verify the required parameter 'since' is set
+ if @api_client.config.client_side_validation && since.nil?
+ fail ArgumentError, "Missing the required parameter 'since' when calling WorkflowRunsApi.v1_workflow_run_list"
+ end
+ # verify the required parameter 'only_tasks' is set
+ if @api_client.config.client_side_validation && only_tasks.nil?
+ fail ArgumentError, "Missing the required parameter 'only_tasks' when calling WorkflowRunsApi.v1_workflow_run_list"
+ end
+ if @api_client.config.client_side_validation && !opts[:'worker_id'].nil? && opts[:'worker_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"worker_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'worker_id'].nil? && opts[:'worker_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"worker_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_task_external_id'].nil? && opts[:'parent_task_external_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_task_external_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'parent_task_external_id'].nil? && opts[:'parent_task_external_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"parent_task_external_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'triggering_event_external_id'].nil? && opts[:'triggering_event_external_id'].to_s.length > 36
+ fail ArgumentError, 'invalid value for "opts[:"triggering_event_external_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && !opts[:'triggering_event_external_id'].nil? && opts[:'triggering_event_external_id'].to_s.length < 36
+ fail ArgumentError, 'invalid value for "opts[:"triggering_event_external_id"]" when calling WorkflowRunsApi.v1_workflow_run_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/tenants/{tenant}/workflow-runs'.sub('{' + 'tenant' + '}', CGI.escape(tenant.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'since'] = since
+ query_params[:'only_tasks'] = only_tasks
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+ # :multi collection format sends one repeated query parameter per array element
+ query_params[:'statuses'] = @api_client.build_collection_param(opts[:'statuses'], :multi) if !opts[:'statuses'].nil?
+ # :_until maps to the plain `until` query parameter (underscore avoids the Ruby keyword)
+ query_params[:'until'] = opts[:'_until'] if !opts[:'_until'].nil?
+ query_params[:'additional_metadata'] = @api_client.build_collection_param(opts[:'additional_metadata'], :multi) if !opts[:'additional_metadata'].nil?
+ query_params[:'workflow_ids'] = @api_client.build_collection_param(opts[:'workflow_ids'], :multi) if !opts[:'workflow_ids'].nil?
+ query_params[:'worker_id'] = opts[:'worker_id'] if !opts[:'worker_id'].nil?
+ query_params[:'parent_task_external_id'] = opts[:'parent_task_external_id'] if !opts[:'parent_task_external_id'].nil?
+ query_params[:'triggering_event_external_id'] = opts[:'triggering_event_external_id'] if !opts[:'triggering_event_external_id'].nil?
+ query_params[:'include_payloads'] = opts[:'include_payloads'] if !opts[:'include_payloads'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskSummaryList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+
+ # List task events
+ # List all task events for a workflow run (the path is /task-events, despite the generated "List tasks" summary)
+ # @param v1_workflow_run [String] The workflow run id to find runs for
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [V1TaskEventList]
+ def v1_workflow_run_task_events_list(v1_workflow_run, opts = {})
+ # thin convenience wrapper: drops status code and headers from the _with_http_info variant
+ data, _status_code, _headers = v1_workflow_run_task_events_list_with_http_info(v1_workflow_run, opts)
+ data
+ end
+
+ # List task events
+ # List all task events for a workflow run
+ # @param v1_workflow_run [String] The workflow run id to find runs for
+ # @param [Hash] opts the optional parameters
+ # @option opts [Integer] :offset The number to skip
+ # @option opts [Integer] :limit The number to limit by
+ # @return [Array<(V1TaskEventList, Integer, Hash)>] V1TaskEventList data, response status code and response headers
+ def v1_workflow_run_task_events_list_with_http_info(v1_workflow_run, opts = {})
+ if @api_client.config.debugging
+ @api_client.config.logger.debug 'Calling API: WorkflowRunsApi.v1_workflow_run_task_events_list ...'
+ end
+ # verify the required parameter 'v1_workflow_run' is set
+ if @api_client.config.client_side_validation && v1_workflow_run.nil?
+ fail ArgumentError, "Missing the required parameter 'v1_workflow_run' when calling WorkflowRunsApi.v1_workflow_run_task_events_list"
+ end
+ # the id must be exactly 36 characters (presumably a UUID; confirm against API spec)
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length > 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_task_events_list, the character length must be smaller than or equal to 36.'
+ end
+
+ if @api_client.config.client_side_validation && v1_workflow_run.to_s.length < 36
+ fail ArgumentError, 'invalid value for "v1_workflow_run" when calling WorkflowRunsApi.v1_workflow_run_task_events_list, the character length must be greater than or equal to 36.'
+ end
+
+ # resource path
+ local_var_path = '/api/v1/stable/workflow-runs/{v1-workflow-run}/task-events'.sub('{' + 'v1-workflow-run' + '}', CGI.escape(v1_workflow_run.to_s))
+
+ # query parameters
+ query_params = opts[:query_params] || {}
+ query_params[:'offset'] = opts[:'offset'] if !opts[:'offset'].nil?
+ query_params[:'limit'] = opts[:'limit'] if !opts[:'limit'].nil?
+
+ # header parameters
+ header_params = opts[:header_params] || {}
+ # HTTP header 'Accept' (if needed)
+ header_params['Accept'] = @api_client.select_header_accept(['application/json']) unless header_params['Accept']
+
+ # form parameters
+ form_params = opts[:form_params] || {}
+
+ # http body (model)
+ post_body = opts[:debug_body]
+
+ # return_type
+ return_type = opts[:debug_return_type] || 'V1TaskEventList'
+
+ # auth_names
+ auth_names = opts[:debug_auth_names] || ['cookieAuth', 'bearerAuth']
+
+ new_options = opts.merge(
+ :operation => :"WorkflowRunsApi.v1_workflow_run_task_events_list",
+ :header_params => header_params,
+ :query_params => query_params,
+ :form_params => form_params,
+ :body => post_body,
+ :auth_names => auth_names,
+ :return_type => return_type
+ )
+
+ data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options)
+ if @api_client.config.debugging
+ @api_client.config.logger.debug "API called: WorkflowRunsApi#v1_workflow_run_task_events_list\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}"
+ end
+ return data, status_code, headers
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_client.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_client.rb
new file mode 100644
index 000000000..62db0c4ec
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_client.rb
@@ -0,0 +1,439 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'json'
+require 'logger'
+require 'tempfile'
+require 'time'
+require 'faraday'
+require 'faraday/multipart' if Gem::Version.new(Faraday::VERSION) >= Gem::Version.new('2.0')
+require 'marcel'
+
+
+module HatchetSdkRest
+ # HTTP client wrapper around Faraday used by all generated API classes.
+ class ApiClient
+ # The Configuration object holding settings to be used in the API client.
+ attr_accessor :config
+
+ # Defines the headers to be used in HTTP requests of all API calls by default.
+ #
+ # @return [Hash]
+ attr_accessor :default_headers
+
+ # Initializes the ApiClient
+ # @option config [Configuration] Configuration for initializing the object, default to Configuration.default
+ def initialize(config = Configuration.default)
+ @config = config
+ @user_agent = "OpenAPI-Generator/#{VERSION}/ruby"
+ @default_headers = {
+ 'Content-Type' => 'application/json',
+ 'User-Agent' => @user_agent
+ }
+ end
+
+ # Returns the lazily-created shared default ApiClient.
+ # NOTE(review): the @@default memoization is unsynchronized — presumably fine for
+ # single-threaded initialization; confirm if clients are built from multiple threads.
+ def self.default
+ @@default ||= ApiClient.new
+ end
+
+ # Call an API with given options.
+ #
+ # @return [Array<(Object, Integer, Hash)>] an array of 3 elements:
+ # the data deserialized from response body (could be nil), response status code and response headers.
+ def call_api(http_method, path, opts = {})
+ stream = nil
+ begin
+ response = connection(opts).public_send(http_method.to_sym.downcase) do |req|
+ request = build_request(http_method, path, req, opts)
+ # for File/Binary return types, collect the body in streamed chunks instead of buffering it whole
+ stream = download_file(request) if opts[:return_type] == 'File' || opts[:return_type] == 'Binary'
+ end
+
+ if config.debugging
+ config.logger.debug "HTTP response body ~BEGIN~\n#{response.body}\n~END~\n"
+ end
+
+ # non-2xx responses are surfaced as ApiError rather than returned
+ unless response.success?
+ if response.status == 0 && response.respond_to?(:return_message)
+ # Errors from libcurl will be made visible here
+ fail ApiError.new(code: 0,
+ message: response.return_message)
+ else
+ # fail <exception>, <message> — reason phrase becomes the exception message
+ fail ApiError.new(code: response.status,
+ response_headers: response.headers,
+ response_body: response.body),
+ response.reason_phrase
+ end
+ end
+ rescue Faraday::TimeoutError
+ fail ApiError.new('Connection timed out')
+ rescue Faraday::ConnectionFailed
+ fail ApiError.new('Connection failed')
+ end
+
+ if opts[:return_type] == 'File' || opts[:return_type] == 'Binary'
+ data = deserialize_file(response, stream)
+ elsif opts[:return_type]
+ data = deserialize(response, opts[:return_type])
+ else
+ # no :return_type given — caller only wants status/headers
+ data = nil
+ end
+ return data, response.status, response.headers
+ end
+
+ # Builds the HTTP request
+ #
+ # @param [String] http_method HTTP method/verb (e.g. POST)
+ # @param [String] path URL path (e.g. /account/new)
+ # @option opts [Hash] :header_params Header parameters
+ # @option opts [Hash] :query_params Query parameters
+ # @option opts [Hash] :form_params Query parameters
+ # @option opts [Object] :body HTTP body (JSON/XML)
+ # @return [Faraday::Request] A Faraday Request
+ def build_request(http_method, path, request, opts = {})
+ url = build_request_url(path, opts)
+ http_method = http_method.to_sym.downcase
+
+ # caller-supplied headers override the client-wide defaults
+ header_params = @default_headers.merge(opts[:header_params] || {})
+ query_params = opts[:query_params] || {}
+ form_params = opts[:form_params] || {}
+
+ update_params_for_auth! header_params, query_params, opts[:auth_names]
+
+ # only body-carrying verbs get a request body
+ if [:post, :patch, :put, :delete].include?(http_method)
+ req_body = build_request_body(header_params, form_params, opts[:body])
+ if config.debugging
+ config.logger.debug "HTTP request body param ~BEGIN~\n#{req_body}\n~END~\n"
+ end
+ end
+ request.headers = header_params
+ # NOTE: for GET/HEAD req_body is nil — the assignment inside the unexecuted branch
+ # still defines the local, so this simply sets a nil body
+ request.body = req_body
+
+ # Overload default options only if provided
+ request.options.params_encoder = config.params_encoder if config.params_encoder
+ request.options.timeout = config.timeout if config.timeout
+
+ request.url url
+ request.params = query_params
+ request
+ end
+
+ # Builds the HTTP request body
+ #
+ # @param [Hash] header_params Header parameters
+ # @param [Hash] form_params Query parameters
+ # @param [Object] body HTTP body (JSON/XML)
+ # @return [String] HTTP body data in the form of string
+ def build_request_body(header_params, form_params, body)
+ # http form
+ if header_params['Content-Type'] == 'application/x-www-form-urlencoded'
+ data = URI.encode_www_form(form_params)
+ elsif header_params['Content-Type'] == 'multipart/form-data'
+ data = {}
+ form_params.each do |key, value|
+ case value
+ when ::File, ::Tempfile
+ # wrap files so Faraday's multipart middleware uploads them with a detected MIME type
+ data[key] = Faraday::FilePart.new(value.path, Marcel::MimeType.for(Pathname.new(value.path)))
+ when ::Array, nil
+ # let Faraday handle Array and nil parameters
+ data[key] = value
+ else
+ data[key] = value.to_s
+ end
+ end
+ elsif body
+ # pre-serialized strings pass through; anything else is JSON-encoded
+ data = body.is_a?(String) ? body : body.to_json
+ else
+ data = nil
+ end
+ data
+ end
+
+ # Registers a streaming on_data callback on the request and returns the Array
+ # that will accumulate the response chunks (used for File/Binary return types).
+ def download_file(request)
+ stream = []
+
+ # handle streaming Responses
+ request.options.on_data = Proc.new do |chunk, overall_received_bytes|
+ stream << chunk
+ end
+
+ stream
+ end
+
+ # Reassembles a streamed download into either raw binary data or a Tempfile,
+ # depending on config.return_binary_data.
+ # @param [Faraday::Response] response the completed HTTP response (headers/encoding source)
+ # @param [Array<String>] stream chunks collected by download_file
+ def deserialize_file(response, stream)
+ body = response.body
+ encoding = body.encoding
+
+ # reconstruct content
+ content = stream.join
+ content = content.unpack('m').join if response.headers['Content-Transfer-Encoding'] == 'binary'
+ content = content.force_encoding(encoding)
+
+ # return byte stream
+ return content if @config.return_binary_data == true
+
+ # return file instead of binary data
+ content_disposition = response.headers['Content-Disposition']
+ if content_disposition && content_disposition =~ /filename=/i
+ filename = content_disposition[/filename=['"]?([^'"\s]+)['"]?/, 1]
+ prefix = sanitize_filename(filename)
+ else
+ prefix = 'download-'
+ end
+ prefix = prefix + '-' unless prefix.end_with?('-')
+
+ tempfile = Tempfile.open(prefix, @config.temp_folder_path, encoding: encoding)
+ tempfile.write(content)
+ tempfile.close
+
+ config.logger.info "Temp file written to #{tempfile.path}, please copy the file to a proper folder "\
+ "with e.g. `FileUtils.cp(tempfile.path, '/new/file/path')` otherwise the temp file "\
+ "will be deleted automatically with GC. It's also recommended to delete the temp file "\
+ "explicitly with `tempfile.delete`"
+ tempfile
+ end
+
+ # Picks the multipart or regular Faraday connection based on the request Content-Type.
+ def connection(opts)
+ opts[:header_params]['Content-Type'] == 'multipart/form-data' ? connection_multipart : connection_regular
+ end
+
+ # Memoized Faraday connection with multipart + url_encoded request middleware.
+ def connection_multipart
+ @connection_multipart ||= build_connection do |conn|
+ conn.request :multipart
+ conn.request :url_encoded
+ end
+ end
+
+ # Memoized plain Faraday connection (no form-encoding middleware).
+ def connection_regular
+ @connection_regular ||= build_connection
+ end
+
+ # Builds a Faraday connection from the client configuration; the optional block
+ # lets callers add extra request middleware before the adapter is set.
+ def build_connection
+ Faraday.new(url: config.base_url, ssl: ssl_options, proxy: config.proxy) do |conn|
+ basic_auth(conn)
+ config.configure_middleware(conn)
+ yield(conn) if block_given?
+ conn.adapter(Faraday.default_adapter)
+ config.configure_connection(conn)
+ end
+ end
+
+ # SSL options hash passed to Faraday, sourced entirely from the Configuration.
+ def ssl_options
+ {
+ ca_file: config.ssl_ca_file,
+ verify: config.ssl_verify,
+ verify_mode: config.ssl_verify_mode,
+ client_cert: config.ssl_client_cert,
+ client_key: config.ssl_client_key
+ }
+ end
+
+ # Installs HTTP basic auth on the connection when username/password are configured;
+ # uses the Faraday 2.x :authorization middleware name when available.
+ def basic_auth(conn)
+ if config.username && config.password
+ if Gem::Version.new(Faraday::VERSION) >= Gem::Version.new('2.0')
+ conn.request(:authorization, :basic, config.username, config.password)
+ else
+ conn.request(:basic_auth, config.username, config.password)
+ end
+ end
+ end
+
+ # Check if the given MIME is a JSON MIME.
+ # JSON MIME examples:
+ # application/json
+ # application/json; charset=UTF8
+ # APPLICATION/JSON
+ # */*
+ # @param [String] mime MIME
+ # @return [Boolean] True if the MIME is application/json
+ def json_mime?(mime)
+ (mime == '*/*') || !(mime =~ /^Application\/.*json(?!p)(;.*)?/i).nil?
+ end
+
+ # Deserialize the response to the given return type.
+ #
+ # @param [Response] response HTTP response
+ # @param [String] return_type some examples: "User", "Array", "Hash"
+ def deserialize(response, return_type)
+ body = response.body
+ return nil if body.nil? || body.empty?
+
+ # return response body directly for String return type
+ return body.to_s if return_type == 'String'
+
+ # ensuring a default content type
+ content_type = response.headers['Content-Type'] || 'application/json'
+
+ fail "Content-Type is not supported: #{content_type}" unless json_mime?(content_type)
+
+ begin
+ # wrap the body in [...] so JSON.parse also accepts bare scalars (numbers, strings)
+ data = JSON.parse("[#{body}]", :symbolize_names => true)[0]
+ rescue JSON::ParserError => e
+ if %w(String Date Time).include?(return_type)
+ # unquoted date/time/string bodies are passed through as-is
+ data = body
+ else
+ raise e
+ end
+ end
+
+ convert_to_type data, return_type
+ end
+
+ # Convert data to the given return type.
+ # @param [Object] data Data to be converted
+ # @param [String] return_type Return type
+ # @return [Mixed] Data in a particular type
+ def convert_to_type(data, return_type)
+ return nil if data.nil?
+ case return_type
+ when 'String'
+ data.to_s
+ when 'Integer'
+ data.to_i
+ when 'Float'
+ data.to_f
+ when 'Boolean'
+ data == true
+ when 'Time'
+ # parse date time (expecting ISO 8601 format)
+ Time.parse data
+ when 'Date'
+ # parse date time (expecting ISO 8601 format)
+ Date.parse data
+ when 'Object'
+ # generic object (usually a Hash), return directly
+ data
+ when /\AArray<(.+)>\z/
+ # e.g. Array
+ sub_type = $1
+ data.map { |item| convert_to_type(item, sub_type) }
+ when /\AHash\\z/
+ # e.g. Hash
+ sub_type = $1
+ {}.tap do |hash|
+ data.each { |k, v| hash[k] = convert_to_type(v, sub_type) }
+ end
+ else
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(return_type)
+ klass.respond_to?(:openapi_one_of) ? klass.build(data) : klass.build_from_hash(data)
+ end
+ end
+
+ # Sanitize filename by removing path.
+ # e.g. ../../sun.gif becomes sun.gif
+ #
+ # @param [String] filename the filename to be sanitized
+ # @return [String] the sanitized filename
+ def sanitize_filename(filename)
+ # splits on both POSIX and Windows separators and keeps only the last segment
+ filename.split(/[\/\\]/).last
+ end
+
+ # Joins the configured base URL (possibly operation-specific) with the given path.
+ def build_request_url(path, opts = {})
+ # Add leading and trailing slashes to path
+ path = "/#{path}".gsub(/\/+/, '/')
+ @config.base_url(opts[:operation]) + path
+ end
+
+ # Update header and query params based on authentication settings.
+ #
+ # @param [Hash] header_params Header parameters
+ # @param [Hash] query_params Query parameters
+ # @param [String] auth_names Authentication scheme name
+ def update_params_for_auth!(header_params, query_params, auth_names)
+ Array(auth_names).each do |auth_name|
+ auth_setting = @config.auth_settings[auth_name]
+ next unless auth_setting
+ # skip schemes with no credential configured
+ next if auth_setting[:value].nil? || auth_setting[:value].to_s.empty?
+ case auth_setting[:in]
+ when 'header' then header_params[auth_setting[:key]] = auth_setting[:value]
+ when 'query' then query_params[auth_setting[:key]] = auth_setting[:value]
+ when 'cookie' then header_params['Cookie'] = "#{auth_setting[:key]}=#{auth_setting[:value]}"
+ else next # skip unsupported auth locations
+ end
+ end
+ end
+
+ # Sets user agent in HTTP header
+ #
+ # @param [String] user_agent User agent (e.g. openapi-generator/ruby/1.0.0)
+ def user_agent=(user_agent)
+ @user_agent = user_agent
+ @default_headers['User-Agent'] = @user_agent
+ end
+
+ # Return Accept header based on an array of accepts provided.
+ # @param [Array] accepts array for Accept
+ # @return [String] the Accept header (e.g. application/json)
+ def select_header_accept(accepts)
+ return nil if accepts.nil? || accepts.empty?
+ # use JSON when present, otherwise use all of the provided
+ json_accept = accepts.find { |s| json_mime?(s) }
+ json_accept || accepts.join(',')
+ end
+
+ # Return Content-Type header based on an array of content types provided.
+ # @param [Array] content_types array for Content-Type
+ # @return [String] the Content-Type header (e.g. application/json)
+ def select_header_content_type(content_types)
+ # return nil by default
+ return if content_types.nil? || content_types.empty?
+ # use JSON when present, otherwise use the first one
+ json_content_type = content_types.find { |s| json_mime?(s) }
+ json_content_type || content_types.first
+ end
+
+ # Convert object (array, hash, object, etc) to JSON string.
+ # @param [Object] model object to be converted into JSON string
+ # @return [String] JSON string representation of the object
+ def object_to_http_body(model)
+ # pre-serialized strings and nil pass through untouched
+ return model if model.nil? || model.is_a?(String)
+ local_body = nil
+ if model.is_a?(Array)
+ local_body = model.map { |m| object_to_hash(m) }
+ else
+ local_body = object_to_hash(model)
+ end
+ local_body.to_json
+ end
+
+ # Convert object(non-array) to hash.
+ # @param [Object] obj object to be converted into JSON string
+ # @return [String] JSON string representation of the object
+ def object_to_hash(obj)
+ if obj.respond_to?(:to_hash)
+ obj.to_hash
+ else
+ obj
+ end
+ end
+
+ # Build parameter value according to the given collection format.
+ # @param [String] collection_format one of :csv, :ssv, :tsv, :pipes and :multi
+ def build_collection_param(param, collection_format)
+ case collection_format
+ when :csv
+ param.join(',')
+ when :ssv
+ param.join(' ')
+ when :tsv
+ param.join("\t")
+ when :pipes
+ param.join('|')
+ when :multi
+ # return the array directly as typhoeus will handle it as expected
+ param
+ else
+ fail "unknown collection format: #{collection_format.inspect}"
+ end
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_error.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_error.rb
new file mode 100644
index 000000000..4f5c7f89e
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/api_error.rb
@@ -0,0 +1,58 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+module HatchetSdkRest
+ # Error raised for failed API calls; carries the HTTP status, headers and body
+ # of the failing response when available.
+ class ApiError < StandardError
+ attr_reader :code, :response_headers, :response_body
+
+ # Usage examples:
+ # ApiError.new
+ # ApiError.new("message")
+ # ApiError.new(:code => 500, :response_headers => {}, :response_body => "")
+ # ApiError.new(:code => 404, :message => "Not Found")
+ def initialize(arg = nil)
+ if arg.is_a? Hash
+ if arg.key?(:message) || arg.key?('message')
+ super(arg[:message] || arg['message'])
+ else
+ # NOTE(review): a Hash without :message is passed straight to StandardError,
+ # so its String form becomes the default exception message
+ super arg
+ end
+
+ # every Hash key becomes an instance variable (@code, @response_body, ...)
+ arg.each do |k, v|
+ instance_variable_set "@#{k}", v
+ end
+ else
+ super arg
+ @message = arg
+ end
+ end
+
+ # Override to_s to display a friendly error message
+ def to_s
+ message
+ end
+
+ # Composite message: base text plus status code, headers and body when present.
+ def message
+ if @message.nil?
+ msg = "Error message: the server returns an error"
+ else
+ msg = @message
+ end
+
+ msg += "\nHTTP status code: #{code}" if code
+ msg += "\nResponse headers: #{response_headers}" if response_headers
+ msg += "\nResponse body: #{response_body}" if response_body
+
+ msg
+ end
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/configuration.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/configuration.rb
new file mode 100644
index 000000000..249f32bfb
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/configuration.rb
@@ -0,0 +1,406 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+module HatchetSdkRest
+ class Configuration
+ # Defines url scheme
+ attr_accessor :scheme
+
+ # Defines url host
+ attr_accessor :host
+
+ # Defines url base path
+ attr_accessor :base_path
+
+ # Define server configuration index
+ attr_accessor :server_index
+
+ # Define server operation configuration index
+ attr_accessor :server_operation_index
+
+ # Default server variables
+ attr_accessor :server_variables
+
+ # Default server operation variables
+ attr_accessor :server_operation_variables
+
+ # Defines API keys used with API Key authentications.
+ #
+ # @return [Hash] key: parameter name, value: parameter value (API key)
+ #
+ # @example parameter name is "api_key", API key is "xxx" (e.g. "api_key=xxx" in query string)
+ # config.api_key['api_key'] = 'xxx'
+ attr_accessor :api_key
+
+ # Defines API key prefixes used with API Key authentications.
+ #
+ # @return [Hash] key: parameter name, value: API key prefix
+ #
+ # @example parameter name is "Authorization", API key prefix is "Token" (e.g. "Authorization: Token xxx" in headers)
+ # config.api_key_prefix['api_key'] = 'Token'
+ attr_accessor :api_key_prefix
+
+ # Defines the username used with HTTP basic authentication.
+ #
+ # @return [String]
+ attr_accessor :username
+
+ # Defines the password used with HTTP basic authentication.
+ #
+ # @return [String]
+ attr_accessor :password
+
+ # Defines the access token (Bearer) used with OAuth2.
+ attr_accessor :access_token
+
+ # Defines a Proc used to fetch or refresh access tokens (Bearer) used with OAuth2.
+ # Overrides the access_token if set
+ # @return [Proc]
+ attr_accessor :access_token_getter
+
+ # Set this to return data as binary instead of downloading a temp file. When enabled (set to true)
+ # HTTP responses with return type `File` will be returned as a stream of binary data.
+ # Default to false.
+ attr_accessor :return_binary_data
+
+ # Set this to enable/disable debugging. When enabled (set to true), HTTP request/response
+ # details will be logged with `logger.debug` (see the `logger` attribute).
+ # Default to false.
+ #
+ # @return [true, false]
+ attr_accessor :debugging
+
+ # Set this to ignore operation servers for the API client. This is useful when you need to
+ # send requests to a different server than the one specified in the OpenAPI document.
+ # Will default to the base url defined in the spec but can be overridden by setting
+ # `scheme`, `host`, `base_path` directly.
+ # Default to false.
+ # @return [true, false]
+ attr_accessor :ignore_operation_servers
+
+ # Defines the logger used for debugging.
+ # Default to `Rails.logger` (when in Rails) or logging to STDOUT.
+ #
+ # @return [#debug]
+ attr_accessor :logger
+
+ # Defines the temporary folder to store downloaded files
+ # (for API endpoints that have file response).
+ # Default to use `Tempfile`.
+ #
+ # @return [String]
+ attr_accessor :temp_folder_path
+
+ # The time limit for HTTP request in seconds.
+ # Default to 0 (never times out).
+ attr_accessor :timeout
+
+ # Set this to false to skip client side validation in the operation.
+ # Default to true.
+ # @return [true, false]
+ attr_accessor :client_side_validation
+
+ ### TLS/SSL setting
+ # Set this to false to skip verifying SSL certificate when calling API from https server.
+ # Default to true.
+ #
+ # @note Do NOT set it to false in production code, otherwise you would face multiple types of cryptographic attacks.
+ #
+ # @return [true, false]
+ attr_accessor :ssl_verify
+
+ ### TLS/SSL setting
+ # Any `OpenSSL::SSL::` constant (see https://ruby-doc.org/stdlib-2.5.1/libdoc/openssl/rdoc/OpenSSL/SSL.html)
+ #
+ # @note Do NOT set it to false in production code, otherwise you would face multiple types of cryptographic attacks.
+ #
+ attr_accessor :ssl_verify_mode
+
+ ### TLS/SSL setting
+ # Set this to customize the certificate file to verify the peer.
+ #
+ # @return [String] the path to the certificate file
+ attr_accessor :ssl_ca_file
+
+ ### TLS/SSL setting
+ # Client certificate file (for client certificate)
+ attr_accessor :ssl_client_cert
+
+ ### TLS/SSL setting
+ # Client private key file (for client certificate)
+ attr_accessor :ssl_client_key
+
+ ### Proxy setting
+ # HTTP Proxy settings
+ attr_accessor :proxy
+
+ # Set this to customize parameters encoder of array parameter.
+ # Default to nil. Faraday uses NestedParamsEncoder when nil.
+ #
+ # @see The params_encoder option of Faraday. Related source code:
+ # https://github.com/lostisland/faraday/tree/main/lib/faraday/encoders
+ attr_accessor :params_encoder
+
+
+ attr_accessor :inject_format
+
+ attr_accessor :force_ending_format
+
+ def initialize
+ @scheme = 'http'
+ @host = 'localhost'
+ @base_path = ''
+ @server_index = nil
+ @server_operation_index = {}
+ @server_variables = {}
+ @server_operation_variables = {}
+ @api_key = {}
+ @api_key_prefix = {}
+ @client_side_validation = true
+ @ssl_verify = true
+ @ssl_verify_mode = nil
+ @ssl_ca_file = nil
+ @ssl_client_cert = nil
+ @ssl_client_key = nil
+ @middlewares = Hash.new { |h, k| h[k] = [] }
+ @configure_connection_blocks = []
+ @timeout = 60
+ # return data as binary instead of file
+ @return_binary_data = false
+ @params_encoder = nil
+ @debugging = false
+ @ignore_operation_servers = false
+ @inject_format = false
+ @force_ending_format = false
+ @logger = defined?(Rails) ? Rails.logger : Logger.new(STDOUT)
+
+ yield(self) if block_given?
+ end
+
+ # The default Configuration object.
+ def self.default
+ @@default ||= Configuration.new
+ end
+
+ def configure
+ yield(self) if block_given?
+ end
+
+ def scheme=(scheme)
+ # remove :// from scheme
+ @scheme = scheme.sub(/:\/\//, '')
+ end
+
+ def host=(host)
+ # remove http(s):// and anything after a slash
+ @host = host.sub(/https?:\/\//, '').split('/').first
+ end
+
+ def base_path=(base_path)
+ # Add leading and trailing slashes to base_path
+ @base_path = "/#{base_path}".gsub(/\/+/, '/')
+ @base_path = '' if @base_path == '/'
+ end
+
+ # Returns base URL for specified operation based on server settings
+ def base_url(operation = nil)
+ return "#{scheme}://#{[host, base_path].join('/').gsub(/\/+/, '/')}".sub(/\/+\z/, '') if ignore_operation_servers
+ if operation_server_settings.key?(operation) then
+ index = server_operation_index.fetch(operation, server_index)
+ server_url(index.nil? ? 0 : index, server_operation_variables.fetch(operation, server_variables), operation_server_settings[operation])
+ else
+ server_index.nil? ? "#{scheme}://#{[host, base_path].join('/').gsub(/\/+/, '/')}".sub(/\/+\z/, '') : server_url(server_index, server_variables, nil)
+ end
+ end
+
+ # Gets API key (with prefix if set).
+ # @param [String] param_name the parameter name of API key auth
+ def api_key_with_prefix(param_name, param_alias = nil)
+ key = @api_key[param_name]
+ key = @api_key.fetch(param_alias, key) unless param_alias.nil?
+ if @api_key_prefix[param_name]
+ "#{@api_key_prefix[param_name]} #{key}"
+ else
+ key
+ end
+ end
+
+ # Gets access_token using access_token_getter or uses the static access_token
+ def access_token_with_refresh
+ return access_token if access_token_getter.nil?
+ access_token_getter.call
+ end
+
+ # Gets Basic Auth token string
+ def basic_auth_token
+ 'Basic ' + ["#{username}:#{password}"].pack('m').delete("\r\n")
+ end
+
+ # Returns Auth Settings hash for api client.
+ def auth_settings
+ {
+ 'bearerAuth' =>
+ {
+ type: 'bearer',
+ in: 'header',
+ key: 'Authorization',
+ value: "Bearer #{access_token_with_refresh}"
+ },
+ 'cookieAuth' =>
+ {
+ type: 'api_key',
+ in: 'cookie',
+ key: 'hatchet',
+ value: api_key_with_prefix('hatchet')
+ },
+ 'customAuth' =>
+ {
+ type: 'bearer',
+ in: 'header',
+ key: 'Authorization',
+ value: "Bearer #{access_token_with_refresh}"
+ },
+ }
+ end
+
+ # Returns an array of Server setting
+ def server_settings
+ [
+ {
+ url: "",
+ description: "No description provided",
+ }
+ ]
+ end
+
+ def operation_server_settings
+ {
+ }
+ end
+
+ # Returns URL based on server settings
+ #
+ # @param index array index of the server settings
+ # @param variables hash of variable and the corresponding value
+ def server_url(index, variables = {}, servers = nil)
+ servers = server_settings if servers == nil
+
+ # check array index out of bound
+ if (index.nil? || index < 0 || index >= servers.size)
+ fail ArgumentError, "Invalid index #{index} when selecting the server. Must not be nil and must be less than #{servers.size}"
+ end
+
+ server = servers[index]
+ url = server[:url]
+
+ return url unless server.key? :variables
+
+ # go through variable and assign a value
+ server[:variables].each do |name, variable|
+ if variables.key?(name)
+ if (!server[:variables][name].key?(:enum_values) || server[:variables][name][:enum_values].include?(variables[name]))
+ url.gsub! "{" + name.to_s + "}", variables[name]
+ else
+ fail ArgumentError, "The variable `#{name}` in the server URL has invalid value #{variables[name]}. Must be #{server[:variables][name][:enum_values]}."
+ end
+ else
+ # use default value
+ url.gsub! "{" + name.to_s + "}", server[:variables][name][:default_value]
+ end
+ end
+
+ url
+ end
+
+ # Configure Faraday connection directly.
+ #
+ # ```
+ # c.configure_faraday_connection do |conn|
+ # conn.use Faraday::HttpCache, shared_cache: false, logger: logger
+ # conn.response :logger, nil, headers: true, bodies: true, log_level: :debug do |logger|
+ # logger.filter(/(Authorization: )(.*)/, '\1[REDACTED]')
+ # end
+ # end
+ #
+ # c.configure_faraday_connection do |conn|
+ # conn.adapter :typhoeus
+ # end
+ # ```
+ #
+ # @param block [Proc] `#call`able object that takes one arg, the connection
+ def configure_faraday_connection(&block)
+ @configure_connection_blocks << block
+ end
+
+ def configure_connection(conn)
+ @configure_connection_blocks.each do |block|
+ block.call(conn)
+ end
+ end
+
+ # Adds middleware to the stack
+ def use(*middleware)
+ set_faraday_middleware(:use, *middleware)
+ end
+
+ # Adds request middleware to the stack
+ def request(*middleware)
+ set_faraday_middleware(:request, *middleware)
+ end
+
+ # Adds response middleware to the stack
+ def response(*middleware)
+ set_faraday_middleware(:response, *middleware)
+ end
+
+ # Adds Faraday middleware setting information to the stack
+ #
+ # @example Use the `set_faraday_middleware` method to set middleware information
+ # config.set_faraday_middleware(:request, :retry, max: 3, methods: [:get, :post], retry_statuses: [503])
+ # config.set_faraday_middleware(:response, :logger, nil, { bodies: true, log_level: :debug })
+ # config.set_faraday_middleware(:use, Faraday::HttpCache, store: Rails.cache, shared_cache: false)
+ # config.set_faraday_middleware(:insert, 0, FaradayMiddleware::FollowRedirects, { standards_compliant: true, limit: 1 })
+ # config.set_faraday_middleware(:swap, 0, Faraday::Response::Logger)
+ # config.set_faraday_middleware(:delete, Faraday::Multipart::Middleware)
+ #
+ # @see https://github.com/lostisland/faraday/blob/v2.3.0/lib/faraday/rack_builder.rb#L92-L143
+ def set_faraday_middleware(operation, key, *args, &block)
+ unless [:request, :response, :use, :insert, :insert_before, :insert_after, :swap, :delete].include?(operation)
+ fail ArgumentError, "Invalid faraday middleware operation #{operation}. Must be" \
+ " :request, :response, :use, :insert, :insert_before, :insert_after, :swap or :delete."
+ end
+
+ @middlewares[operation] << [key, args, block]
+ end
+ ruby2_keywords(:set_faraday_middleware) if respond_to?(:ruby2_keywords, true)
+
+ # Set up middleware on the connection
+ def configure_middleware(connection)
+ return if @middlewares.empty?
+
+ [:request, :response, :use, :insert, :insert_before, :insert_after, :swap].each do |operation|
+ next unless @middlewares.key?(operation)
+
+ @middlewares[operation].each do |key, args, block|
+ connection.builder.send(operation, key, *args, &block)
+ end
+ end
+
+ if @middlewares.key?(:delete)
+ @middlewares[:delete].each do |key, _args, _block|
+ connection.builder.delete(key)
+ end
+ end
+ end
+
+ end
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/accept_invite_request.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/accept_invite_request.rb
new file mode 100644
index 000000000..1ed71da35
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/accept_invite_request.rb
@@ -0,0 +1,255 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class AcceptInviteRequest
+ attr_accessor :invite
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'invite' => :'invite'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'invite' => :'String'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::AcceptInviteRequest` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::AcceptInviteRequest`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'invite')
+ self.invite = attributes[:'invite']
+ else
+ self.invite = nil
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ if @invite.nil?
+ invalid_properties.push('invalid value for "invite", invite cannot be nil.')
+ end
+
+ if @invite.to_s.length > 36
+ invalid_properties.push('invalid value for "invite", the character length must be smaller than or equal to 36.')
+ end
+
+ if @invite.to_s.length < 36
+ invalid_properties.push('invalid value for "invite", the character length must be greater than or equal to 36.')
+ end
+
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ return false if @invite.nil?
+ return false if @invite.to_s.length > 36
+ return false if @invite.to_s.length < 36
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param [Object] invite Value to be assigned
+ def invite=(invite)
+ if invite.nil?
+ fail ArgumentError, 'invite cannot be nil'
+ end
+
+ if invite.to_s.length > 36
+ fail ArgumentError, 'invalid value for "invite", the character length must be smaller than or equal to 36.'
+ end
+
+ if invite.to_s.length < 36
+ fail ArgumentError, 'invalid value for "invite", the character length must be greater than or equal to 36.'
+ end
+
+ @invite = invite
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ invite == o.invite
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [invite].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?.+?), (?.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_error.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_error.rb
new file mode 100644
index 000000000..aa5ef7635
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_error.rb
@@ -0,0 +1,268 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIError
+ # a description for this error
+ attr_accessor :description
+
+ # a custom Hatchet error code
+ attr_accessor :code
+
+ # the field that this error is associated with, if applicable
+ attr_accessor :field
+
+ # a link to the documentation for this error, if it exists
+ attr_accessor :docs_link
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'description' => :'description',
+ :'code' => :'code',
+ :'field' => :'field',
+ :'docs_link' => :'docs_link'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'description' => :'String',
+ :'code' => :'Integer',
+ :'field' => :'String',
+ :'docs_link' => :'String'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIError` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIError`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'description')
+ self.description = attributes[:'description']
+ else
+ self.description = nil
+ end
+
+ if attributes.key?(:'code')
+ self.code = attributes[:'code']
+ end
+
+ if attributes.key?(:'field')
+ self.field = attributes[:'field']
+ end
+
+ if attributes.key?(:'docs_link')
+ self.docs_link = attributes[:'docs_link']
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ if @description.nil?
+ invalid_properties.push('invalid value for "description", description cannot be nil.')
+ end
+
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ return false if @description.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param [Object] description Value to be assigned
+ def description=(description)
+ if description.nil?
+ fail ArgumentError, 'description cannot be nil'
+ end
+
+ @description = description
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ description == o.description &&
+ code == o.code &&
+ field == o.field &&
+ docs_link == o.docs_link
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [description, code, field, docs_link].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?.+?), (?.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_errors.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_errors.rb
new file mode 100644
index 000000000..ead6d3978
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_errors.rb
@@ -0,0 +1,239 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIErrors
+ attr_accessor :errors
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'errors' => :'errors'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'errors' => :'Array'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIErrors` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIErrors`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'errors')
+ if (value = attributes[:'errors']).is_a?(Array)
+ self.errors = value
+ end
+ else
+ self.errors = nil
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ if @errors.nil?
+ invalid_properties.push('invalid value for "errors", errors cannot be nil.')
+ end
+
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ return false if @errors.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param [Object] errors Value to be assigned
+ def errors=(errors)
+ if errors.nil?
+ fail ArgumentError, 'errors cannot be nil'
+ end
+
+ @errors = errors
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ errors == o.errors
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [errors].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?.+?), (?.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta.rb
new file mode 100644
index 000000000..9de9bfdf1
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta.rb
@@ -0,0 +1,279 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIMeta
+ attr_accessor :auth
+
+ # the Pylon app ID for usepylon.com chat support
+ attr_accessor :pylon_app_id
+
+ attr_accessor :posthog
+
+ # whether or not users can sign up for this instance
+ attr_accessor :allow_signup
+
+ # whether or not users can invite other users to this instance
+ attr_accessor :allow_invites
+
+ # whether or not users can create new tenants
+ attr_accessor :allow_create_tenant
+
+ # whether or not users can change their password
+ attr_accessor :allow_change_password
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'auth' => :'auth',
+ :'pylon_app_id' => :'pylonAppId',
+ :'posthog' => :'posthog',
+ :'allow_signup' => :'allowSignup',
+ :'allow_invites' => :'allowInvites',
+ :'allow_create_tenant' => :'allowCreateTenant',
+ :'allow_change_password' => :'allowChangePassword'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'auth' => :'APIMetaAuth',
+ :'pylon_app_id' => :'String',
+ :'posthog' => :'APIMetaPosthog',
+ :'allow_signup' => :'Boolean',
+ :'allow_invites' => :'Boolean',
+ :'allow_create_tenant' => :'Boolean',
+ :'allow_change_password' => :'Boolean'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIMeta` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIMeta`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'auth')
+ self.auth = attributes[:'auth']
+ end
+
+ if attributes.key?(:'pylon_app_id')
+ self.pylon_app_id = attributes[:'pylon_app_id']
+ end
+
+ if attributes.key?(:'posthog')
+ self.posthog = attributes[:'posthog']
+ end
+
+ if attributes.key?(:'allow_signup')
+ self.allow_signup = attributes[:'allow_signup']
+ end
+
+ if attributes.key?(:'allow_invites')
+ self.allow_invites = attributes[:'allow_invites']
+ end
+
+ if attributes.key?(:'allow_create_tenant')
+ self.allow_create_tenant = attributes[:'allow_create_tenant']
+ end
+
+ if attributes.key?(:'allow_change_password')
+ self.allow_change_password = attributes[:'allow_change_password']
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ true
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ auth == o.auth &&
+ pylon_app_id == o.pylon_app_id &&
+ posthog == o.posthog &&
+ allow_signup == o.allow_signup &&
+ allow_invites == o.allow_invites &&
+ allow_create_tenant == o.allow_create_tenant &&
+ allow_change_password == o.allow_change_password
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [auth, pylon_app_id, posthog, allow_signup, allow_invites, allow_create_tenant, allow_change_password].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?<inner_type>.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_auth.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_auth.rb
new file mode 100644
index 000000000..21e65356f
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_auth.rb
@@ -0,0 +1,223 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIMetaAuth
+ # the supported types of authentication
+ attr_accessor :schemes
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'schemes' => :'schemes'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'schemes' => :'Array<String>'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIMetaAuth` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIMetaAuth`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'schemes')
+ if (value = attributes[:'schemes']).is_a?(Array)
+ self.schemes = value
+ end
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ true
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ schemes == o.schemes
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [schemes].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?<inner_type>.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_integration.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_integration.rb
new file mode 100644
index 000000000..0d62dcdff
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_integration.rb
@@ -0,0 +1,265 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIMetaIntegration
+ # the name of the integration
+ attr_accessor :name
+
+ # whether this integration is enabled on the instance
+ attr_accessor :enabled
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'name' => :'name',
+ :'enabled' => :'enabled'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'name' => :'String',
+ :'enabled' => :'Boolean'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIMetaIntegration` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIMetaIntegration`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'name')
+ self.name = attributes[:'name']
+ else
+ self.name = nil
+ end
+
+ if attributes.key?(:'enabled')
+ self.enabled = attributes[:'enabled']
+ else
+ self.enabled = nil
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ if @name.nil?
+ invalid_properties.push('invalid value for "name", name cannot be nil.')
+ end
+
+ if @enabled.nil?
+ invalid_properties.push('invalid value for "enabled", enabled cannot be nil.')
+ end
+
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ return false if @name.nil?
+ return false if @enabled.nil?
+ true
+ end
+
+ # Custom attribute writer method with validation
+ # @param [Object] name Value to be assigned
+ def name=(name)
+ if name.nil?
+ fail ArgumentError, 'name cannot be nil'
+ end
+
+ @name = name
+ end
+
+ # Custom attribute writer method with validation
+ # @param [Object] enabled Value to be assigned
+ def enabled=(enabled)
+ if enabled.nil?
+ fail ArgumentError, 'enabled cannot be nil'
+ end
+
+ @enabled = enabled
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ name == o.name &&
+ enabled == o.enabled
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [name, enabled].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?<inner_type>.+)>\z/
+ inner_type = Regexp.last_match[:inner_type]
+ value.map { |v| _deserialize(inner_type, v) }
+ when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
+ k_type = Regexp.last_match[:k_type]
+ v_type = Regexp.last_match[:v_type]
+ {}.tap do |hash|
+ value.each do |k, v|
+ hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
+ end
+ end
+ else # model
+ # models (e.g. Pet) or oneOf
+ klass = HatchetSdkRest.const_get(type)
+ klass.respond_to?(:openapi_any_of) || klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
+ end
+ end
+
+ # Returns the string representation of the object
+ # @return [String] String presentation of the object
+ def to_s
+ to_hash.to_s
+ end
+
+ # to_body is an alias to to_hash (backward compatibility)
+ # @return [Hash] Returns the object in the form of hash
+ def to_body
+ to_hash
+ end
+
+ # Returns the object in the form of hash
+ # @return [Hash] Returns the object in the form of hash
+ def to_hash
+ hash = {}
+ self.class.attribute_map.each_pair do |attr, param|
+ value = self.send(attr)
+ if value.nil?
+ is_nullable = self.class.openapi_nullable.include?(attr)
+ next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+ end
+
+ hash[param] = _to_hash(value)
+ end
+ hash
+ end
+
+ # Outputs non-array value in the form of hash
+ # For object, use to_hash. Otherwise, just return the value
+ # @param [Object] value Any valid value
+ # @return [Hash] Returns the value in the form of hash
+ def _to_hash(value)
+ if value.is_a?(Array)
+ value.compact.map { |v| _to_hash(v) }
+ elsif value.is_a?(Hash)
+ {}.tap do |hash|
+ value.each { |k, v| hash[k] = _to_hash(v) }
+ end
+ elsif value.respond_to? :to_hash
+ value.to_hash
+ else
+ value
+ end
+ end
+
+ end
+
+end
diff --git a/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_posthog.rb b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_posthog.rb
new file mode 100644
index 000000000..aa491854f
--- /dev/null
+++ b/sdks/ruby/src/lib/hatchet/clients/rest/lib/hatchet-sdk-rest/models/api_meta_posthog.rb
@@ -0,0 +1,231 @@
+=begin
+#Hatchet API
+
+#The Hatchet API
+
+The version of the OpenAPI document: 1.0.0
+
+Generated by: https://openapi-generator.tech
+Generator version: 7.14.0
+
+=end
+
+require 'date'
+require 'time'
+
+module HatchetSdkRest
+ class APIMetaPosthog
+ # the PostHog API key
+ attr_accessor :api_key
+
+ # the PostHog API host
+ attr_accessor :api_host
+
+ # Attribute mapping from ruby-style variable name to JSON key.
+ def self.attribute_map
+ {
+ :'api_key' => :'apiKey',
+ :'api_host' => :'apiHost'
+ }
+ end
+
+ # Returns attribute mapping this model knows about
+ def self.acceptable_attribute_map
+ attribute_map
+ end
+
+ # Returns all the JSON keys this model knows about
+ def self.acceptable_attributes
+ acceptable_attribute_map.values
+ end
+
+ # Attribute type mapping.
+ def self.openapi_types
+ {
+ :'api_key' => :'String',
+ :'api_host' => :'String'
+ }
+ end
+
+ # List of attributes with nullable: true
+ def self.openapi_nullable
+ Set.new([
+ ])
+ end
+
+ # Initializes the object
+ # @param [Hash] attributes Model attributes in the form of hash
+ def initialize(attributes = {})
+ if (!attributes.is_a?(Hash))
+ fail ArgumentError, "The input argument (attributes) must be a hash in `HatchetSdkRest::APIMetaPosthog` initialize method"
+ end
+
+ # check to see if the attribute exists and convert string to symbol for hash key
+ acceptable_attribute_map = self.class.acceptable_attribute_map
+ attributes = attributes.each_with_object({}) { |(k, v), h|
+ if (!acceptable_attribute_map.key?(k.to_sym))
+ fail ArgumentError, "`#{k}` is not a valid attribute in `HatchetSdkRest::APIMetaPosthog`. Please check the name to make sure it's valid. List of attributes: " + acceptable_attribute_map.keys.inspect
+ end
+ h[k.to_sym] = v
+ }
+
+ if attributes.key?(:'api_key')
+ self.api_key = attributes[:'api_key']
+ end
+
+ if attributes.key?(:'api_host')
+ self.api_host = attributes[:'api_host']
+ end
+ end
+
+ # Show invalid properties with the reasons. Usually used together with valid?
+ # @return Array for valid properties with the reasons
+ def list_invalid_properties
+ warn '[DEPRECATED] the `list_invalid_properties` method is obsolete'
+ invalid_properties = Array.new
+ invalid_properties
+ end
+
+ # Check to see if the all the properties in the model are valid
+ # @return true if the model is valid
+ def valid?
+ warn '[DEPRECATED] the `valid?` method is obsolete'
+ true
+ end
+
+ # Checks equality by comparing each attribute.
+ # @param [Object] Object to be compared
+ def ==(o)
+ return true if self.equal?(o)
+ self.class == o.class &&
+ api_key == o.api_key &&
+ api_host == o.api_host
+ end
+
+ # @see the `==` method
+ # @param [Object] Object to be compared
+ def eql?(o)
+ self == o
+ end
+
+ # Calculates hash code according to all attributes.
+ # @return [Integer] Hash code
+ def hash
+ [api_key, api_host].hash
+ end
+
+ # Builds the object from hash
+ # @param [Hash] attributes Model attributes in the form of hash
+ # @return [Object] Returns the model itself
+ def self.build_from_hash(attributes)
+ return nil unless attributes.is_a?(Hash)
+ attributes = attributes.transform_keys(&:to_sym)
+ transformed_hash = {}
+ openapi_types.each_pair do |key, type|
+ if attributes.key?(attribute_map[key]) && attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = nil
+ elsif type =~ /\AArray<(.*)>/i
+ # check to ensure the input is an array given that the attribute
+ # is documented as an array but the input is not
+ if attributes[attribute_map[key]].is_a?(Array)
+ transformed_hash["#{key}"] = attributes[attribute_map[key]].map { |v| _deserialize($1, v) }
+ end
+ elsif !attributes[attribute_map[key]].nil?
+ transformed_hash["#{key}"] = _deserialize(type, attributes[attribute_map[key]])
+ end
+ end
+ new(transformed_hash)
+ end
+
+ # Deserializes the data based on type
+ # @param string type Data type
+ # @param string value Value to be deserialized
+ # @return [Object] Deserialized data
+ def self._deserialize(type, value)
+ case type.to_sym
+ when :Time
+ Time.parse(value)
+ when :Date
+ Date.parse(value)
+ when :String
+ value.to_s
+ when :Integer
+ value.to_i
+ when :Float
+ value.to_f
+ when :Boolean
+ if value.to_s =~ /\A(true|t|yes|y|1)\z/i
+ true
+ else
+ false
+ end
+ when :Object
+ # generic object (usually a Hash), return directly
+ value
+ when /\AArray<(?