From 08bd27a86953984e32e2183fb2d81fbe5ab1d02f Mon Sep 17 00:00:00 2001
From: Matt Kaye
Date: Fri, 16 May 2025 15:44:28 -0400
Subject: [PATCH] Feat: Dynamic (Event) Filters (#1704)

* feat: add events tables
* fix: tweak PK
* feat: migration
* feat: gen models
* fix: add external id col + index
* fix: uuid pk
* fix: types
* chore: gen
* feat: add index

* Feat: Write events into OLAP tables (#1634)
* feat: query for event creation
* feat: olap impl
* feat: wire up the olap event write
* feat: goroutine?
* feat: start wiring up inserts to triggers
* fix: no `RETURNING`
* fix: hack
* fix: inner join
* feat: attempt 2
* fix: return errors
* chore: lint
* fix: diff
* feat: add new partitions
* fix: eof
* fix: write external ids into table
* chore: gen
* fix: wiring
* fix: event deduping
* fix: insert in bulk
* fix: bug
* refactor: return type of trigger
* fix: unnest ids
* fix: unnest tenant ids
* fix: run ids in bulk insert
* feat: two bulk inserts, one tx
* fix: cruft
* fix: bug
* Update pkg/repository/v1/olap.go
* fix: rework to avoid n^2 loop
* fix: transaction timeout
* fix: lint
* fix: use error
* fix: rm penultimate version
* fix: rm penultimate test part ii

* Feat: CEL-based filtering of events (#1676)
* feat: add optional expression to workflow trigger event ref
* feat: proto field for expression
* feat: write and parse the expression
* feat: wire up through put workflow ver request
* feat: query
* fix: naming
* fix: cleanup
* fix: rebase
* Update pkg/repository/v1/trigger.go
* fix: skip workflow on cel eval failure
* fix: zero value
* fix: cel evaluator
* fix: usage
* fix: naming + type
* fix: rm event filter from v0 defn
* feat: tests + fix typing
* fix: usage
* fix: construct input
* feat: always write events

* fix: select existing partitions
* feat: add prio to push event request
* feat: priority from pushed events
* fix: missed a spot
* fix: write events even if they're not tied to any workflows
* fix: revert cel event filtering
* fix: couple more
* fix: simplify

* feat: filters api
* feat: table for storing filters
* feat: migration
* fix: pk ordering
* feat: wiring up initial api
* feat: impl filter list
* feat: wire up inserts of filters
* feat: add resource hint to push event
* feat: multi-select filters
* feat: wire up resource hint on event push
* feat: filtering
* fix: small bug
* fix: rm version id
* fix: query
* fix: panic
* fix: schema
* fix: naming
* fix: rm python changes
* chore: lint
* fix: uuid hint
* fix: use overwrite for listing filters b/c of nullable arg
* fix: naming
* fix: gen api
* fix: merge issues
* feat: filter delete endpoint
* fix: overwrite
* fix: delete filter api wiring
* fix: migration patch
* chore: gen
* fix: merge hell

* Revert "Revert "Feat: Events in the OLAP Repo (#1633)" (#1706)"

  This reverts commit bf29269a27679755505f005ba481f86bbe8f222b.

* Feat: Events Frontend (#1678)
* feat: initial API work
* chore: gen ts
* feat: fe skeleton
* feat: wiring up skeleton data
* feat: hook
* fix: bugs
* fix: lint on gen
* fix: couple more
* feat: wire up counts
* feat: initial events cols + styling
* feat: layout
* feat: styling
* fix: cleanup
* feat: use external ids on the FE
* fix: separate openapi spec for new events route
* fix: required param
* fix: update queries and api
* feat: event detail
* fix: page
* fix: rebase
* tweak: table
* feat: add events page to sidebar
* feat: modify queries to allow fetching by triggering event
* feat: add triggering event id to api
* chore: lint
* feat: wire up events api
* fix: rm log
* fix: gen
* feat: wire up status counts
* fix: rm time series
* fix: rm state
* fix: lint
* fix: eof
* chore: lint
* feat: wire up filters
* fix: lint
* chore: api gen
* fix: gen api
* fix: gen
* fix: more merge issues
* chore: gen
* fix: lockfile
* fix: merge issues
* chore: gen again
* fix: rm unused fields from openapi spec

* fix: migration ver
* fix: insert trigger, event types
* fix: bunch o refs
* fix: migration
* fix: queries
* fix: finish wiring up inserts
* fix: misc bugs
* fix: fe filtering
* chore: gen
* fix: migration ver
* chore: lint
* fix: missed a couple
* fix: whitespace
* fix: formatting, gen
* fix: uuid id for filters
* feat: rest api for filters
* fix: tag
* feat: add event id, payload, and metadata to cel env
* fix: rename resource hint to scope
* chore: gen
* fix: same conflict, different day
* feat: send filter payload with input
* fix: lint
* fix: invert filter to "positive" case
* fix: naming
* feat: send payloads back with trigger data
* fix: add case to check if no filters were found
* fix: send additional meta back on bulk push
* fix: cleanup from self review
* fix: more small cleanup
* feat: initial pr feedback
* feat: validation
* fix: populator

* Feat: SDK changes for event filtering (#1677)
* chore: gen python
* Revert "fix: select existing partitions"

  This reverts commit fefbdd5122b85c5d807c3dce3aed7d974f01a7d8.

* fix: rebase hell
* feat: prio
* chore: docs
* feat: gen ts
* feat: ts wiring
* feat: go
* fix: prio test bug
* chore: gen
* fix: validation bug
* feat: extend events client
* feat: e2e test
* chore: docs
* fix: test
* fix: unwind event filter
* fix: rm should skip
* chore: gen
* feat: resource hints + more e2e tests
* fix: use `cached_property` for id
* fix: raises
* fix: rm print cruft
* feat: wiring + e2e test
* chore: gen
* feat: wire up python sdk
* feat: static payload in test
* fix: use test run id in payload
* fix: longer sleeps
* feat: more tests
* feat: intermediate work
* feat: add validator for filter payload
* fix: rm cruft
* fix: event example
* fix: e2e tests
* fix: finish cleaning up tests
* fix: __hash__ method
* fix: copilot comments!
* fix: apply namespaces to workflow names in a couple of the feature clients
* fix: handle case where namespace is falsey
* refactor: factor out
* fix: all the other refs to namespaced things
* fix: put `apply_namespace` on the client config
* fix: namespace overrides
* fix: implicitly use function name for hatchet task
* fix: name
* fix: refs
* chore: ver
* fix: durable tests
* feat: add a changelog!
* fix: changelog format
* feat: start wiring up filters in ts
* feat: scopes on event push
* feat: wire up triggering event id filter
* feat: initial ts e2e test work
* fix: run the test
* fix: drive-by bug in AI-gen slop
* fix: test naming
* feat: more test cleanup
* fix: api
* fix: ns override
* fix: test + API
* fix: lint
* fix: cruft
* chore: gen
* fix: dont run in ci?
* fix: wire up Go SDK
* fix: compiler
* fix: examples
* fix: event snippet, I think
* chore: docs

* Revert "fix: examples"

  This reverts commit cbf33d62990dbf901b94608fa110c1a64fac6a27.

* Revert "fix: compiler"

  This reverts commit 52336abeb2ed0466dba929b86f52b9c210e3eaf9.
* fix: add push opt funcs for prio and scope * chore: ver --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- api-contracts/events/events.proto | 6 + .../openapi/components/schemas/_index.yaml | 6 + .../openapi/components/schemas/event.yaml | 3 + .../openapi/components/schemas/v1/filter.yaml | 62 + api-contracts/openapi/openapi.yaml | 4 + .../openapi/paths/v1/filters/filter.yaml | 217 +++ api/v1/server/handlers/events/bulk_create.go | 1 + api/v1/server/handlers/events/create.go | 2 +- api/v1/server/handlers/ingestors/sns.go | 2 +- api/v1/server/handlers/v1/filters/create.go | 49 + api/v1/server/handlers/v1/filters/delete.go | 29 + api/v1/server/handlers/v1/filters/get.go | 18 + api/v1/server/handlers/v1/filters/list.go | 68 + api/v1/server/handlers/v1/filters/service.go | 16 + api/v1/server/oas/gen/openapi.gen.go | 984 +++++++++---- api/v1/server/oas/transformers/v1/filters.go | 40 + api/v1/server/run/run.go | 16 + .../migrations/20250512141736_v1_0_20.sql | 30 + frontend/app/src/lib/api/generated/Api.ts | 104 ++ .../src/lib/api/generated/data-contracts.ts | 42 + .../sdks/python/feature-clients/_meta.ts | 7 + .../lib/docs/generated/snips/go/run/event.ts | 4 +- .../snips/go/z_v0/assignment-affinity/run.ts | 2 +- .../snips/go/z_v0/assignment-sticky/run.ts | 2 +- .../snips/go/z_v0/cancellation/run.ts | 2 +- .../generated/snips/go/z_v0/compute/main.ts | 2 +- .../docs/generated/snips/go/z_v0/dag/main.ts | 2 +- .../snips/go/z_v0/deprecated/requeue/main.ts | 2 +- .../z_v0/deprecated/schedule-timeout/main.ts | 2 +- .../snips/go/z_v0/deprecated/timeout/main.ts | 2 +- .../snips/go/z_v0/deprecated/yaml/main.ts | 2 +- .../snips/go/z_v0/errors-test/main.ts | 2 +- .../cancel-in-progress/main.ts | 2 +- .../group-round-robin/main.ts | 2 +- .../generated/snips/go/z_v0/logging/main.ts | 2 +- .../generated/snips/go/z_v0/middleware/run.ts | 2 +- .../snips/go/z_v0/namespaced/main.ts | 2 +- .../snips/go/z_v0/register-action/main.ts | 2 +- .../generated/snips/go/z_v0/retries/main.ts | 2 +- .../generated/snips/go/z_v0/simple/main.ts | 2 +- .../generated/snips/go/z_v0/timeout/run.ts | 2 +- .../generated/snips/go/z_v0/webhook/run.ts | 2 +- .../docs/generated/snips/python/dag/worker.ts | 2 +- .../snips/python/events/test_event.ts | 2 +- .../generated/snips/python/events/worker.ts | 6 +- .../snips/python/priority/test_priority.ts | 2 +- .../generated/snips/python/simple/index.ts | 2 + .../python/simple/test_simple_workflow.ts | 12 + .../generated/snips/python/simple/trigger.ts | 2 +- .../generated/snips/python/simple/worker.ts | 4 +- .../lib/docs/generated/snips/python/worker.ts | 2 +- .../snips/typescript/on_event/event.e2e.ts | 12 + .../snips/typescript/on_event/event.ts | 4 +- .../snips/typescript/on_event/index.ts | 2 + .../snips/typescript/on_event/workflow.ts | 6 +- .../docs/lib/generated/snips/go/run/event.ts | 4 +- .../snips/go/z_v0/assignment-affinity/run.ts | 2 +- .../snips/go/z_v0/assignment-sticky/run.ts | 2 +- .../snips/go/z_v0/cancellation/run.ts | 2 +- .../generated/snips/go/z_v0/compute/main.ts | 2 +- .../lib/generated/snips/go/z_v0/dag/main.ts | 2 +- .../snips/go/z_v0/deprecated/requeue/main.ts | 2 +- .../z_v0/deprecated/schedule-timeout/main.ts | 2 +- .../snips/go/z_v0/deprecated/timeout/main.ts | 2 +- .../snips/go/z_v0/deprecated/yaml/main.ts | 2 +- .../snips/go/z_v0/errors-test/main.ts | 2 +- .../cancel-in-progress/main.ts | 2 +- .../group-round-robin/main.ts | 2 +- .../generated/snips/go/z_v0/logging/main.ts | 2 +- .../generated/snips/go/z_v0/middleware/run.ts | 2 
+- .../snips/go/z_v0/namespaced/main.ts | 2 +- .../snips/go/z_v0/register-action/main.ts | 2 +- .../generated/snips/go/z_v0/retries/main.ts | 2 +- .../generated/snips/go/z_v0/simple/main.ts | 2 +- .../generated/snips/go/z_v0/timeout/run.ts | 2 +- .../generated/snips/go/z_v0/webhook/run.ts | 2 +- .../lib/generated/snips/python/dag/worker.ts | 2 +- .../snips/python/events/test_event.ts | 2 +- .../generated/snips/python/events/worker.ts | 6 +- .../snips/python/priority/test_priority.ts | 2 +- .../generated/snips/python/simple/index.ts | 2 + .../python/simple/test_simple_workflow.ts | 11 + .../generated/snips/python/simple/trigger.ts | 2 +- .../generated/snips/python/simple/worker.ts | 4 +- .../docs/lib/generated/snips/python/worker.ts | 2 +- .../snips/typescript/on_event/event.e2e.ts | 11 + .../snips/typescript/on_event/event.ts | 4 +- .../snips/typescript/on_event/index.ts | 2 + .../snips/typescript/on_event/workflow.ts | 6 +- frontend/docs/pages/sdks/python/client.mdx | 124 +- .../sdks/python/feature-clients/_meta.js | 7 + .../sdks/python/feature-clients/filters.mdx | 158 ++ .../sdks/python/feature-clients/metrics.mdx | 20 +- .../sdks/python/feature-clients/runs.mdx | 50 +- frontend/docs/pages/sdks/python/runnables.mdx | 120 +- internal/cel/cel.go | 50 + internal/cel/cel_test.go | 81 + .../controllers/v1/task/controller.go | 1 + .../services/ingestor/contracts/events.pb.go | 179 ++- internal/services/ingestor/ingestor.go | 6 +- internal/services/ingestor/ingestor_v1.go | 14 +- internal/services/ingestor/server.go | 21 +- .../services/shared/tasktypes/v1/event.go | 11 +- pkg/client/event.go | 22 + pkg/client/rest/gen.go | 698 +++++++++ pkg/repository/event.go | 3 + pkg/repository/postgres/event.go | 2 + pkg/repository/v1/filters.go | 78 + pkg/repository/v1/input.go | 19 +- pkg/repository/v1/match.go | 4 +- pkg/repository/v1/repository.go | 7 + .../v1/sqlcv1/filters-overwrite.sql | 14 + .../v1/sqlcv1/filters-overwrite.sql.go | 63 + pkg/repository/v1/sqlcv1/filters.sql | 37 + pkg/repository/v1/sqlcv1/filters.sql.go | 125 ++ pkg/repository/v1/sqlcv1/models.go | 11 + pkg/repository/v1/sqlcv1/sqlc.yaml | 1 + pkg/repository/v1/task.go | 2 + pkg/repository/v1/trigger.go | 159 +- sdks/python/CHANGELOG.md | 21 + sdks/python/docs/feature-clients/filters.md | 3 + sdks/python/examples/events/test_event.py | 342 ++++- sdks/python/examples/events/worker.py | 18 +- .../examples/simple/test_simple_workflow.py | 33 +- sdks/python/examples/simple/trigger.py | 4 +- sdks/python/examples/simple/worker.py | 11 +- sdks/python/examples/worker.py | 7 +- sdks/python/hatchet_sdk/client.py | 2 + sdks/python/hatchet_sdk/clients/admin.py | 8 +- .../clients/dispatcher/action_listener.py | 15 +- sdks/python/hatchet_sdk/clients/events.py | 66 +- .../hatchet_sdk/clients/rest/__init__.py | 11 + .../hatchet_sdk/clients/rest/api/__init__.py | 1 + .../hatchet_sdk/clients/rest/api/event_api.py | 335 +++++ .../clients/rest/api/filter_api.py | 1305 +++++++++++++++++ .../hatchet_sdk/clients/rest/api/task_api.py | 51 + .../clients/rest/api/workflow_runs_api.py | 34 + .../clients/rest/models/__init__.py | 10 + .../rest/models/create_event_request.py | 18 +- .../rest/models/v1_create_filter_request.py | 99 ++ .../clients/rest/models/v1_event.py | 142 ++ .../clients/rest/models/v1_event_list.py | 110 ++ .../models/v1_event_workflow_run_summary.py | 101 ++ .../clients/rest/models/v1_filter.py | 127 ++ .../clients/rest/models/v1_filter_list.py | 110 ++ .../clients/rest/models/v1_log_line.py | 23 +- .../clients/rest/models/v1_task_event.py 
| 12 + .../clients/rest/models/v1_task_summary.py | 12 + .../clients/rest/models/v1_task_timing.py | 19 + .../clients/rest/models/workflow.py | 5 + sdks/python/hatchet_sdk/config.py | 29 + sdks/python/hatchet_sdk/context/context.py | 1 + .../hatchet_sdk/contracts/events_pb2.py | 40 +- .../hatchet_sdk/contracts/events_pb2.pyi | 20 +- sdks/python/hatchet_sdk/features/cron.py | 2 +- sdks/python/hatchet_sdk/features/filters.py | 181 +++ sdks/python/hatchet_sdk/features/runs.py | 8 +- sdks/python/hatchet_sdk/features/scheduled.py | 2 +- sdks/python/hatchet_sdk/features/workflows.py | 2 +- sdks/python/hatchet_sdk/hatchet.py | 153 +- .../hatchet_sdk/runnables/standalone.py | 6 + sdks/python/hatchet_sdk/runnables/workflow.py | 31 +- sdks/python/hatchet_sdk/worker/worker.py | 2 +- sdks/python/pyproject.toml | 2 +- sdks/typescript/package.json | 2 +- .../src/clients/event/event-client.test.ts | 7 +- .../src/clients/event/event-client.ts | 26 +- .../clients/hatchet-client/hatchet-client.ts | 3 +- .../src/clients/rest/generated/Api.ts | 186 ++- .../clients/rest/generated/data-contracts.ts | 176 ++- sdks/typescript/src/protoc/events/events.ts | 63 +- sdks/typescript/src/v1/client/client.ts | 14 + .../src/v1/client/features/filters.ts | 57 + .../typescript/src/v1/client/features/runs.ts | 9 + .../src/v1/client/features/workflows.ts | 2 +- .../src/v1/examples/on_event/event.e2e.ts | 374 +++++ .../src/v1/examples/on_event/event.ts | 1 + .../src/v1/examples/on_event/workflow.ts | 1 + sql/schema/v1-core.sql | 21 + 179 files changed, 7723 insertions(+), 748 deletions(-) create mode 100644 api-contracts/openapi/components/schemas/v1/filter.yaml create mode 100644 api-contracts/openapi/paths/v1/filters/filter.yaml create mode 100644 api/v1/server/handlers/v1/filters/create.go create mode 100644 api/v1/server/handlers/v1/filters/delete.go create mode 100644 api/v1/server/handlers/v1/filters/get.go create mode 100644 api/v1/server/handlers/v1/filters/list.go create mode 100644 api/v1/server/handlers/v1/filters/service.go create mode 100644 api/v1/server/oas/transformers/v1/filters.go create mode 100644 cmd/hatchet-migrate/migrate/migrations/20250512141736_v1_0_20.sql create mode 100644 frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts create mode 100644 frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts create mode 100644 frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts create mode 100644 frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts create mode 100644 frontend/docs/pages/sdks/python/feature-clients/filters.mdx create mode 100644 pkg/repository/v1/filters.go create mode 100644 pkg/repository/v1/sqlcv1/filters-overwrite.sql create mode 100644 pkg/repository/v1/sqlcv1/filters-overwrite.sql.go create mode 100644 pkg/repository/v1/sqlcv1/filters.sql create mode 100644 pkg/repository/v1/sqlcv1/filters.sql.go create mode 100644 sdks/python/CHANGELOG.md create mode 100644 sdks/python/docs/feature-clients/filters.md create mode 100644 sdks/python/hatchet_sdk/clients/rest/api/filter_api.py create mode 100644 sdks/python/hatchet_sdk/clients/rest/models/v1_create_filter_request.py create mode 100644 sdks/python/hatchet_sdk/clients/rest/models/v1_event.py create mode 100644 sdks/python/hatchet_sdk/clients/rest/models/v1_event_list.py create mode 100644 sdks/python/hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py create mode 100644 sdks/python/hatchet_sdk/clients/rest/models/v1_filter.py create mode 
100644 sdks/python/hatchet_sdk/clients/rest/models/v1_filter_list.py
 create mode 100644 sdks/python/hatchet_sdk/features/filters.py
 create mode 100644 sdks/typescript/src/v1/client/features/filters.ts
 create mode 100644 sdks/typescript/src/v1/examples/on_event/event.e2e.ts

diff --git a/api-contracts/events/events.proto b/api-contracts/events/events.proto
index e1d10b21b..d13b92185 100644
--- a/api-contracts/events/events.proto
+++ b/api-contracts/events/events.proto
@@ -35,6 +35,9 @@ message Event {
   // the payload for the event
   optional string additionalMetadata = 6;
+  // the scope associated with this event. Used for subsetting candidate filters at evaluation time
+  optional string scope = 7;
+
 }
 
 message Events {
@@ -99,6 +102,9 @@ message PushEventRequest {
   optional string additionalMetadata = 4;
 
   optional int32 priority = 5;
+
+  // the scope associated with this event. Used for subsetting candidate filters at evaluation time
+  optional string scope = 6;
 }
 
 message ReplayEventRequest {
diff --git a/api-contracts/openapi/components/schemas/_index.yaml b/api-contracts/openapi/components/schemas/_index.yaml
index 6b171c9d8..249aad1c0 100644
--- a/api-contracts/openapi/components/schemas/_index.yaml
+++ b/api-contracts/openapi/components/schemas/_index.yaml
@@ -104,6 +104,10 @@ EventList:
   $ref: "./event.yaml#/EventList"
 V1EventList:
   $ref: "./v1/event.yaml#/V1EventList"
+V1FilterList:
+  $ref: "./v1/filter.yaml#/V1FilterList"
+V1Filter:
+  $ref: "./v1/filter.yaml#/V1Filter"
 RateLimit:
   $ref: "./rate_limits.yaml#/RateLimit"
 RateLimitList:
@@ -336,3 +340,5 @@ V1TaskTiming:
   $ref: "./v1/task.yaml#/V1TaskTiming"
 V1TaskTimingList:
   $ref: "./v1/task.yaml#/V1TaskTimingList"
+V1CreateFilterRequest:
+  $ref: "./v1/filter.yaml#/V1CreateFilterRequest"
diff --git a/api-contracts/openapi/components/schemas/event.yaml b/api-contracts/openapi/components/schemas/event.yaml
index fc230b4ea..ecc5a4a1d 100644
--- a/api-contracts/openapi/components/schemas/event.yaml
+++ b/api-contracts/openapi/components/schemas/event.yaml
@@ -58,6 +58,9 @@ CreateEventRequest:
       type: integer
       description: The priority of the event.
       format: int32
+    scope:
+      type: string
+      description: The scope for event filtering.
   required:
     - key
     - data
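For orientation (not part of the patch): a minimal sketch of how a caller might set the new optional `scope` field when pushing an event through the generated gRPC contract. The import path matches the generated package touched by this patch (internal/services/ingestor/contracts); the `Key` and `Payload` field names are assumptions based on the surrounding proto, since they are not shown in the hunk above.

    package main

    import (
    	"fmt"

    	contracts "github.com/hatchet-dev/hatchet/internal/services/ingestor/contracts"
    )

    func main() {
    	scope := "user:1234" // subsets candidate filters at evaluation time
    	priority := int32(1)

    	req := &contracts.PushEventRequest{
    		Key:      "order:created",         // assumed field name, not shown in this hunk
    		Payload:  `{"orderId": "abc123"}`, // assumed field name
    		Priority: &priority,
    		Scope:    &scope, // new in this patch; omit to skip scope-based subsetting
    	}

    	fmt.Println(req.GetScope())
    }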
diff --git a/api-contracts/openapi/components/schemas/v1/filter.yaml b/api-contracts/openapi/components/schemas/v1/filter.yaml
new file mode 100644
index 000000000..4d7136753
--- /dev/null
+++ b/api-contracts/openapi/components/schemas/v1/filter.yaml
@@ -0,0 +1,62 @@
+V1Filter:
+  properties:
+    metadata:
+      $ref: "../metadata.yaml#/APIResourceMeta"
+    tenantId:
+      type: string
+      description: The ID of the tenant associated with this filter.
+    workflowId:
+      type: string
+      format: uuid
+      minLength: 36
+      maxLength: 36
+      description: The workflow id associated with this filter.
+    scope:
+      type: string
+      description: The scope associated with this filter. Used for subsetting candidate filters at evaluation time
+    expression:
+      type: string
+      description: The expression associated with this filter.
+    payload:
+      type: object
+      description: Additional payload data associated with the filter
+  required:
+    - metadata
+    - tenantId
+    - workflowId
+    - scope
+    - expression
+    - payload
+
+V1FilterList:
+  type: object
+  properties:
+    pagination:
+      $ref: "../metadata.yaml#/PaginationResponse"
+    rows:
+      type: array
+      items:
+        $ref: "#/V1Filter"
+
+V1CreateFilterRequest:
+  type: object
+  properties:
+    workflowId:
+      type: string
+      format: uuid
+      minLength: 36
+      maxLength: 36
+      description: The workflow id
+    expression:
+      type: string
+      description: The expression for the filter
+    scope:
+      type: string
+      description: The scope associated with this filter. Used for subsetting candidate filters at evaluation time
+    payload:
+      type: object
+      description: The payload for the filter
+  required:
+    - workflowId
+    - scope
+    - expression
diff --git a/api-contracts/openapi/openapi.yaml b/api-contracts/openapi/openapi.yaml
index dbc92df4b..83964b44c 100644
--- a/api-contracts/openapi/openapi.yaml
+++ b/api-contracts/openapi/openapi.yaml
@@ -50,6 +50,10 @@ paths:
     $ref: "./paths/v1/tasks/tasks.yaml#/getTaskPointMetrics"
   /api/v1/stable/tenants/{tenant}/events:
     $ref: "./paths/v1/events/event.yaml#/V1EventList"
+  /api/v1/stable/tenants/{tenant}/filters:
+    $ref: "./paths/v1/filters/filter.yaml#/V1FilterListCreate"
+  /api/v1/stable/tenants/{tenant}/filters/{v1-filter}:
+    $ref: "./paths/v1/filters/filter.yaml#/V1FilterGetDelete"
   /api/ready:
     $ref: "./paths/metadata/metadata.yaml#/readiness"
   /api/live:
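As a rough usage sketch of the schema above: the JSON below is shaped like V1CreateFilterRequest and is POSTed to the filters route registered in openapi.yaml. The base URL, tenant id, workflow id, and the CEL expression are placeholders, not values from this patch.

    package main

    import (
    	"bytes"
    	"fmt"
    	"net/http"
    )

    func main() {
    	// workflowId, scope, and expression are required; payload is optional.
    	body := []byte(`{
    		"workflowId": "00000000-0000-0000-0000-000000000000",
    		"scope": "user:1234",
    		"expression": "input.value > 10",
    		"payload": {"note": "extra data sent back with trigger data"}
    	}`)

    	// TENANT_ID and the host are placeholders.
    	url := "http://localhost:8080/api/v1/stable/tenants/TENANT_ID/filters"
    	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
    	if err != nil {
    		panic(err)
    	}
    	defer resp.Body.Close()
    	fmt.Println(resp.Status) // 200 with the created V1Filter on success
    }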
"../../../components/schemas/_index.yaml#/APIErrors" + description: Forbidden + "404": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: Not found + tags: + - Filter +V1FilterListCreate: + get: + x-resources: ["tenant"] + description: Lists all filters for a tenant. + operationId: v1-filter:list + parameters: + - description: The tenant id + in: path + name: tenant + required: true + schema: + type: string + format: uuid + minLength: 36 + maxLength: 36 + - description: The number to skip + in: query + name: offset + required: false + schema: + type: integer + format: int64 + - description: The number to limit by + in: query + name: limit + required: false + schema: + type: integer + format: int64 + - description: The workflow ids to filter by + in: query + name: workflowIds + required: false + schema: + type: array + items: + type: string + description: The workflow id associated with this filter. + format: uuid + minLength: 36 + maxLength: 36 + - description: The scopes to subset candidate filters by + in: query + name: scopes + required: false + schema: + type: array + items: + type: string + description: The scope associated with this filter. Used for subsetting candidate filters at evaluation time + responses: + "200": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/V1FilterList" + description: Successfully listed the filters + "400": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: A malformed or bad request + "403": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: Forbidden + summary: List filters + tags: + - Filter + post: + x-resources: ["tenant"] + description: Create a new filter + operationId: v1-filter:create + parameters: + - description: The tenant id + in: path + name: tenant + required: true + schema: + type: string + format: uuid + minLength: 36 + maxLength: 36 + requestBody: + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/V1CreateFilterRequest" + description: The input to the filter creation + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/V1Filter" + description: Successfully created the cron job workflow trigger + "400": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: A malformed or bad request + "403": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: Forbidden + "404": + content: + application/json: + schema: + $ref: "../../../components/schemas/_index.yaml#/APIErrors" + description: Not found + summary: Create a filter + tags: + - Filter diff --git a/api/v1/server/handlers/events/bulk_create.go b/api/v1/server/handlers/events/bulk_create.go index 8fef4e7b7..f61151461 100644 --- a/api/v1/server/handlers/events/bulk_create.go +++ b/api/v1/server/handlers/events/bulk_create.go @@ -43,6 +43,7 @@ func (t *EventService) EventCreateBulk(ctx echo.Context, request gen.EventCreate Data: dataBytes, AdditionalMetadata: additionalMetadata, Priority: event.Priority, + Scope: event.Scope, } } events, err := t.config.Ingestor.BulkIngestEvent(ctx.Request().Context(), tenant, eventOpts) diff --git a/api/v1/server/handlers/events/create.go b/api/v1/server/handlers/events/create.go index 
diff --git a/api/v1/server/handlers/events/bulk_create.go b/api/v1/server/handlers/events/bulk_create.go
index 8fef4e7b7..f61151461 100644
--- a/api/v1/server/handlers/events/bulk_create.go
+++ b/api/v1/server/handlers/events/bulk_create.go
@@ -43,6 +43,7 @@ func (t *EventService) EventCreateBulk(ctx echo.Context, request gen.EventCreate
 			Data:               dataBytes,
 			AdditionalMetadata: additionalMetadata,
 			Priority:           event.Priority,
+			Scope:              event.Scope,
 		}
 	}
 
 	events, err := t.config.Ingestor.BulkIngestEvent(ctx.Request().Context(), tenant, eventOpts)
diff --git a/api/v1/server/handlers/events/create.go b/api/v1/server/handlers/events/create.go
index 5dcac0b80..b0a07fdcb 100644
--- a/api/v1/server/handlers/events/create.go
+++ b/api/v1/server/handlers/events/create.go
@@ -32,7 +32,7 @@ func (t *EventService) EventCreate(ctx echo.Context, request gen.EventCreateRequ
 		}
 	}
 
-	newEvent, err := t.config.Ingestor.IngestEvent(ctx.Request().Context(), tenant, request.Body.Key, dataBytes, additionalMetadata, request.Body.Priority)
+	newEvent, err := t.config.Ingestor.IngestEvent(ctx.Request().Context(), tenant, request.Body.Key, dataBytes, additionalMetadata, request.Body.Priority, request.Body.Scope)
 
 	if err != nil {
 		if err == metered.ErrResourceExhausted {
diff --git a/api/v1/server/handlers/ingestors/sns.go b/api/v1/server/handlers/ingestors/sns.go
index e623aa317..9e99f3c66 100644
--- a/api/v1/server/handlers/ingestors/sns.go
+++ b/api/v1/server/handlers/ingestors/sns.go
@@ -64,7 +64,7 @@ func (i *IngestorsService) SnsUpdate(ctx echo.Context, req gen.SnsUpdateRequestO
 			return nil, err
 		}
 	default:
-		_, err := i.config.Ingestor.IngestEvent(ctx.Request().Context(), tenant, req.Event, body, nil, nil)
+		_, err := i.config.Ingestor.IngestEvent(ctx.Request().Context(), tenant, req.Event, body, nil, nil, nil)
 
 		if err != nil {
 			return nil, err
diff --git a/api/v1/server/handlers/v1/filters/create.go b/api/v1/server/handlers/v1/filters/create.go
new file mode 100644
index 000000000..df3014307
--- /dev/null
+++ b/api/v1/server/handlers/v1/filters/create.go
@@ -0,0 +1,49 @@
+package filtersv1
+
+import (
+	"encoding/json"
+	"fmt"
+
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers/v1"
+	"github.com/hatchet-dev/hatchet/pkg/repository/postgres/dbsqlc"
+	"github.com/hatchet-dev/hatchet/pkg/repository/postgres/sqlchelpers"
+	v1 "github.com/hatchet-dev/hatchet/pkg/repository/v1"
+	"github.com/labstack/echo/v4"
+)
+
+func (t *V1FiltersService) V1FilterCreate(ctx echo.Context, request gen.V1FilterCreateRequestObject) (gen.V1FilterCreateResponseObject, error) {
+	tenant := ctx.Get("tenant").(*dbsqlc.Tenant)
+
+	var payload []byte
+
+	if request.Body.Payload != nil {
+		marshalledPayload, err := json.Marshal(request.Body.Payload)
+
+		if err != nil {
+			return gen.V1FilterCreate400JSONResponse(apierrors.NewAPIErrors("failed to marshal payload to json")), nil
+		}
+
+		payload = marshalledPayload
+	}
+
+	params := v1.CreateFilterOpts{
+		Workflowid: sqlchelpers.UUIDFromStr(request.Body.WorkflowId.String()),
+		Scope:      request.Body.Scope,
+		Expression: request.Body.Expression,
+		Payload:    payload,
+	}
+
+	filter, err := t.config.V1.Filters().CreateFilter(
+		ctx.Request().Context(),
+		tenant.ID.String(),
+		params,
+	)
+
+	if err != nil {
+		return nil, fmt.Errorf("failed to create filter: %w", err)
+	}
+
+	transformed := transformers.ToV1Filter(filter)
+
+	return gen.V1FilterCreate200JSONResponse(transformed), nil
+}
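The create handler above stores the filter's CEL expression verbatim; evaluation happens later, when incoming events are matched against candidate filters. As an independent illustration of that evaluation step (using the public cel-go library directly, not this repo's internal/cel package, whose exact variable names aren't shown in this patch; the commit history only mentions event id, payload, and metadata being added to the env), a minimal compile-and-eval looks like:

    package main

    import (
    	"fmt"

    	"github.com/google/cel-go/cel"
    )

    func main() {
    	// "input" is a stand-in variable name, not necessarily what internal/cel exposes.
    	env, err := cel.NewEnv(
    		cel.Variable("input", cel.MapType(cel.StringType, cel.DynType)),
    	)
    	if err != nil {
    		panic(err)
    	}

    	// Compile the stored filter expression once...
    	ast, iss := env.Compile(`input.value > 10`)
    	if iss != nil && iss.Err() != nil {
    		panic(iss.Err())
    	}

    	prg, err := env.Program(ast)
    	if err != nil {
    		panic(err)
    	}

    	// ...then evaluate it against each candidate event's data.
    	out, _, err := prg.Eval(map[string]any{
    		"input": map[string]any{"value": 42},
    	})
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(out) // true -> the filter matches and the workflow is triggered
    }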
diff --git a/api/v1/server/handlers/v1/filters/delete.go b/api/v1/server/handlers/v1/filters/delete.go
new file mode 100644
index 000000000..7b35f93f9
--- /dev/null
+++ b/api/v1/server/handlers/v1/filters/delete.go
@@ -0,0 +1,29 @@
+package filtersv1
+
+import (
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers/v1"
+	"github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1"
+	"github.com/labstack/echo/v4"
+)
+
+func (t *V1FiltersService) V1FilterDelete(ctx echo.Context, request gen.V1FilterDeleteRequestObject) (gen.V1FilterDeleteResponseObject, error) {
+	filter := ctx.Get("v1-filter").(*sqlcv1.V1Filter)
+
+	filter, err := t.config.V1.Filters().DeleteFilter(
+		ctx.Request().Context(),
+		filter.TenantID.String(),
+		filter.ID.String(),
+	)
+
+	if err != nil {
+		return gen.V1FilterDelete400JSONResponse(apierrors.NewAPIErrors("failed to delete filter")), nil
+	}
+
+	transformed := transformers.ToV1Filter(filter)
+
+	return gen.V1FilterDelete200JSONResponse(
+		transformed,
+	), nil
+}
diff --git a/api/v1/server/handlers/v1/filters/get.go b/api/v1/server/handlers/v1/filters/get.go
new file mode 100644
index 000000000..acb3e0925
--- /dev/null
+++ b/api/v1/server/handlers/v1/filters/get.go
@@ -0,0 +1,18 @@
+package filtersv1
+
+import (
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers/v1"
+	"github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1"
+	"github.com/labstack/echo/v4"
+)
+
+func (t *V1FiltersService) V1FilterGet(ctx echo.Context, request gen.V1FilterGetRequestObject) (gen.V1FilterGetResponseObject, error) {
+	filter := ctx.Get("v1-filter").(*sqlcv1.V1Filter)
+
+	transformed := transformers.ToV1Filter(filter)
+
+	return gen.V1FilterGet200JSONResponse(
+		transformed,
+	), nil
+}
diff --git a/api/v1/server/handlers/v1/filters/list.go b/api/v1/server/handlers/v1/filters/list.go
new file mode 100644
index 000000000..05628e955
--- /dev/null
+++ b/api/v1/server/handlers/v1/filters/list.go
@@ -0,0 +1,68 @@
+package filtersv1
+
+import (
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/apierrors"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/gen"
+	"github.com/hatchet-dev/hatchet/api/v1/server/oas/transformers/v1"
+	"github.com/hatchet-dev/hatchet/pkg/repository/postgres/dbsqlc"
+	"github.com/hatchet-dev/hatchet/pkg/repository/postgres/sqlchelpers"
+	v1 "github.com/hatchet-dev/hatchet/pkg/repository/v1"
+	"github.com/jackc/pgx/v5/pgtype"
+	"github.com/labstack/echo/v4"
+)
+
+func (t *V1FiltersService) V1FilterList(ctx echo.Context, request gen.V1FilterListRequestObject) (gen.V1FilterListResponseObject, error) {
+	tenant := ctx.Get("tenant").(*dbsqlc.Tenant)
+
+	scopes := request.Params.Scopes
+	workflowIds := request.Params.WorkflowIds
+
+	if scopes != nil && workflowIds != nil && len(*scopes) != len(*workflowIds) {
+		return gen.V1FilterList400JSONResponse(apierrors.NewAPIErrors("scopes and workflow ids must be the same length")), nil
+	}
+
+	numScopesOrIds := 1
+
+	if scopes != nil {
+		numScopesOrIds = len(*scopes)
+	} else if workflowIds != nil {
+		numScopesOrIds = len(*workflowIds)
+	}
+
+	workflowIdParams := make([]pgtype.UUID, numScopesOrIds)
+
+	if workflowIds != nil {
+		for ix, id := range *workflowIds {
+			workflowIdParams[ix] = sqlchelpers.UUIDFromStr(id.String())
+		}
+	}
+
+	scopeParams := make([]*string, numScopesOrIds)
+
+	if scopes != nil {
+		for ix := range *scopes {
+			scopeParams[ix] = &(*scopes)[ix] // address of the slice element, not the loop variable
+		}
+	}
+
+	filters, err := t.config.V1.Filters().ListFilters(
+		ctx.Request().Context(),
+		tenant.ID.String(),
+		v1.ListFiltersOpts{
+			WorkflowIds:  workflowIdParams,
+			Scopes:       scopeParams,
+			FilterLimit:  request.Params.Limit,
+			FilterOffset: request.Params.Offset,
+		},
+	)
+
+	if err != nil {
+		return gen.V1FilterList400JSONResponse(apierrors.NewAPIErrors("failed to list filters")), nil
+	}
+
+	transformed := transformers.ToV1FilterList(filters)
+
+	return gen.V1FilterList200JSONResponse(
+		transformed,
+	), nil
+}
diff --git a/api/v1/server/handlers/v1/filters/service.go
b/api/v1/server/handlers/v1/filters/service.go new file mode 100644 index 000000000..a5f6840e6 --- /dev/null +++ b/api/v1/server/handlers/v1/filters/service.go @@ -0,0 +1,16 @@ +package filtersv1 + +import ( + "github.com/hatchet-dev/hatchet/pkg/config/server" +) + +type V1FiltersService struct { + config *server.ServerConfig +} + +func NewV1FiltersService(config *server.ServerConfig) *V1FiltersService { + + return &V1FiltersService{ + config: config, + } +} diff --git a/api/v1/server/oas/gen/openapi.gen.go b/api/v1/server/oas/gen/openapi.gen.go index 8e2e25313..899424714 100644 --- a/api/v1/server/oas/gen/openapi.gen.go +++ b/api/v1/server/oas/gen/openapi.gen.go @@ -429,6 +429,9 @@ type CreateEventRequest struct { // Priority The priority of the event. Priority *int32 `json:"priority,omitempty"` + + // Scope The scope for event filtering. + Scope *string `json:"scope,omitempty"` } // CreateSNSIntegrationRequest defines model for CreateSNSIntegrationRequest. @@ -1194,6 +1197,21 @@ type V1CancelTaskRequest struct { Filter *V1TaskFilter `json:"filter,omitempty"` } +// V1CreateFilterRequest defines model for V1CreateFilterRequest. +type V1CreateFilterRequest struct { + // Expression The expression for the filter + Expression string `json:"expression"` + + // Payload The payload for the filter + Payload *map[string]interface{} `json:"payload,omitempty"` + + // Scope The scope associated with this filter. Used for subsetting candidate filters at evaluation time + Scope string `json:"scope"` + + // WorkflowId The workflow id + WorkflowId openapi_types.UUID `json:"workflowId"` +} + // V1DagChildren defines model for V1DagChildren. type V1DagChildren struct { Children *[]V1TaskSummary `json:"children,omitempty"` @@ -1239,6 +1257,31 @@ type V1EventWorkflowRunSummary struct { Succeeded int64 `json:"succeeded"` } +// V1Filter defines model for V1Filter. +type V1Filter struct { + // Expression The expression associated with this filter. + Expression string `json:"expression"` + Metadata APIResourceMeta `json:"metadata"` + + // Payload Additional payload data associated with the filter + Payload map[string]interface{} `json:"payload"` + + // Scope The scope associated with this filter. Used for subsetting candidate filters at evaluation time + Scope string `json:"scope"` + + // TenantId The ID of the tenant associated with this filter. + TenantId string `json:"tenantId"` + + // WorkflowId The workflow id associated with this filter. + WorkflowId openapi_types.UUID `json:"workflowId"` +} + +// V1FilterList defines model for V1FilterList. +type V1FilterList struct { + Pagination *PaginationResponse `json:"pagination,omitempty"` + Rows *[]V1Filter `json:"rows,omitempty"` +} + // V1LogLine defines model for V1LogLine. type V1LogLine struct { // Attempt The attempt number of the log line. @@ -1953,6 +1996,21 @@ type V1EventListParams struct { Keys *[]EventKey `form:"keys,omitempty" json:"keys,omitempty"` } +// V1FilterListParams defines parameters for V1FilterList. 
+type V1FilterListParams struct { + // Offset The number to skip + Offset *int64 `form:"offset,omitempty" json:"offset,omitempty"` + + // Limit The number to limit by + Limit *int64 `form:"limit,omitempty" json:"limit,omitempty"` + + // WorkflowIds The workflow ids to filter by + WorkflowIds *[]openapi_types.UUID `form:"workflowIds,omitempty" json:"workflowIds,omitempty"` + + // Scopes The scopes to subset candidate filters by + Scopes *[]string `form:"scopes,omitempty" json:"scopes,omitempty"` +} + // V1TaskListStatusMetricsParams defines parameters for V1TaskListStatusMetrics. type V1TaskListStatusMetricsParams struct { // Since The start time to get metrics for @@ -2307,6 +2365,9 @@ type WorkflowVersionGetParams struct { // AlertEmailGroupUpdateJSONRequestBody defines body for AlertEmailGroupUpdate for application/json ContentType. type AlertEmailGroupUpdateJSONRequestBody = UpdateTenantAlertEmailGroupRequest +// V1FilterCreateJSONRequestBody defines body for V1FilterCreate for application/json ContentType. +type V1FilterCreateJSONRequestBody = V1CreateFilterRequest + // V1TaskCancelJSONRequestBody defines body for V1TaskCancel for application/json ContentType. type V1TaskCancelJSONRequestBody = V1CancelTaskRequest @@ -2450,6 +2511,18 @@ type ServerInterface interface { // List events // (GET /api/v1/stable/tenants/{tenant}/events) V1EventList(ctx echo.Context, tenant openapi_types.UUID, params V1EventListParams) error + // List filters + // (GET /api/v1/stable/tenants/{tenant}/filters) + V1FilterList(ctx echo.Context, tenant openapi_types.UUID, params V1FilterListParams) error + // Create a filter + // (POST /api/v1/stable/tenants/{tenant}/filters) + V1FilterCreate(ctx echo.Context, tenant openapi_types.UUID) error + + // (DELETE /api/v1/stable/tenants/{tenant}/filters/{v1-filter}) + V1FilterDelete(ctx echo.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID) error + // Get a filter + // (GET /api/v1/stable/tenants/{tenant}/filters/{v1-filter}) + V1FilterGet(ctx echo.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID) error // Get task metrics // (GET /api/v1/stable/tenants/{tenant}/task-metrics) V1TaskListStatusMetrics(ctx echo.Context, tenant openapi_types.UUID, params V1TaskListStatusMetricsParams) error @@ -3117,6 +3190,132 @@ func (w *ServerInterfaceWrapper) V1EventList(ctx echo.Context) error { return err } +// V1FilterList converts echo context to params. 
+func (w *ServerInterfaceWrapper) V1FilterList(ctx echo.Context) error { + var err error + // ------------- Path parameter "tenant" ------------- + var tenant openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "tenant", runtime.ParamLocationPath, ctx.Param("tenant"), &tenant) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter tenant: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + ctx.Set(CookieAuthScopes, []string{}) + + // Parameter object where we will unmarshal all parameters from the context + var params V1FilterListParams + // ------------- Optional query parameter "offset" ------------- + + err = runtime.BindQueryParameter("form", true, false, "offset", ctx.QueryParams(), ¶ms.Offset) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter offset: %s", err)) + } + + // ------------- Optional query parameter "limit" ------------- + + err = runtime.BindQueryParameter("form", true, false, "limit", ctx.QueryParams(), ¶ms.Limit) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter limit: %s", err)) + } + + // ------------- Optional query parameter "workflowIds" ------------- + + err = runtime.BindQueryParameter("form", true, false, "workflowIds", ctx.QueryParams(), ¶ms.WorkflowIds) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter workflowIds: %s", err)) + } + + // ------------- Optional query parameter "scopes" ------------- + + err = runtime.BindQueryParameter("form", true, false, "scopes", ctx.QueryParams(), ¶ms.Scopes) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter scopes: %s", err)) + } + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.V1FilterList(ctx, tenant, params) + return err +} + +// V1FilterCreate converts echo context to params. +func (w *ServerInterfaceWrapper) V1FilterCreate(ctx echo.Context) error { + var err error + // ------------- Path parameter "tenant" ------------- + var tenant openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "tenant", runtime.ParamLocationPath, ctx.Param("tenant"), &tenant) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter tenant: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + ctx.Set(CookieAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.V1FilterCreate(ctx, tenant) + return err +} + +// V1FilterDelete converts echo context to params. 
+func (w *ServerInterfaceWrapper) V1FilterDelete(ctx echo.Context) error { + var err error + // ------------- Path parameter "tenant" ------------- + var tenant openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "tenant", runtime.ParamLocationPath, ctx.Param("tenant"), &tenant) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter tenant: %s", err)) + } + + // ------------- Path parameter "v1-filter" ------------- + var v1Filter openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "v1-filter", runtime.ParamLocationPath, ctx.Param("v1-filter"), &v1Filter) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter v1-filter: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + ctx.Set(CookieAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.V1FilterDelete(ctx, tenant, v1Filter) + return err +} + +// V1FilterGet converts echo context to params. +func (w *ServerInterfaceWrapper) V1FilterGet(ctx echo.Context) error { + var err error + // ------------- Path parameter "tenant" ------------- + var tenant openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "tenant", runtime.ParamLocationPath, ctx.Param("tenant"), &tenant) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter tenant: %s", err)) + } + + // ------------- Path parameter "v1-filter" ------------- + var v1Filter openapi_types.UUID + + err = runtime.BindStyledParameterWithLocation("simple", false, "v1-filter", runtime.ParamLocationPath, ctx.Param("v1-filter"), &v1Filter) + if err != nil { + return echo.NewHTTPError(http.StatusBadRequest, fmt.Sprintf("Invalid format for parameter v1-filter: %s", err)) + } + + ctx.Set(BearerAuthScopes, []string{}) + + ctx.Set(CookieAuthScopes, []string{}) + + // Invoke the callback with all the unmarshaled arguments + err = w.Handler.V1FilterGet(ctx, tenant, v1Filter) + return err +} + // V1TaskListStatusMetrics converts echo context to params. 
func (w *ServerInterfaceWrapper) V1TaskListStatusMetrics(ctx echo.Context) error { var err error @@ -5668,6 +5867,10 @@ func RegisterHandlersWithBaseURL(router EchoRouter, si ServerInterface, baseURL router.GET(baseURL+"/api/v1/stable/tasks/:task/logs", wrapper.V1LogLineList) router.GET(baseURL+"/api/v1/stable/tasks/:task/task-events", wrapper.V1TaskEventList) router.GET(baseURL+"/api/v1/stable/tenants/:tenant/events", wrapper.V1EventList) + router.GET(baseURL+"/api/v1/stable/tenants/:tenant/filters", wrapper.V1FilterList) + router.POST(baseURL+"/api/v1/stable/tenants/:tenant/filters", wrapper.V1FilterCreate) + router.DELETE(baseURL+"/api/v1/stable/tenants/:tenant/filters/:v1-filter", wrapper.V1FilterDelete) + router.GET(baseURL+"/api/v1/stable/tenants/:tenant/filters/:v1-filter", wrapper.V1FilterGet) router.GET(baseURL+"/api/v1/stable/tenants/:tenant/task-metrics", wrapper.V1TaskListStatusMetrics) router.GET(baseURL+"/api/v1/stable/tenants/:tenant/task-point-metrics", wrapper.V1TaskGetPointMetrics) router.POST(baseURL+"/api/v1/stable/tenants/:tenant/tasks/cancel", wrapper.V1TaskCancel) @@ -6434,6 +6637,168 @@ func (response V1EventList403JSONResponse) VisitV1EventListResponse(w http.Respo return json.NewEncoder(w).Encode(response) } +type V1FilterListRequestObject struct { + Tenant openapi_types.UUID `json:"tenant"` + Params V1FilterListParams +} + +type V1FilterListResponseObject interface { + VisitV1FilterListResponse(w http.ResponseWriter) error +} + +type V1FilterList200JSONResponse V1FilterList + +func (response V1FilterList200JSONResponse) VisitV1FilterListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterList400JSONResponse APIErrors + +func (response V1FilterList400JSONResponse) VisitV1FilterListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterList403JSONResponse APIErrors + +func (response V1FilterList403JSONResponse) VisitV1FilterListResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(403) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterCreateRequestObject struct { + Tenant openapi_types.UUID `json:"tenant"` + Body *V1FilterCreateJSONRequestBody +} + +type V1FilterCreateResponseObject interface { + VisitV1FilterCreateResponse(w http.ResponseWriter) error +} + +type V1FilterCreate200JSONResponse V1Filter + +func (response V1FilterCreate200JSONResponse) VisitV1FilterCreateResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterCreate400JSONResponse APIErrors + +func (response V1FilterCreate400JSONResponse) VisitV1FilterCreateResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterCreate403JSONResponse APIErrors + +func (response V1FilterCreate403JSONResponse) VisitV1FilterCreateResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(403) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterCreate404JSONResponse APIErrors + +func (response V1FilterCreate404JSONResponse) VisitV1FilterCreateResponse(w http.ResponseWriter) error { + 
w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterDeleteRequestObject struct { + Tenant openapi_types.UUID `json:"tenant"` + V1Filter openapi_types.UUID `json:"v1-filter"` +} + +type V1FilterDeleteResponseObject interface { + VisitV1FilterDeleteResponse(w http.ResponseWriter) error +} + +type V1FilterDelete200JSONResponse V1Filter + +func (response V1FilterDelete200JSONResponse) VisitV1FilterDeleteResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterDelete400JSONResponse APIErrors + +func (response V1FilterDelete400JSONResponse) VisitV1FilterDeleteResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterDelete403JSONResponse APIErrors + +func (response V1FilterDelete403JSONResponse) VisitV1FilterDeleteResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(403) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterDelete404JSONResponse APIErrors + +func (response V1FilterDelete404JSONResponse) VisitV1FilterDeleteResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterGetRequestObject struct { + Tenant openapi_types.UUID `json:"tenant"` + V1Filter openapi_types.UUID `json:"v1-filter"` +} + +type V1FilterGetResponseObject interface { + VisitV1FilterGetResponse(w http.ResponseWriter) error +} + +type V1FilterGet200JSONResponse V1Filter + +func (response V1FilterGet200JSONResponse) VisitV1FilterGetResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(200) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterGet400JSONResponse APIErrors + +func (response V1FilterGet400JSONResponse) VisitV1FilterGetResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(400) + + return json.NewEncoder(w).Encode(response) +} + +type V1FilterGet403JSONResponse APIErrors + +func (response V1FilterGet403JSONResponse) VisitV1FilterGetResponse(w http.ResponseWriter) error { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(403) + + return json.NewEncoder(w).Encode(response) +} + type V1TaskListStatusMetricsRequestObject struct { Tenant openapi_types.UUID `json:"tenant"` Params V1TaskListStatusMetricsParams @@ -9999,6 +10364,14 @@ type StrictServerInterface interface { V1EventList(ctx echo.Context, request V1EventListRequestObject) (V1EventListResponseObject, error) + V1FilterList(ctx echo.Context, request V1FilterListRequestObject) (V1FilterListResponseObject, error) + + V1FilterCreate(ctx echo.Context, request V1FilterCreateRequestObject) (V1FilterCreateResponseObject, error) + + V1FilterDelete(ctx echo.Context, request V1FilterDeleteRequestObject) (V1FilterDeleteResponseObject, error) + + V1FilterGet(ctx echo.Context, request V1FilterGetRequestObject) (V1FilterGetResponseObject, error) + V1TaskListStatusMetrics(ctx echo.Context, request V1TaskListStatusMetricsRequestObject) (V1TaskListStatusMetricsResponseObject, error) V1TaskGetPointMetrics(ctx echo.Context, request V1TaskGetPointMetricsRequestObject) (V1TaskGetPointMetricsResponseObject, error) @@ -10666,6 
+11039,115 @@ func (sh *strictHandler) V1EventList(ctx echo.Context, tenant openapi_types.UUID return nil } +// V1FilterList operation middleware +func (sh *strictHandler) V1FilterList(ctx echo.Context, tenant openapi_types.UUID, params V1FilterListParams) error { + var request V1FilterListRequestObject + + request.Tenant = tenant + request.Params = params + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.V1FilterList(ctx, request.(V1FilterListRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "V1FilterList") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(V1FilterListResponseObject); ok { + return validResponse.VisitV1FilterListResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("Unexpected response type: %T", response) + } + return nil +} + +// V1FilterCreate operation middleware +func (sh *strictHandler) V1FilterCreate(ctx echo.Context, tenant openapi_types.UUID) error { + var request V1FilterCreateRequestObject + + request.Tenant = tenant + + var body V1FilterCreateJSONRequestBody + if err := ctx.Bind(&body); err != nil { + return err + } + request.Body = &body + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.V1FilterCreate(ctx, request.(V1FilterCreateRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "V1FilterCreate") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(V1FilterCreateResponseObject); ok { + return validResponse.VisitV1FilterCreateResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("Unexpected response type: %T", response) + } + return nil +} + +// V1FilterDelete operation middleware +func (sh *strictHandler) V1FilterDelete(ctx echo.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID) error { + var request V1FilterDeleteRequestObject + + request.Tenant = tenant + request.V1Filter = v1Filter + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.V1FilterDelete(ctx, request.(V1FilterDeleteRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "V1FilterDelete") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(V1FilterDeleteResponseObject); ok { + return validResponse.VisitV1FilterDeleteResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("Unexpected response type: %T", response) + } + return nil +} + +// V1FilterGet operation middleware +func (sh *strictHandler) V1FilterGet(ctx echo.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID) error { + var request V1FilterGetRequestObject + + request.Tenant = tenant + request.V1Filter = v1Filter + + handler := func(ctx echo.Context, request interface{}) (interface{}, error) { + return sh.ssi.V1FilterGet(ctx, request.(V1FilterGetRequestObject)) + } + for _, middleware := range sh.middlewares { + handler = middleware(handler, "V1FilterGet") + } + + response, err := handler(ctx, request) + + if err != nil { + return err + } else if validResponse, ok := response.(V1FilterGetResponseObject); ok { + return validResponse.VisitV1FilterGetResponse(ctx.Response()) + } else if response != nil { + return fmt.Errorf("Unexpected response type: %T", response) + } + 
return nil +} + // V1TaskListStatusMetrics operation middleware func (sh *strictHandler) V1TaskListStatusMetrics(ctx echo.Context, tenant openapi_types.UUID, params V1TaskListStatusMetricsParams) error { var request V1TaskListStatusMetricsRequestObject @@ -13094,253 +13576,261 @@ func (sh *strictHandler) WorkflowVersionGet(ctx echo.Context, workflow openapi_t // Base64 encoded, gzipped, json marshaled Swagger object var swaggerSpec = []string{ - "H4sIAAAAAAAC/+y9+2/bOrI4/q8I+n6BuwvYebU999wA94c0cXu8TZOsnbTYuzcIaImxdSJLOiKVNLfI", - "//4Bn6IkUqL8it0IWOxJLT6Gw5nhcDiPn64Xz5M4ghFG7vFPF3kzOAf0z5Or4SBN45T8naRxAlMcQPrF", - "i31I/utD5KVBgoM4co9d4HgZwvHc+QNgbwaxA0lvhzbuufAHmCchdI8P3x8c9Nz7OJ0D7B67WRDh3967", - "PRc/J9A9doMIwylM3ZdecfjqbMq/nfs4dfAsQGxOdTr3JG/4CDlMc4gQmMJ8VoTTIJrSSWMP3YVB9KCb", - "kvzu4NjBM+j4sZfNYYSBBoCeE9w7AXbgjwBhVABnGuBZNtnz4vn+jOGp78NH8bcOovsAhn4VGgID/eTg", - "GcDK5E6AHIBQ7AUAQ995CvCMwgOSJAw8MAkL2+FGYK5BxEvPTeFfWZBC3z3+d2HqW9k4nvwJPUxgFLSC", - "qsQC5e8BhnP6x/+fwnv32P3/9nPa2+eEty+p7kVOA9IUPFdA4uMaoPkKMajCAsIwfjqdgWgKrwBCT3Gq", - "QezTDOIZTJ04daIYOxmCKXI8EDke7Ug2P0idRPRXcInTDEpwJnEcQhAReNi0KQQYXsMIRLjNpLSbE8En", - "B9O+yHrGYfQYYLZwy8kC2sOJ6Vf2M6X2ADlBhDCIPGg9+ziYRlnSYnIUTCMnS3JWajVlhmcWpEXI4oQ0", - "fem5SYzwLJ5a9rrirUnH5zCOTpJkaODKK/KdsJszPKOryRCkfQjXEyrCDsqSJE5xgREPj969//Dbf/7e", - "J3+U/o/8/l8Hh0daRjXR/wnHSZEH6Lp0VEFA53BB3yGDIie+dwhmYYQDjwo6FeJ/uxOAAs/tudM4noaQ", - "8KLk8YoYqzCzCewhOQFSIMR+SZpERIDVcC2nHDkEkYa8kxNHVHIrdFUlJCoOtbghXwhC2BA5jFXp3ihO", - "ucwVi6mRYVc5kZZEWRL8ESNsoMAY4T/iqXNyNXRmpJUK4wzjBB3v73P63+NfCHHqjh+QBF/gc/M8D/C5", - "ME0ye7jLSRdMPB/eW5PvCKI4Sz2oF+NMJvonhtXjYA6VQzHlYzlPAHFxWpDa7tHB0VH/8Kh/+O768MPx", - "wW/H73/f+/333999+L1/8OH44MBV1BUfYNgnE+hQFRgEQuAzulGA6TlB5NzcMAFBhlYBmkyODt//fvCf", - "/aP3v8H++3fgQx8cffD77w//87dD/9C7v/8vMv8c/DiH0ZQw+bvfNOBkib8omkKAsMP7rwNXJX4IyCT5", - "rqqgG3jjOn6AOvHwIwlSiHRL/j6DjP0JsWLS3eGt96w3eA4x8AEjyYYzo0DBRrlyXZIrEra94v4effjQ", - "hEMJW0+KF4kMLRI9DyaY6Qgj+FcGmTAp4pMpBAyzy1HnPIjMxNpzf/RjkAR9clmYwqgPf+AU9DGYUige", - "QRiQfXGP5Yp7WRb47kuFkBi8uvV+zMIHpoMNHmGEjUuGj+IuZKWvaoZs1FzZDLcvPfeUnEOhBUBDvwhS", - "6+3IL1wZ5bY222O1IAIhXVIceVmawsh7Pg/mAR7jFGA4fWandzYnHU5PLk4H53fDi7ur0eXn0WA8dnvu", - "2ejy6u5i8H0wvnZ77j9vBjeD/J+fR5c3V3ejy5uLs7vR5cfhhbLHOZRsM4R4MGOUMcYw0jOkn6X5pe5p", - "FngzyptMZgTIoeS45y5OxPE8wFEQ9sREFKF6AXHCxAPTiZeSD3R8HWOUkYaSOEKwijUsRG4VYwWw6sFg", - "o5jhOE3j6HucPtyH8dN1GkynMDXuI/D9gEABwq+KYK4M7KVxNPiRpBAhrlNWCIc0ueAbUD3WoyTD2pGT", - "NIjTAFPalgwWRPjdEdueYE7o/R1lL/b3YdXQURFhZLaebnEKnJVV3UoM1ksTPc5KRCfbOOJUkRRIeV3Z", - "5hwZ+rEoQ9kN8KBTM0n/B/hs7J5vk7oZ1THEV3HSynEq+9awP0zPpYvNcT6+GCvXFiPycZwE3klq4qI5", - "+L84coTm4JCNdv52Mrr4uwB6fDF26BjLSB95hM6D6L8Pe3Pw47+PPvxWPUslsGZmZdaMkxCmeDAHQfg5", - "jbPELHZJE6STcWGAMFkjayHuzClyrS+UCyzfDx5hj85YXTsHtWnlDdoTG1y71/ST2FayVgfH3Pyykr0V", - "6+q5aRzCJiWGreYrnE9gOiLttfhw+WBNWDHiw04HZmauVWCBLgOF2VQ/Kfmy+kl73JRLxfSL4eZPgdLj", - "MT/+kK30zn+9UloXTGXF01DLT4pppWoWkWdgq7mWuC/NIZ7FfrP2raDrK+ui6FK1Z8TCB3bPZdQy9LVz", - "PHF4Gj4b1Q3R4BtMycmuHcZ895Og6QYqzV6AlVNGTgdyDxrp9DzQcXsCpkEkzXh1u3glW0rtkwqupzbX", - "MJVvrMyNOtpR7ihng08nN+fk7nFyNTTcNpQBLlMfph+fP4nHGjFMJLQ1WDFo5CNRlW2TutqSqtYSfI3l", - "A0jzgVRmtSq4w7OiAC8/fPFnMeNCBP2PsmiczecgfW6CjG7V92q3GpZkGqNcyK3Y8DOgM262UaOdv/1j", - "fHnhTJ4xRH9vvntJpZVO/2U5GhBjbAHzy+VU+V4Aui1Q1oDIJchZkEJPgCSkCECeyx7EzfLDJIEsRM8Y", - "gtSbaU8jE71XjfLUlKV9m6EaXkaUS8KtsqGTZhEqX8EMvgD3ILAYmrVqM24CI5+stGFg3qzNyH9lMGuG", - "mLVqM26aRZEFxLxZm5FR5nkQ+s1Ay4b2o0sqR3UWV809iX7bUy+CC/DYEieWWawrZtx/xBONIK9zX6Hy", - "XHFg4afYn/Fkb00PD5UxEYaJvfQaY5joEFurCuNgDuMM65fPPzYt/XFZNfhRUX/F9YsuXafX/iOejLKo", - 
"RrqxpyW75yLZSfpRmZuMIECGi9l9EAVo1m7qPxlF1u0oIVrW0rB7SxBdClEW6m2mCIMUt1sMwgBnyGI9", - "5HxibTl9j7KoHYmTzW9P5d4DTOtZoM1yFaW0CWTlYC71XP7ayAYRBCJ3wcw1Y7lNQvW4GlycDS8+uz13", - "dHNxwf4a35yeDgZngzO35346GZ7TP9iDEPv748npl8tPn7TaClHj9G4its5l5a6azeaT0OcQZH4P2ajy", - "KJ+8tfojgbhogkavDG8RmsYXRAU2PpGOzOgyQ+A9fIeTWRw/vPoiFVhWtcR4eh5EsJXPCzlM6WeiSBDJ", - "Io7UMJ46YRDBNg4OzDFWOwcZjjdoVFJMvVkLjU2ihC3VGST31pUz3OaoOoePMCwabj7eEEEzvPh06fbc", - "7yejC7fnDkajy5FepijjyMuT1f4XINAJEv799e+egqz00oN9XOL+WRyh5Q2Ud665g2oQoLpA/HSZwwG+", - "SyjtHvXcCP4Q/3rXc6NsTv+B3OPDA2oFLnBWobPOU4q3cBJGhXLiI6trlQKL1q0Q/qiO/M5u5HxdWgev", - "GINQvcSSptSyEwYIs9eN3C3/wOYWp5FY/yQ32K8Qp4GnkcdRNr+yu2JTOhYX7T3Tev9pdatmYwXM34te", - "sY0Djuyu02xEfqnea34lzkEtzNJTEaKT/yOAIXWbqaLSymabEvEfkgG0IjoECI/gfRAaniWp3x93DFQH", - "o06BKe0IqevLGrwn6UTfQJgZjh/+PKPaONhDI3Kowzk3+fJdfwoiP37Sb/sqbMoNiH40r0NIE8065sCH", - "totg3/RTsG90GWQvg0hxY8rRzFyj7+PUg36z8bhkxS7sl1ivhKpAabcqXW/BYZjzmPY4lJ+XOBDLY1SO", - "RIZNgTUFldrRoAcjPFbus6V3IgqeiZ7ZV0fnsqYaINrcUBexSCxhTVibyYCjNLcZVC7QZbfJeh6RG9FT", - "79YclvLoWvEPyV9vxyl3BJMQPP9S/q9sSYphBhlXVqCH112f0vzDwUHDektwm1ZtMpwo3e2FdsnSZQuf", - "gC4lXE6ZvYat9G6eWv9MMmrJxqEZcAoRvkkNutbN6NzBsYNg5FPHPn7NRQ6O1/Pobjogsij4i2gDPoxw", - "cB/AVGqTXAHiQSLM/1CNrZrAMI6mAuIGWdlbp/ujnWmz1qVx7M2gn4VQobRlPY/X7DncczHzkLY/Gds4", - "G+eD3yro8Vdn6aU+/uSP8ekfg7Mb8qNO/ZEzr9cxbktd3Kqrz/3cNuHO1prEVucBN8qiU9Xs2fr5hAGw", - "6bNUAcBmiWMrVfV7pcNrugrmRFHrJVil3S24/mnEiZW/oJERWzkNVkcxXRFVHNdbUMdwDpJZnMJxGOMV", - "3w8Ldy/9Iz4ziKAwZmYi3sP+0WHBuxp/3zUti3x20kwsrFk5UR9qmxcahKHwYLBfaUU0aUw3vIk96CUG", - "z9HSU++j5Vdd8ZpLyEd9xqo+PM1AFMHQBC//7AS+3k6GyODOExtdb4FgI1wYgwPEFDRIYMFJllKewdy0", - "evJtiaWT7uZ108GXWfRWqP12irlAhER3kS56ChlqDxoME5Pc0/vdzILQT2HRdaDh1r8mX5kEpJWI40ZI", - "Ugh8MAmhaXPFd5l7gAnERjJZyoXLMIOZApRVFMhBuJzwDWRvaDVbvwaXrRM8SOLCe6SiMK/IsYsS4XeT", - "NaSRBgrd0WmcRVgPLjRCuYghN+9Tg6HyzbfgmWbh2MT98GT71bNdnGETiAtyJH1oPLnHMLVH5sod5ViX", - "mp1ZQtuy9RElbU3ixELWtFmx7FKzYqL6GPzzrA4nSYFyZbXOcBx1J6k3Cx7hTsql9pfurRIxMblR6TvV", - "cH0KcfpcI0XXxo/KNWYzLFFzY1CQIPCov32a6H0bLvhFBtQ+8vI2hsA7z0wFZluvr++guNRpSE7woMV6", - "+CsZ7UHoBj5CYfuz7T0Wfazo7lOQIjyGTEm2p71z0LZXS7dldssoAFiaWWJWQZPqR8j2t4aYtyVmrECm", - "jYSci3RhQxoNmI397uLy7vvl6Mtg5PbyH0cn14O78+HX4XVugx9efL67Hn4dnN1d3lA71ng8/HzBrPTX", - "J6Nr+tfJ6ZeLy+/ng7PPzLg/vBiO/yja+UeD69G/2DuAavInQ1/eXN+NBp9GA95nNFAmUecen1+SlueD", - "k7Ecczg4u/v4r7ubMV0KWdOn88vvd6ObizuWI+jL4F936suDoQkHVGtO03GMglTFsZQvcDS8Hp6enNeN", - "Vvdkwv+6Y2j4OrgoIb7Fkwr/m7Wu86TPE5GWU6TClGeCGBjydXwXqRZjh7YW9oI57YX2tHkVQQTCZxx4", - "6DLBlxmuGTU3QMwAcuIEQ9/hl0w5iH6OtadnM2WJWDrNRB7nZBcZze30FkngKFz56DqZp83istn0LWuK", - "0DNncdGueQsEvn4vdNlupnGfEa07ok8YL8VVBdF0DDH5D9ock7PUEYMfSUB2mQasUGDqx2e92DTIeaJ5", - "GmnsjQNS6IAkSWPgzYJoyhI2UgTXzS+y0DAioW54C0LBliwyY1bhoX57tbhQrDufQBBmKbQAhbqEqICo", - "jwKIRjnr5wwBYks1P9jkHr4g4jtLH23K2bDqffnAD0Fkn6jdI/KejU67zr1o4gAsHFE5Va3WVm+WBFqA", - "zXJhKD3s1pPQ6UUm56x9bBKpWXla7k2mK10sa1TTkwNnKNODifhsxhprUfdkQkco5Ew0nrkNB4dId5Xv", - "lZrEo4F2tuYo4aTc7gRhe1qF/9UIyj5fDGG9ptY3CKasx1U2CQOvjhToeDWJz1SYt2bT+f4tsukjvk/i", - "lnL5/YLetE7Ovg4v3J77dfD142BUc6WojweiNnJk9rLSWVAqOKeBTU2YKMChGBnq5m4zXtlLVCJAUL6K", - "RXn3Zn/ckZut23MH39hdT72jkjvwyfgL//N0dHmhOMjV4L2g7+hUPpDOa6Jr6HeHBiTohTOLA8Kx8wRS", - "mq+iogix3vpolXaBR/qYo9WEEbGxzUvUw79cLgRJD82sK6nHLoioacPaxw7NIYapiCASZygby/lbsAf3", - "nEPHB88959B5gvCB/HceR3j29wWf/iV6tBFFZpErEHUVh4GnyUfEdPO666rMeM+aahSGFiK3yH5NHuoc", - "OPPquNXIVpgahdG33KogZNG3A7fnfjvUixLmc7YBF2ijV/0NTf3/FjPLqitvCAFaSVJXo06jAmLe/x02", - "FHZ2ite1U6zRfrCWLPwt7MALm3ENXPidOi+Yg5bQFciQLiJeZRPmAeEEyEloawdEvuOBKIqxA2gdEFpg", - 
"TGRzK2+YFjqkuyA2GkiA76cQIdVQUlDtxM27ai8hH/4AaKaT8jOAZuqQ/4FK03G5z7QjVp9rzEpdOacz", - "gI0TfoNpcB80oZeae4gMeuTNeY24Agx6TpgBZK5Ep50DyNJzDoJ4gw8hfoCSEDwXGEHsX2vLShG7twYC", - "K5bqM+cVh09mJFLehU851oSap4d9geNelgJ8ob5jdYBIIGrxtxwMlbxDslChiicTys/jaRAtntF+Mf5e", - "KsH91mFcrDFpwvUITgOEa6T7NqLb7oQ0CIYt3C1RLMt201S1Gs2CBO2q1a9iBd3gab6OU4ZNptu2b4es", - "ztY1QA81RaEwTCMQ8kh840WSN3OGZ6jHtVEPRE4K77l+HrAzG6AHJ06L8TxqZ/UGutLkBD33PghxsxX8", - "2yHBxyfWVqvjfTs8A9NTJVKiHBmkiaFonlFmTK8C7oOpbdoNDbBdFv2dyaLPN2upPPraeW9zStgCuSxo", - "Uuu5Z8bBL5JbvcuAXuN0ncqMcH+JPHH57HJPFH99TtrGBKEAYzhPDF4f/KOygnJ+UE0wwUYyjoYifWc9", - "H5VTbb5eotJyoIAu9g6nzw71MbbBdPvMpyV0LJH7NB9pG4RlbZbSb4csx1CnxLVV4kgLk27UXmgQnOgF", - "Bo0B+loT7kA1iGv6q82qBrL5guEV1sE+tccIxGkAUfPyyZczdnsxpv0gbaw0XBbQgDCYJ+3iKESgW7uo", - "bNaEAadOre5ZjuvbejLbClmSE71BmhQpbP0xEy2DJMRYheCIckCEPpqiHCQxHlxc312ri5FruGNHQyWi", - "43Q0OLku5Wj6Mry6MkRMFKSQ5QXM3jMcBRHznmiTTgS2JZY81LU8fxZhZgZpm/2oCEIzx9d5DDAkmDnv", - "Kg4izDwFqjvACU4rQPOQEr3zVTCHCyYX4400MStWy9A4PLBo6bY7q6LGKtMP6zbKIhM+vdooUYvw6iLJ", - "VeIxeb5Qc5haCcK2GMmXpiH3AmyKXJSSIA9HOr38enU+uK5EIdUEVxVtQIulKlLsFcXTOJ9mWaMP1fzS", - "LNJeAFaqNalWNJ23GtNYRSs6ELJ1TWo0uDXc8aQikOPkCSCH97K/5PlFtcjO6qvZAmXELM+6WVPBvzRU", - "zwkiZx6EYYCgF0c+slNkNeZv0kLcMcuzOH+TbnMAQ4TJb39vTqNshX4yvOhmj3/phFWdg36qQTmnev6I", - "IH5MYASSYO8iji6yMASTEP5jTB1VZat+ME/iFOdln91q4wSQ25A7DfAsm+x58Xyf107u+/BR/L0PkmD/", - "8XAfwfQRpvsxoGf0j37Ex3KP70GI4JKv3tl8nICnCPqnteyoWIBY8ypj1uUcqA7IvrWkoB3akzXcsywy", - "0mmYZk1Z6cq6Y576xJCRrnp2LWs3IPcYafYwiKvcZLGG2S2eFWovzcMIwbT9KRTwbvZb2u4VpFjmYpNp", - "xptITjwZCNtIMd9q4z2jTf7VBShLNZbZJF7UJjGtzsQd6zQTLZODTrW2qlpKj5lERH5kzfkg5bqS4L/E", - "i6pJpUjnxeR3zLpSSLCq34LbsgK9XlOLWR1dpRa6UJ00NuZ1MOcPQWs0a/owwTNTBdcEzwqHtyjcAjBM", - "70EY6ofcmOK3dFrC68VOF9Z5VYcMeyFriSxyRLCO9uh6a9qKxmS9grtZp5H8QhqJRcriBh1gqRyxTPiW", - "jtizwkG9yKF7WzpCXvMcJdRE03S0Ok750bey03RjgV/FqgUaF1D+1URK+vyOqj5b75kqWjc7FJYS4Och", - "arkGrk08u1bbY3uLnTgMO6tdZ7X7tax2b9mwZlLPlzzut1td3RltqaUlpsH0odGruDVkKd0q8AuKVW6Q", - "KBpCCnYJafMwnIZnEItY4NLTbXPO7+KxSqhkBprNYar/LGn/KU418IhryaNI7N2gk1G3lzwlQMmmtbwP", - "CgMHrcp1vupc6hYWLHAppq3uW/GsL+6d3+DVtIaEXeqUdcC+luKuqj4tNHcDxlelxRcMw6pb6MlnnlZG", - "+ybPa7+wUOOVZn2yCxbjNUx4mLJWc8xMVRRF3ywNWwWi8rAvMq4OlwWUsCJk5vDTVS0SQS+FhpOXfZOR", - "JTzGjRwBzvDeiWLsJGn8GPjQ7znASUHkx3PRiRYrmkBnCiOYijuAepQdrQ3j7dHsbycBLrY3myZlCWcj", - "songNJdo3aizZlH8WHloFboYGZPfeO+AYd9o5AKIfKXiPRtqsfuyXT1HHeh5RUemjp3GvoFq/7i+vnJY", - "I8eLfUnBKUe+fYDBHVCK9xUmvrVEeD0JiaL29eeooHnR2jrrkpYCFqadaiW/z4Nrt+deXY7pf26uqRZi", - "OiGZXxqqc1pDLIiOZ+LwQOQkMCV0tdcqSzJ4BAG9CYpyQQ0RUNVp4Q/oZRg6XhzxJGLhs+HhK0AJvZam", - "uosJobq8uBRAKJhG0HfyTtRsc3MzPHM4+2z+OhaCCQxRfQY12oayVMFEyI4B69LUMD0n4+i2LAQI/wFB", - "iicQWJQX5FtFE+IhAiBwZqJ38Up7dHB01D886h++uz78cHzw2/H73/d+//33dx9+7x98OD44sI/BAoyZ", - "iXowQBhMQmqp2kJI5+CHmfB5wdvVMcD69Q6zvpFCD8o0cMgUaEbayEqXxetqCwIeFefSFbfKIrIlw+g+", - "tuOGkdKBZsePTScBEsVLWWFNxogLLqRUCFUXSyANTzr/LHqsVvZGHAknp9fDbwMaXSf/vDq5GRs8nW18", - "eRiypB8PO5mMpUH5WckkagnIZlsT633TpH3ejM41w7dVRml7rSKhCMvKOVobUC9CKUnXVcfT12TaZBk2", - "GyavD1qvwcPrh0oZ1W4J5KjI/KU0myCaZvzFxVosjM++IHbwsM5KlspqEJ9eMeISafADp0DbAPkP5mEr", - "i6MQqerf5fkJq83yr+s/qP3++l9Xg/HpaHh1rbeh5JysDDMenH/643LMIiS+nlycsPCq74OPf1xefjEO", - "JPIUl8xwKm3q3ZHkLxavnb0WyeOo7VOmj9MnHfsznhgEK/miA8iKPv8RT1ZaLqTN2WzEnEg1pFGPwHTx", - "tUr7HdAq/7bJ9W1WwB8Q2skJ5a1CZiKps1tqzoW6BPpiilOhmeny+k4hVr7LWjWl5/VIxAqx4O4pxIji", - "xMu7OlPSV551iml2z5hXeoxTgOG0Mb+JAuF5oV97HTZXU4vpNctJJ94dNV/9xdTl1fS0WK3bouGZLr5e", - "Ajg80+JQ9P4SRIXL9qebi9PrIRWzZzejk4/nRLU6O/lcKyDJIOL8bEXBdHYNe4nv+kN5KVfKDZ/n9Pyw", - 
"M4bw1saQS8okX2CdVyRNx6+jWMljD/DZ4LQhhidkaed4Ke45wEEJ9IL7wMsncf6WAISg7zwGwGEJFP6u", - "5wojIlp49OirCOA0g5rxm97QVNcYeXE+PDg46K29oLP0M2m1IFYV154u84rOKzzKmRcEOxE3bVxic4/V", - "MpqbBmGxkrQWnhfqo7rW/aKqETEHQuh/fG4x+LXSq+oP0VIlMXpULFKWoDqQ6iuhgH1bL0y25IaneFXY", - "HwqjLLpMfZh+fD4LUujhUiGBk/EpOaYH49Paczof5VMAw8K5r0bf5LRckGKKZGyYZCy8RTrZ3cnuTna/", - "luw2zPELivYad7MFRDMdbYjh3OzAZrivNHc2pm4d03Dl+jw1S+YxyyOiVx7ovIIBDTK9nMmmHGLCF9Wr", - "IFIZtYl6agpCtyj4LLOy1BV7rky70L25KFDMxHhdFCdln4w4ulIkfwVW0mDszaCfhTVZ5Aydlz6Ovrcr", - "RZ+nZazfbMQSUBs9VQpRVmtkR0OEC5+2aRFGIwFNVNSGjsRQp6xjkxZaal6ZP2cIbU6muvRXgum0Hzlz", - "ab8JHm2fVKtusddgqkNvyFTG5U3+0YpjpLhZl0FYRz9cKJym5CJzr5cLWpZmfHkXGLixaULqAK2dkcqR", - "O/7kuOppkX6F7TWDEt40khdKn/ZFBpb4Wa1yz9QtPfpyDeyOv0K0RzMrjrSCskjNL1t1YCjabJllC08Y", - "NhuivnrQtAP3IAvxVW2wJG9kDJq0eiTIn+5e6UEuTn3mVWcBKuKqwXUwh7EhuSvCgffwbHLyIN8cxJ8+", - "7F77FJ5uwVqoVA/MnNPEBgi1fq2t/b/2Tma+KwmYxc4UBrptZge6r6t8QGlDIG8K4czrIH85KWL8PoXU", - "E+rUnK5yDn40tHhqpxGbclYyF/qMCCmi3c8ZhBMIUpieZCzBCsUolb3053xTZhgn9G4Qxw8BFM0Dsqvs", - "J/HAfOzyQMq8L0iCL5D7swTchUXjV826OSdXQ5oAGVNLUPFXSVnu4d7B3gElTBYb6h677/YO9w54mCdd", - "Gg3lDINHyB+tq/N+Fo/SpFUEEXKkFYLsIhCZMN1z/v0zXZdw9aazHB0cVAf+A4IQz6hU/qD7fhFjOWdh", - "Z9zjf9/2XCSScxII84bC6+HffHxvBr0H95b0p2tNIfCfmxdLmgV1qx2JBqtcLgWOlo1k5Q5xCu7veeX3", - "utVLaBuX/3i4D3hNyz4tRdSnz5Jo/yf9Wf3thcEYQqzRxc/o78gBshgLLZ3KCi7R7hWMlcrkshEoLaaA", - "lnMmYNe4jFRmcHjyK/eYhS1L7qosxVW5n1mbmVxc+ur6clvZ+/dVbI0zz4MI3Wdh+OwwlPqFSjYV5L30", - "3PeMSrw4wjwrPkiSMPAoRvf/ROz0yNfRcFoN0jROeVGtskfEHIQEC9B34tSZAF8EOjAw3q0cDB0Un+J0", - "Evg+ZLpsTt+MTurITFA8LyR023N/9GWVWWoYYx96GsK4pZco7GlSZzHlfRkSZyP8GiRO6eFjzGTnSojB", - "ooS2hkxqsYVjJxM4L2LjRS+iV7IQ7RJ0sBfEAAO0EwOWYoBRy/rEgHpAJkGflcze/yn/pqdhEiON0jCC", - "j/EDdEBENDBWbJv7/sgZS2IiCWg1b2EeIN1tpIQc3iATBKxbddyldHmczil0vzZRozZUzUmHbOw13zlB", - "xvlvdZQst7xAwV4YZ/6+epU1a7uVxE7iOkEHcYIIYRB5sELEp+SzcFYwK8Hrxy0FxMkiGXS4NQTWoLUz", - "BKuvv3zrvyrvNT/6Yoh+nDDXCX6iKfvNjKv7P+l/X+r2m0gpWSKxuKHUxso2slES0SGMygn9ulEhtLrN", - "5plQGg5vlljzkYs1hg26Y51sK5C4gpmcvBmKa6Qao59bM4XvN4k1ui1SqjXQ/JkUYG+d7s8oCXe0v120", - "P4cLn+HG03tzBzfPodSGpuSRuCMH+SqOcDLGPjVos11Cxh0/DxC5AIVOobVpg0nrYbHh2nabzMV3XJmy", - "5eaLnBuF1W0TIcitpxtR2oTq/hc2OY4CHBNpvv+TcfzLfpLGE2i+XIpXOgfkD8E4dqhdl+KrGA9uZng5", - "9VWM8CiLrui89rYp06EnJdeGT70aguK5Exg9UfzubfRUuIixAzI8i9Pg/wgUsciiwrI8sJi/ipkTs9LV", - "zG7v0O1xPnF5Psy3VX9wFMgMhcB72P9J/2NhxXfGpKEIra9QDv3K09HYG+0LYxqJh4K4ldb5Ik62SbU5", - "3AwYN1FOwmziD5uZmGU5osniQBjGT2R63YtAmWqF6KW/16lYjOiKHBOh/Z8oQlbccjFWpX6VXyLUgk2K", - "g5kZhZ/cW8cmJWR0jLKFjFIhWMkqF+NaRomQhk2E4qJYm/SqC5lXXIkrLNL6bezV9I+e2RDwAJ8XtQQo", - "MBx9+FAA4nAVOlCSxuQf0O/OsC1iTdMlkqZbd0CSCGqvHmusTYkfMZiEcN8HU7QvMzUbL42I3hppOwfP", - "AHYmMIyjqRqjLhMHg2n1Svnt8AzQGiTXvK5Ws7lM1FzJ032wJL+UZf7KYPqc84wPpneBX3/MrSvewEru", - "lOB9rYuPNfWurDDaGZjKgnLaDEw1cohMKV7/6Kxv20rYcz9sSviRW2gwT0I4hxGu6AbUeCGrwYqnc4Ae", - "tBKGNtz/Sf7T8LzEEtNPnhnflAUImcDS1M4K1ZkOfQLoho/8YkU+g1AQNf1UWCqRNeu045dS8LcyvVGs", - "vnX+fM/uPuuf9VotykY0hfs4Yyl/tkRE5PxcERHmOwO2ESH7YTxt0lXCeOqEQQRFHh0OR1minMfT8yBi", - "FRa2XKqsl+1VRLQ4lHlcVvd2VzwZJfUppH8eT5enfPL//TwYzvzCo9R2MRK/LN2yC+Tfq0mZhWMHPQSJ", - "4VCN7+8RLJ6paojNb++12bPqp6Op5ZzJs2FK+rnljOs/1vO9XuCRvlO9u6O9ION0Emb5Y562UMyEFrKO", - "GSaK4MjiaGWZ107ebaPt8FeSeb1qek1hZHiAz4jMyjIcmqcl7VytkafRX4mGzFUMEuuVw/YyuNOymrQs", - "KOq+WbpHWQkcqmHN8yQgNbYKDo6sp5dmkcN71nvws+OYrIHlmhE5R3ZVJNE0X7xoRuxMmU+TQIOBb1HA", - "HBIsAK3JF2Z4XpHljmyhySIchO5qZs8zLvhMgEW+zOhqmF7mimAW7NexWLNsIJSU/wOpxncD0Dx7CGl/", - "J1rfsXqb6yS2/DGAvaLRtwiZ0Sy3RBnM7qxhEE3vWG6U9UC+fmV+lEVCbLQ306miqjOpb4+9jO7NXJ4G", - 
"duq0/bGWxEGELQ+3eRBlGDqTZ/lXCsGDHz9F8rxrcdZ9hviKTL7rJx09VcA9hkoVe6WOXrkg1GH/gPzv", - "+uDgmP7vfwxSSeSIJQOv6BSikE7gfZzCEqgxgW8JYEUK14908Pbgrl82FkhtAelI+aSTj1sqH4u7s3Ip", - "ifY9msfQ7JnE8hzK50+dvGNNdsY/evXh+d8OGQqoqtIQj89cSmLHE0izCr6vYWk2UNg5DnTWS40gKbHv", - "ysVHCpMQPNcF/NOq6nXigzV50+KDoaCN+EgF0pYVH2ygTnp00kMjPUrcu0LpIYxA/TSLml48ClWyGh89", - "yqU6uqePrX76YDF50Or5Q7Rt/wTCXb0Myc4NhlWQhgFEmFWVtwFvjVbeEOA2oKzKxHsii5DkgdMP/Ucn", - "AUFqtWV5FZM7JfRXs3sLWXEV6zPaKfMzr14tjOb1OGSN12xtlmmUYyeIvDDzqf85IodyHIXP6u/SJVon", - "kKLw+U40MDNCNSVzg5G+4B9vgbNXstdrvPiNhnv1WPvVDfjcybbtW3Cnj26bI0xBF1NUQqF1ObQM1Qp1", - "w31eq6tPWKJJU+RtybC0/AGN9Terj/Xa41leJAzttCapCKVKJWOGFP50zNHHUWc+QhUp9CoxSOuVV3oS", - "6ERXJ7raii5+fDdmLHGAE8GnskJQI5pO6RPamzaZcdQpSGkwnanYpQZ4gcPNZb8tyhZRB65BpvDn0qrK", - "2HnI5VZuiqMyA62AwYv8/PPxsK/+0hTfVyA5EPlOoCYgw7E8cOOIbu//uj4liv91nQRMYb0MsIwNLMDA", - "7k5T2lMjDUrL29n4ngW4rDu5dyj21pKhexWCXoDF7aOQ8owB7I5hf5zLeBXre8YvbRw2iC2tce/XFGDt", - "Apg62dXJrhrZhYN5EE1RYw5g3q619PoM8TWfYmf1kZ6+Fl+CZ+yxnXnNOaI4sCktC+mwdZGQbHM6SbLz", - "kqSOP1ctXmDCZYr482UfpN4seIRNWhBvxcEk3bUihJfqp3lwxcAW4kOMZ87+x+Htnuu3Mzqb7zvf8y5A", - "eyeclCTXlRyVqkKqwP4K88s0iuSnUVZbqUWycLNMap0dwkYeDUSoZSeN3og06pJF/IqySGH89UuiBdI0", - "CaCqXowtMzV1Yuh1fRhD+AhDK3c41rIwc22VCE4HpNenAIa+MSgdkoPXobMpcNREpNMObQEZs15a9zGA", - "ycS03LV5/fTzx2e2lpaTX6p9DXhg0/tBCj2ev7kGijOl2SKQ5P3Xe0h16cpeOV2Z/hjgj/81oXP0VRLx", - "132DyxErwXmqPkav+rGcDc4msisX+zrP47waaZsHcY7UrvZr6SVcST5VX+lVR9HSnYWSdl3FZ1ou7keA", - "cBBN6wl8d3LYb6CEsx0TJilBJA5YbMSrFWvu+HFltZhbVF6u5ct9WpU5iKZqYWZUX/cO5BnNDXWhUVON", - "9l1xgr3ddAHzBSwH5k3oeKf46lFDrfbM1Guhomkrptcyxpt3x1Q1zBJu7M66CspbqaCH62XvBU5ADQ11", - "fK3VUbXcttpTch9BjJtcIBDdPdHFEV3q40UUcgmi6Zj3sa3a8DaOSQUxS5yR6p50rFTME2RC08r4KAn6", - "OH6ATYV2T66GDmtXzzUnSXBNmnX6pKwDTPGBRnyWlnwi/Dg6W19ZeSQUyVCrMIP8cXGF0QFRTu12xN7p", - "iBQBgtYVtXCdJozypB1/rTjkJmemlgxWd+CsNg9+lwW/y4JfnwW/BibpZjk8Q7a5OpisaA2gcN0cni0I", - "YppFy6fTsYFwlEUslQ43fL3KkzTdz9d5kKZTb8FztAqH+hhdQyx5Fh/47DyCMIP6XD4yWe6/CbsdHtOm", - "h26P/OuI/euIiPf6nD9fV5vyJ18GS6ois/7U0zltPNxMtp913hW6chqvWU7DzoRMxzXoIN0VgCKA4qLB", - "LMyTJr2KGwKjhDY2X1bC/M17gR7912ZmHXH+5Oop/OFB6Fcz37ILiigvv0jZHMPFZH+ShQ9mt5+PWfjA", - "yQPlMgHVCgXS5w0LBrL8lsIBvaZ0QO3FQ+clvmXygbKpKiTQiqWEXWZ9ZshQUpMVVFyT1GBuJW8+8T5D", - "gL1CwS8M7ZJnW0OaO2yRfz3ll2Vy91hjglHxQzz5E3oWmgtFGsxzKXRCamuFFE/AvRb5RM1oljZWZpuz", - "sLN+gc/ds15ubFzotk6R3d3YdTd2h9t+V8kHdiUsULuj+c0XtWAI2JajeTVmtUKtjO7AfDMHZhA9Bhi2", - "dbAWvfROY0P6tTsrha+Ygo+FvMQEtjvfMJ37dE6La/KZZhPU0npn/la8pBlK7JyjGW5f1SOagbuIIzQn", - "jI4t9d7Pkm9W46rJ+Vz80Gf/fmFMHEIMq+x8Rn9H8mJnw8qsz+4Wti3wVT1sfYmOXT9bG7mXUcg2c2+B", - "kRgR5uRqit4u7iM91+piWttxwu7Ete4KJ6w39Haxc/fVgm8tOZfBtzOcy4NiW3Nu3ck3h/MJZb5WdzTR", - "S8/iX+nX7o4mqFHBx0J3NIHtThnU3dFyWlyNLsjH2//J/rBQAh3AgXDu03jeFPbGqOHXUAX5sk2wsc8b", - "5d33a+HdRXTAt8G1W5Tl7sKQ1E4yaWFjViYv/spgBvtzIri95mzhtLXDW8tX5FqB8Rnif5JeX/kUuygz", - "dioyYJecvdevvRRob7EIMOcRpiiII0H3nUx8bZlIxJHcnbkULOV06IvKxBRg2KcPTjauEqQ1e55q8pUY", - "AQzPScMuLm2bq9CsIobJovLy+iKVJJ1tQbRSGZZNpc8s8loLZxyFnTtvnNKdVcVNLm4Jqp1z9uuiEpf3", - "6CdxGHjPzSlbRAeHdbBJ2CJcCa5ojy5dy74OLYuZeEq70Zl6Np71CIXAe6hP1DImTZwnOJnF8UPV+Ek/", - "f2dfO+Mny9Gi4qTN7aGE6m1ihw1VNrqJQIZncRr8H/TZxB82M/FXiGexT0sEgDCMn/RVldgGUT2QsYB6", - "ntGPSzHiPsIgxUZ2HJOv7By7PMnwzKGXlTJD3iCYsjcTCtAlQSjtuYuc+e7gSIMHlXsoyvixUsDKDAKf", - "v/GEMSOYIq2U56ZUgaCXpQF+pvjx4vghgGRQmvz4VqUHitLijIIQyA4sTAdNebPGF+MyAZYEcoQ6Oczl", - "8MV4qKKqhSQuY7mTxVsni6uMICXxxXiJdF2lgXUM1nknUgQU+as2S9fqaLY4qbWXYXlXO4beIoY2cp4l", - "R9eeqLweR38TT1a8RNiuvVyt31ygQ0w7m4GsW1XYme5RZRseVeTeVB9VlrRPaKqn1bJuXijNmTwzhtKW", - 
"btwRO15vWyu4baDO4oLyoZMIW1dgURURKymqaCUnGnNqnGAM5wlPDkPbWtR83bVkGp0EqXNgCxB17+ci", - "hBFBuH0XhFd+xGtilE0xdApJx5rYe5qkxJaHafOOhbcxG0CaRXyrGoIvgijJqD8Ee9zVLfdlKzSVLhdA", - "jXyhG/4aAiVfU60tgDWzLAr/GeIxG7YTLa+nHbTLcmWwNPDhugvFNl8oxC6tRWrwt/j+U5w+1AWM5W6d", - "RkeJzkcid1FnqPhOkUoQUldrgyBDutGzjo7Yjs6Iv22vcgr5L54qhA9iYqE3//pW4B+GjQ2VyNHM7LdK", - "9CG2tuPc7Xt+UxlvEWM9k8r15nlyQjLhXe97m58Nb/6wzDHRVaJa+qopQoCKsdMMx4s+UglEs+tl+wyR", - "ak0eTaJIpZBOly5SSRep4AU1mIkKVY9eL3mkDm7rInOKBalAMN31dCuTShb3qBpkWH9BbSNwfqr/bHod", - "L3BC4wnMyXSXH8tLrK8HTcXgDqsJfLsWjVfuHs/N0cJFu3RzpHCvSFOL8/M+feJoNFGzhxDG0CrQew18", - "PaSjd8z9+syd50a4UkpDMBiXsWYXcUS3uzNob8ig/V3FfWSTlSDfpLYqw+okDpqBBK5JjxjTsTt5szPK", - "BNuwTqP4hTQK6RFvUTq7UDU7DOWrG9LoGnWsT8Ox2AP5QKTb72TAygE8Bwg7wzOatHIGnRCIHTQlPwEI", - "D31j9pN3R7rsJxvw3GtTZkOVPJ1vzZa+2C8gS+yf8+1kIbJ6maAt7TSaN5mOyYf3IAuxe3zQK4iKTSRm", - "knN/WGRyVv7dmTw7dAL9pPyTOUp8E2pX99izen1rlYne5JiWZTsd4EwA9maVx546jenN1+tU30kYMmyd", - "gbmPevWp5E0X8Qy716OGpEuMbDbxcoP2vTSOmjUS0sr5M57kQOE0mE4b3SdO0zh602rKzmSNlBsb+GTa", - "KcRSJd5rSA5surit4a5LZm4L3kWTKqWdklJ8m+lIh/ZT7Wbe45pMnJNn555n+1xZQlBViiD7pKCT5/Xl", - "BVWUgg1nBi0gYwkNvTt2NVp65Zxbk7pODt39n+Q/ffGrXamL6kFs/fBBCGfHC1/I1ZvAKmB086UvLGtU", - "aDexyzparhmhR1O7t4oiQdy+9OoeE5dkrl12T9pizlrT0dkdm7tg2G91WK9APtid35QGbK346tNCs29C", - "d0ve5luyKPtveymk7Td4P97Gy3sCUoI0w3t1CSzW+LtqwdwQfJpocy1s/GV4vXCdaIMyHIQBzhC0Kt0k", - "2i5ypR3TvvxyaQPcQxD5VlDRhq1B+hJEfjM0O29BwcEcOuCeAFrxmHwCSAQwqktwjw6ODvsH5H/XBwfH", - "9H//Y7RQ0e4nZAI98foAwz6BwrWtREggnsD7OIXrBPkjnWGVMNdg+T6IAjRbHGbRf6N4XhXQK8X0+iyC", - "VfPbm7UHlnXH7lqzFh/J9RgCqVukTSpg4HDQyEFXZH81N7Cl9/MuF7Ps1PBODd+8Gt7plp1u+SpxD2jJ", - "4q9UAHVJypvP9zUUYs3PeQKqn4XkeGywGsqWi9gPx6JzZ0XcZivi+u5FkgB2yl2iU6Y6ZWpnlKl8Gbmo", - "Xolt1qqqvmRwaaXdcFn6qoTprA6r1UoMGsB69ZL9n/LPfiWPS6NXkh7kljrLjvsmaXBgzFusRfXWuivp", - "d7fzVyr7Kxnw1M4hwUAbDZ5LK2HAna5FtFPct87juDuKd92vab1yxE4xkKkaXvIIodpqpcCJ4JM5Tsg+", - "TOiaddid5MrNESv1uRlqQdtoHVXNNrSpe2Lc/I0mt2zn5KnmhDbD34nFzRd33LqEmlzQ1VH5ekI0FVlc", - "sCPr5bHQCLhEttcHK6rEKIs6KbxJKSx2QNmANvLXqDdssBBVe3VUlcBv8qbZiV8r8csVkiadeOUil2Vp", - "73txFuEGFx3aRuS8EuUFwCMIQjAJIZW+irjR38Y/Q8yywKNTOuPOi96m1GQ7npqwsFkLXr0ZqTDy6azh", - "hjf6ApIWS1hYZP8MwRTte1mawnrORux2wBo6pFuFe28QTD9DfMoHWyPdkZla0hmFuCt08/qFbqCXpQF+", - "pmLci+OHAJ5kRHb9+5aIqlJwW5HcBLnT7deQ8TTAs2yy74EwnADvwUjOp/E8CSGGjKYvyfyO9jwiE7Ey", - "H5/p0JcEl6di+BKBvzs4anhP8Pi8fnXeGQQ+r2kXxmwztDUUpVh/KSGzgDuxwOIcluhDGKRmUTAmXxdD", - "HO3aHmsUnvXjjELXEmFxPA3heuiNDv2L0xtD34rpLUfcL0dvQfQYYGhT+FJow6wDVbqtjm8ywjXtO+Rz", - "rfEUVyey8p8IAyQ2prjATl+0PlZp7tcS9nLKu9bcEAu0tw88DybYbHk7od+RtLDxSSrUpm4+6+Oux57E", - "BmcTNRdmrKE+tnId/XVeAJK8GLYre29PXymkWRRrKraR7+3oi/Vx11X/jAy+AvpiK+/oq6E6PUHSAvQV", - "xtMgMpPVeTxFThA5gJ6NezUKxjkdaD20RI9gMv6GKsha3aPDeDqFvhNE3fV5q67PxWOdUI3tPTmMp3GG", - "G5ghzrAdN8TZ69t6OI3GW1ZPqSPSBmWUUo8t2c7hfAJTNAuSFlcgpZPdNYgdIV/zbjyMaK0Erp+0/X1I", - "RVF3J1rkTqRisJkkE4DQU5zWeCIwMcklqSPa14nUKzHm+nSM0xmIpnKibVI2PAqZLxHVifMdEueMrIqU", - "bsFEKZwSQZbWXfpYC1SrkUg/nXWxjQBjmxhGIK975toJPV2QkK3Og0LgPazlhWFMRt7iB4YGUdPyxeER", - "poiDUFu6l7cT/isIpo8aHXEY3cefIf7GB11p4RIF0jyjw+Hewd6BLmeE4jbyb9n11qImyXXNYkuucjXk", - "/B06KcRZGhWQV9KziZTKoiiIpvkUP/piyH6csBDVfDaxaU9wMovjhz73Itr/yX+wiMcjJwVvXfUyYr/b", - "h9rxgcxePHKiDTvxWMauCfi6c+H1z4VyvJxKpkbXHd7i1oo59jmebS7Joqko+lfPMVzvQbaJNbaWb1bj", - "/MagZ75vHDUEMyM+oUnqyryhHDtyuzr23CL2pDaByha15VHJm/SPF4s63hptg1GYZWAq9xCsczjVnPG7", - "427a2vGPr7izhlU8SivROkRprncgpWo1oULszWpsXbWEzFrtDC2vwZRAEVA4N0xnBcdAJlC2uSAWS15j", - "kHWcpuc0zhDLMFvpNClHZlhlJpHu41apEFrci7YyvKFNVg8JYBddtfnoKt11SKGYBYMbek0alj0ntFC5", - 
"3kKUz4KRPR1vvTZvqSFEyzCWjdpnz13t9MCtYLD11dVmyLANdGZaV5HLNq0cWkmEsnrYyQOjgrgcczao", - "iVbp9ckmFfPoS8Z7lC8dxpOyRTr9beBnTUpLlpByBfWGFq82pAdsmsZZQvOE5iCIjTKCQjt9gc9uYw6H", - "NQuJJXN3i0elLn33FmoTC+ULbyW4RF4Zo2+ISInQNtPLQgletlJyXWvYZc8Z3lPrNsoIdUC/R7kqBBgi", - "LHkqQM49xN4M+qZs0rng33JFipPBglljXi1XjAJvqyQxXWqYLjXMGlLDtBLNXDYgi1etwkluJZa5b80O", - "mWB+Bbm8ZiknHKaWUwU7ebdVKmBOiouqgGXHvwkEKUyl419P6wpIPcmYPMjS0D123Zfbl/8XAAD//674", - "AZlEhQIA", + "H4sIAAAAAAAC/+y9+2/bOpY4/q8I/n6BnQHsvNrevRtgf3ATt9fTNMnYTovZ2SCgJcbWjSzpilTSbJH/", + "/QM+RUmkRPkVuxEwmJtafBwenhcPD8/52XGjRRyFMMSoc/qzg9w5XAD6Z/96OEiSKCF/x0kUwwT7kH5x", + "Iw+S/3oQuYkfYz8KO6cd4LgpwtHC+QNgdw6xA0lvhzbuduAPsIgD2Dk9fn901O3cR8kC4M5pJ/VD/Nv7", + "TreDn2PYOe34IYYzmHReuvnhy7Mp/3buo8TBcx+xOdXpOv2s4SPkMC0gQmAGs1kRTvxwRieNXHQX+OGD", + "bkryu4MjB8+h40VuuoAhBhoAuo5/7/jYgT98hFEOnJmP5+n0wI0Wh3OGp54HH8XfOojufRh4ZWgIDPST", + "g+cAK5M7PnIAQpHrAww958nHcwoPiOPAd8E0yG1HJwQLDSJeup0E/pX6CfQ6p//OTX0rG0fTP6GLCYyC", + "VlCZWKD83cdwQf/4/xN43znt/H+HGe0dcsI7lFT3IqcBSQKeSyDxcQ3QfIUYlGEBQRA9nc1BOIPXAKGn", + "KNEg9mkO8RwmTpQ4YYSdFMEEOS4IHZd2JJvvJ04s+iu4xEkKJTjTKAogCAk8bNoEAgwnMAQhbjIp7eaE", + "8MnBtC+ynnEYPvqYLdxyMp/2cCL6lf1Mqd1Hjh8iDEIXWs8+9mdhGjeYHPmz0EnjjJUaTZniuQVpEbLo", + "k6Yv3U4cITyPZpa9rnlr0vE5iMJ+HA8NXHlNvhN2c4bndDUpgrQP4XpCRdhBaRxHCc4x4vHJu/cffvvP", + "33vkj8L/kd//6+j4RMuoJvrvc5zkeYCuS0cVBHQOF/QcMihyonuHYBaG2HepoFMh/ndnCpDvdrqdWRTN", + "Akh4UfJ4SYyVmNkE9pBogAQIsV+QJiERYBVcyylHDkGkIe/kRCGV3ApdlQmJikMtbsgXghA2RAZjWbrX", + "ilMuc8ViKmTYdUakBVEW+39ECBsoMEL4j2jm9K+Hzpy0UmGcYxyj08NDTv8H/AshTp36AbH/BT7Xz/MA", + "n3PTxPOHu4x0wdT14L01+Y4gitLEhXoxzmSi1zesHvsLqCjFhI/lPAHExWlOandOjk5OescnveN3k+MP", + "p0e/nb7//eD3339/9+H33tGH06OjjmKueADDHplAhyrfIBB8j9GNAkzX8UPn5oYJCDK0CtB0enL8/vej", + "/+ydvP8N9t6/Ax964OSD13t//J+/HXvH7v39f5H5F+DHBQxnhMnf/aYBJ429ZdEUAIQd3n8TuCrwg08m", + "yXZVBd3AG5PoAerEw4/YTyDSLfn7HDL2J8SKSXeHtz6w3uAFxMADjCRrdEaOgo1yZVKQKxK2g/z+nnz4", + "UIdDCVtXiheJDC0SXRfGmNkII/hXCpkwyeOTGQQMs6tR58IPzcTa7fzoRSD2e+SwMINhD/7ACehhMKNQ", + "PILAJ/vSOZUr7qap73VeSoTE4NWt92MaPDAbbPAIQ2xcMnwUZyEre1UzZK3lyma4fel2zogeCiwAGnp5", + "kBpvR3bgSim3NdkeqwURCOmSotBNkwSG7vOFv/DxGCcAw9kz097pgnQ461+eDS7uhpd316Orz6PBeNzp", + "ds5HV9d3l4Pvg/Gk0+3882ZwM8j++Xl0dXN9N7q6uTy/G119HF4qe5xByTZDiAczRhljDEM9Q3ppkh3q", + "nua+O6e8yWSGjxxKjged5Yk4Wvg49IOumIgiVC8g+kw8MJt4JflAx9cxRhFpKI5CBMtYw0LkljGWA6sa", + "DDaKGY6zJAq/R8nDfRA9TRJ/NoOJcR+B5/kEChB8VQRzaWA3icLBjziBCHGbskQ4pMkl34CyWg/jFGtH", + "jhM/SnxMaVsymB/idydse/wFofd3lL3Y38dlR0dJhJHZurrFKXCWVnUrMVgtTfQ4KxCdbOMIrSIpkPK6", + "ss0ZMvRjUYayG+BBZ2aS/g/w2dg92yZ1M8pjiK9C08pxSvtWdkQhN4oNypt+osDRAZ17P8CQQFTPCcxg", + "pljLNm98OVbOP8ZdxFHsu/3ExI4L8H9R6AgTxCEU4/ytP7r8u1j9+HLs0DFWEWNSFy/88L+Puwvw479P", + "PvxWVsoSWDPXM7dIP4AJHiyAH3xOojQ2y2/SBOmEZeAjTNbIWojDd4I61ifTJZbv+Y+wS2csr52DWrfy", + "GjOMDa7da/pJbCtZq4Mj7sdZy96KdXU7SRTAOmuIreYrXExhMiLttfjo8MHqsGLEh50xzfxl68ACXQYK", + "0plBCATpbP2TdrlPmMr7F4MLgQKlx2OmR5GtGsh+vVZa53xuebWq5SfFR1P2r0hl2miuFQ5eC4jnkVdv", + "xivo+sq6KEZZpbJZWvN3O4xahp52jicOT81no90iGnyDCTERtMOYD5ESNN1AhdlzsHLKyOhA7kEtnV74", + "Om6PwcwPpT+wahevZUtpxlLB9dTkPKfyjZXfUkc7ymHnfPCpf3NBDjH966Hh2KIMcJV4MPn4/Enc+ohh", + "QmH2wZJnJBuJ2n7bNPpWtNlW4Gssb1LqFVKR1crgDs/zArx4g8bv14wLEfQ/SsNxuliA5LkOMrpV38vd", + "KliSWYxyIbdiw8+BzkvaxB53/vaP8dWlM33GEP293nSVRiud/stqNCDG2AHml8sp870AdFegrACRS5Bz", + "P4GuAElIEYDcDrtZN8sPkwSyED1jCBJ3rtVGJnove/epT0x7yUMtvJQYl4RbZUMnSUNUPMsZggrugW8x", + "NGvVZNwYhh5Zac3AvFmTkf9KYVoPMWvVZNwkDUMLiHmzJiOj1HUh9OqBlg3tR5dUjqpct5pzEv12oB4E", + "l+CxFTSWWawr/uB/RFONIK+Kg6HyXImE4Vrsz2h6sKEbjNKYCMPYXnqNMYx1iK00hbG/gFGK9cvnH+uW", + 
"/riqGfyomL/i+EWXrrNr/xFNR2lYId3YHZXdvZPsJAOyzE1GECDDwezeD300bzb1n4wiq3aUEC1radi9", + "FYgugSgN9M5XhEGCmy0GYYBTZLEeop9YW07fozRsRuJk85tTufsAk2oWaLJcxSitA1lRzIWeqx8b2SCC", + "QOQumLlmLLdJmB7Xg8vz4eXnTrczurm8ZH+Nb87OBoPzwXmn2/nUH17QP9jNEvv7Y//sy9WnT1prhZhx", + "+ngT2yi1YlfNZvNJ6L0KMl+sbNV4lHfnWvuRQJx3QaNXhjcPTe1VpAIbn0hHZnSZAXAfvsPpPIoeXn2R", + "CizrWmI0u/BD2Ch4hihT+pkYEkSyCJUaRDMn8EPYJFKCRdhq5yDD8Qa1RoqpN2uh8UkUsKVGlWRhv3KG", + "2wxVF/ARBnnHzccbImiGl5+uOt3O9/7ostPtDEajq5FepijjyMOT1f7nINAJEv799c+egqz00oN9XOH8", + "mR+h4QmUd644g2oQoMZS/OywyAV8F1PaPel2QvhD/OtdtxOmC/oP1Dk9PqJe4Bxn5TrrQq54CydmVCgn", + "PrE6VimwaOMT4Y/yyO/sRs7WpY0UizAI1EMsaUo9O4GPMLvdyOL7j2xOcRqJ9U9ygv0KceK7Gnkcpotr", + "uyM2pWNx0D4wrfefVqdqNpbPAsfoEds44MjuOM1G5Ifqg05tOEAGam6WrooQnfwfAQxp/E0ZlVY+24SI", + "/4AMoBXRAUB4BO/9wHAtSQMIeYShOhiNLkxoR0hjaDYQhkkn+gaC1KB++PWM6uNgF43IoZHr3OXLd/3J", + "D73oSb/t6/Ap1yD60bwOIU0061gAD9ougn3TT8G+0WWQvfRDJR4qQzOLsb6PEhd6tnEPyjlB2S+xXglV", + "jtJuVbreAWWY8ZhWHcrPKyjE4hgllciwKbCmoFI7GnRhiMfKebZwT0TBM9Ez++roYt9UB0STE+oyHokV", + "vAkbcxlwlGY+g9IBuhh/Wc0jciO66tmaw1IcXSv+Ifnr7UT3jmAcgOdfKpCWLUlxzCDjynL08LrrU5p/", + "ODqqWW8BbtOqTY4Tpbu90C54umzhE9AlhMsps1ewlT5eVBvoSUYt+Dg0A84gwjeJwda6GV04OHIQDD0a", + "2MePucjB0WYu3U0KIg39v4g14MEQ+/c+TKQ1yQ0g/tqExR+qj7SmMIjCmYC4RlZ2Nxn+aOfarAxpHLtz", + "6KUBVCht1RDmDYcgdzuYhVrba8YmUcvZ4LcKerz1eXrpYwHyx/jsj8H5DflRZ/7ImTcbGLejIW7l1Wdx", + "btsIZ2tMYuuLgBul4Znq9mx8fcIA2LYuVQCwWeLYylT9XurwmqGCGVFURgmWaXcHjn8acWIVL2hkxEZB", + "g+VRTEdEFcfVHtQxXIB4HiVwHER4zefD3NlLf4nPHCIoiJibiPewv3RY8qzG73dNyyKfnSQVC6s3TtSL", + "2vqF+kEgIhjsV1oSTRrXDW9iD3qBwTO0dNXzaPFWV9zmEvJRr7HKF09zEIYwMMHLPzu+p/eTITK488RG", + "13sg2AiXxscBYgr6SGDJSVYynsHCtHrybYWlk+7mddPBV1n0Tpj9doa5QIREd54uugoZahUNhrFJ7unj", + "buZ+4CUwHzpQc+rfUKxMDJLS0+VaSBIIPDANoGlzxXeZxIAJxFoyWSmEyzCDmQKUVeTIQYSc8A1kd2gV", + "W7+BkK0+HsRR7j5SMZjXFNhFifC7yRtSSwO57ugsSkOsBxcaoVzGkZv1qcBQ8eSbi0yzCGzicXiy/frZ", + "LkqxCcQlOZJeNPbvMUzskbn2QDnWpWJnVrC2bGNESVuTOLGQNU1WLLtUrJiYPob4PCvlJClQrqwyGI6j", + "rp+4c/8R7qVcan7o3ikRE5ETlb5TBdcnECfPFVJ0Y/yoHGO2wxIVJwYFCQKP+tOnid534YCfZ0DtJS9v", + "Y3h455qpwOzr9fQdlJA6DckJHrRYD78loz0I3cBHKHx/tr3Hoo8V3X3yE4THkBnJ9rR3AZr2ahi2zE4Z", + "OQALM0vMKmhS4wjZ/lYQ8668GcuRaS0hZyJd+JBGA+Zjv7u8uvt+NfoyGHW62Y+j/mRwdzH8OpxkPvjh", + "5ee7yfDr4Pzu6ob6scbj4edL5qWf9EcT+lf/7Mvl1feLwfln5twfXg7Hf+T9/KPBZPQvdg+guvzJ0Fc3", + "k7vR4NNowPuMBsok6tzjiyvS8mLQH8sxh4Pzu4//ursZ06WQNX26uPp+N7q5vGPJhr4M/nWn3jwYmnBA", + "te40HccoSFUCS/kCR8PJ8Kx/UTVa1ZUJ/+uOoeHr4LKA+AZXKvxv1roqkj7LaFrMtQoTngliYMjX8V3k", + "bIwc2lr4Cxa0FzrQJmgEIQiese+iqxhfpbhi1MwBMQfIiWIMPYcfMuUg+jk2nufNlCVi5TQT2Tsnu5fR", + "3E9vkU2OwpWNrpN52iwu203fsqEXeuYsLto174DA1++FLtvNLOoxou2M6BXGS35VfjgbQ0z+g7bH5Cx1", + "xOBH7JNdpg9WKDDV47NebBrkPNGEj/TtjQMS6IA4TiLgzv1wxjI/UgRXzS+y0DAioWF4S0LBlixSbJbh", + "oXF7lbhQvDufgB+kCbQAhYaEqIColwKIvnLWzxkAxJZqvrDJInxByHeWXtoU02pVx/KBH4LIPlG/R+g+", + "G4N2nXvRxAFYBKJyqlqvr94sCbQAm+XCUEbYbSah04vM8ll52SRyvPL83tvMe7pc1qi6KwfOUKYLE/HZ", + "jDXWourKhI6QS75o1Lk1ikOku8r2Sk3iUUM7O6NKOCk30yBsT8vwvxpB2eeLIaxX1/oGwYT1uE6nge9W", + "kQIdryLxmQrzzmw6379lNn3E90mcUq6+X9KTVv/86/Cy0+18HXz9OBhVHCmq3wNRHzkyR1npPCglnNOH", + "TXWYyMGhOBmq5m4yXjFKVCJAUL6KRXn2Zn/ckZNtp9sZfGNnPfWMSs7A/fEX/ufZ6OpSCZCrwHvO3tGZ", + "fCBZVLyuod8d+iBBL5zZOyAcOU8gofkqSoYQ661/rdLs4ZH+zdF6nhGxsc1L1MO/Wi4ESQ/1rCupx+4R", + "Ud2GNX87tIAYJuIFkdChbCznb/4BPHCOHQ88d51j5wnCB/LfRRTi+d+XvPqX6NG+KDKLXIGo6yjwXU0+", + "ImabVx1XZep81lRjMDQQuXn2q4tQ58CZV8e9RrbC1CiMvmVeBSGLvh11up1vx3pRwmLOthACbYyqv6E1", + "BN5iZll15TVPgNaS1NVo06iAmPd/jx2FrZ/idf0UG/QfbCSdfwM/8NJuXAMXfqfBC+ZHS+gapEj3Il5l", + 
"ExYB4fjIiWlrB4Se44IwjLADaEERWqlMZHMrbpgWOqQ7INY6SIDnJRAh1VGSM+3EybvsLyEf/gBorpPy", + "c4Dm6pD/gQrTcbnPrCNW6GvMamY5Z3OAjRN+g4l/79ehl7p7iAx65M15sbkcDHpOmANkLmmnnQPIGnYO", + "gniLFyGej+IAPOcYQexfY89KHru3BgLL1/wz5xWHT2YkUt6FTxnWhJmnh30JdS9rCr7Q2LEqQCQQlfhb", + "DYZS3iFZ8VDFkwnlF9HMD5fPaL8cf6+U4H7nMC7WGNfhegRnPsIV0n0X0W2nIQ2CYQd3S1Tdst001axG", + "cz9G++r1K3lBt6jNN6Fl2GS6bft2zAp2TQB6qKguhWESgoC/xDceJHkzZ3iOutwadUHoJPCe2+c+09kA", + "PThRkn/Po3ZWT6BrTU7Q7bBqNnVE8+2Y4OMTa6u18b4ds+eXrE1VXS6lSJOGVuR3qXs5hFr58BxEwCge", + "6EfjMErwdF3tH63LjI134NwQG5lMgtIpYrfoZJM9KlZ4K0ROI/ARBCl7FGP3GLbiwRfd+ZUowZAGgQUs", + "U4R01c3Ss8o5mJ0pj2OKj8E0z2bqiUwmyS/TqgdmtplWNMC2hRP2pnAC36yVSido573NKGEHVLGgSW2w", + "phkHv0g6/TbpfUWcfSKTAP4lUgNms8s9UZ5ocNL+JNX50nq3StutWU4ZFbgicIUep0K3DNt+KPZ1CNiK", + "LWhgN9SNvU6jojp/hMnUyMhCJemdENfCALZJAfHt2JifGWAMF7Eh6I5/VKRJMT2z5i3XVhI+ByJ7cjWS", + "ipmOXy9PdPGdlu7pM06eHfrEwwbTzRNPF9CxQurpbKRd4ITKJNHfjlmKt/YM3fQMTVqYzinNhQbBiV5g", + "0CeYXytem1FrfvIcQ7tVDWTzJV+3Wb+1rDTpIE58iOqXT76cM+eRMesSaWN12mTvyRAGi7jZMzbxzrhZ", + "UgzWhAGnTq3uWYbr22oy2wlZkhG9QZrkKWzzT9YavlETY+XephXfo+kfsxXfqI0Hl5O7iboYuYY7phpK", + "D+rORoP+pJAi78vw+trwYC0nhSydIfYPc5AfsuC1JtmcYFNiyTINFOdPQ8y80E2Tz+VBqOf4qoAthgQz", + "511HfohZoFZ5BzjBaQVo9qJPH/vqL+CSuR15I82TQatlaOLNWLKKpjurosbSyqZGRhqa8OlWPtK3yG6R", + "J7nSc3iertn8SrgAYVOMZEvTkHsONkUuSkmQvQY9u/p6fTGYlB6BVrxtzftjl8sUpxxt89o4m2ZVByy1", + "/JI01B4A1mo1qR5tXbAws1hFKzoQso0MrXV+15zxpCGQ4eQJIIf3sj/keXmzyO7STbMFyohplvRYMxz/", + "Whyq6/ihs/CDwEfQjUIP2RmyGs8WaSHOmMVZnL/JqGWAIcLkt7/XZ7G3Qj8ZXnSzx7+MgS3PQT9VoJxT", + "Pb/DFT/GMASxf3AZhZdpEIBpAP8xpu8EZKuev4ijBGdV9zvlxjEgp6HOzMfzdHrgRotDXrq+58FH8fch", + "iP3Dx+NDBJNHmBxGgOroH72Qj9U5vQcBgisGHaWLcQyeQuidVbKj4o1lzcuMWZXypTwg+9aQgvZoTzZw", + "zrJICKphmg0lBS3ajlnmKUNC0LLuWtVvQM4x0u1hcsRLl8UGZrfwQFcemochgklzLeTzbvZb2sxhnq8y", + "tM0qD3UkJ67vhG8k76mvPWc0SX+9BGWpzjKbvLfaHNLlmXhcs2aiVVKAqt5W1UrpMpeISE+v0Q9Sriv1", + "VQq8qLpU8nSezz3KvCu5Kwz9FtwWDejNulrM5ug6rdClylSyMSf+gl/KbtCt6cEYz00FtGM8zylvUTcL", + "YJjcgyDQD7k1w2/lrLCT5bQL67wuJcNuqxsii6gI1tEeXW/NWtG4rNdwNmstkl/IIrHIGF9jA6yUopsJ", + "34KKPc8p6mWU7m1BhbymHiXURLMkNVKnXPWtTZtu7d1tvmiMJsSWfzWRkj69rmrPVj8MEK3r47kL9Uey", + "F8KZBa7N+71R32Nzj51Qhq3XrvXa/Vpeu7fsWDOZ5yuq+902V/fGWmroialxfWjsKu4NWcm28r2cYZU5", + "JPKOkJxfQvo8DNrwHGKRiqFwdVtfciGvVgmVzEG9O0yNZSftP0WJBh5xLHkUdRVqbDIa9pJlZCn4tFaP", + "QWHgoHU9YykHendyCxa4FNOW9y2v6/N759VENW0gX6I6ZRWwr2W4q6ZPA8vdgPF1WfE5x7AaFtr/zLN6", + "ae/keektlulhrUn37N7q8hJSPEuE1nJMTUVsRd80CRrlAeCvbsm4OlzmUMIeIZpf/69rkQi6CTRoXvZN", + "vvLiT4yJCnCG904YYSdOokffg17XAU4CQi9aiE60VtwUOjMYwkScAVRVdrIxjDdHs7ebBLjc3myblCWc", + "tcgmgtNcIXurwZp58WMVoZXrYmRMfuK9A4Z9oy8XQOhlef8SNtRy52W7cro60LOCuswcO4s8A9X+MZlc", + "O6yR40aepOCEI9/+gcEdUGqn5ia+tUR4NQlxVNboUUHzorV10jstBSxNO+VCqp8Hk063c301pv+5mVAr", + "xKQhWVwaqgpaQ+xNFE+E5ILQiWFC6OqgUZJ68Ah8ehIU1dpqXiOWp4U/oJti6LhRyHM4Bs+Giy8fxfRY", + "mugOJoTqstp+ACF/FkLPyTpRt83NzfDc4eyz/eNYAKYwQNUJLGkbylI5FyFTA3akyAQqGUe3ZQFA+A8I", + "EjyFwKK6K98qmo8UEQCBMxe980fak6OTk97xSe/43eT4w+nRb6fvfz/4/fff3334vXf04fToyP4NFmDM", + "TMyDAcJgGlBP1Q5CugA/zITP642vjwE2b3eY7Y0EulBm4USmh2akjSw0nD+uNiDgUX4uXW3BNCRbMgzv", + "IztuGCkdaHGSyKQJkKgdzeoaM0ZcciGFOtS6twTS8aSLz6JqtbQ3QiX0zybDbwP6uk7+ed2/GRsinW1i", + "eRiyZBwP00zGB7dcVzKJWgCy3tfEet/UWZ83owvN8E2NUdpea0gowrKkRyuTW4inlKTrut+MVyQ6ZgmO", + "ayavTiBRgYfXfyplNLslkKM88xeyHINwlvIbF2uxMD7/gpjiYZ2VJMHlR3x6w4hLpMEPnABtA+Q9mIct", + "LY5CpJp/Vxd9VhrrX5M/qP9+8q/rwfhsNLye6H0oGScrw4wHF5/+uBqzFxJf+5d99rzq++DjH1dXX4wD", + "iTTxBTecSpv6cCT5i8VtZ7dB7k72zl9k79TnfPwzmhoEK/miA8iKPv8RTddaramJbjZiTmR605hHYLb8", + 
"WqX/DmiNf9vaJjYr4BcIzeSEclchswJV+S01eqGqfomY4kxYZrq06jOIle+yVFjhej0Ub4XY4+4ZxIji", + "xM26OjPSV+o6xTV7YEzrP8YJwHBWm2tIgfAi16+5DZuZqfnsxsUEMO9O6o/+YuriarparFZt0fBc975e", + "Ajg81+JQ9P7ih7nD9qeby7PJkIrZ85tR/+MFMa3O+58rBSQZROjPRhRMZ9ewl/iuV8orhVJuWZ9T/WHn", + "DOGtjU8uKZN8gVVRkbQaio5iJY89wGdD0IYYnpClXeClOOcAB8XQ9e99N5vE+VsMEIKe8+gDnn7m73qu", + "MCKiQUSPvogLTlKoGb/uDk0NjZEH5+Ojo6PuxuvpyziTRgtiRcnt6TIrqL9GVc6iIJhG3LZzic09VqsY", + "bxuE5SqCW0ReqJfq2vCLskXEAgih9/G5weATpVc5HqKhSWKMqFgloVQ2kBoroYB9Wy1MduSEp0RV2CuF", + "URpeJR5MPj6f+wl0caGOS398RtT0YHxWqaezUT75MMjpffX1TUbLOSmmSMaaScYiWqSV3a3sbmX3a8lu", + "wxy/oGivCDdbQjTT0YYYLswBbIbzSn1nYxrlMX2uXJ2nZsU8ZtmL6LU/dF7DgAaZXsxkU3xiwhfVLSFS", + "GbWOeirq8Teoty+zslTV2i9Nu9S5OS9QzMQ4yYuTYkxGFF4rkr8EK2kwdufQS4OKLHKGziuro+/F506W", + "AqZmsxHL/2+MVMm9stogOxpeuPBp6xZhdBLQREVN6EgMdcY61lmhheal+TOG0OZkqkp/JZhO+5Ezl/ab", + "4NHmSbWqFjsBMx16A2Yyru7yD9f8Roq7dRmEVfTDhcJZQg4y93q5oGVpxpd3voEb6yakAdDaGakcueNX", + "juueFulX2NwyKOBNI3mhjGlfZmCJn/Ua98zc0qMvs8Du+C1EczSz2nRrqEpXf7NVBYZizRZZNneFYbMh", + "6q0HTTtwD9IAX1c+luSNjI8mrS4Jsqu7V7qQixKPRdVZgIq4aTDxFzAyJHdF2Hcfnk1BHuSbg/jVh91t", + "n8LTDVgLFcoxmnOa2AChlg+39f9XnsnMZyUBs9iZ3EC39exA93WdFyhNCORNIZxFHWQ3J3mM3yeQRkKd", + "mdNVLsCPmhZPzSxiU85KFkKfEiFFrPsFg3AKQQKTfsoSrFCMUtlLf842ZY5xTM8GUfTgQ9HcJ7vKfhIX", + "zKcd/pAy6wti/wvk8Sw+D2HRxFWzbk7/ekgTIGPqCcr/Kimrc3xwdHBECZO9De2cdt4dHB8c8WeedGn0", + "KWfgP0J+aV2e97O4lCatQoiQI70QZBeByITZueDfP9N1iVBvOsvJ0VF54D8gCPCcSuUPuu+XEZZz5nam", + "c/rv224HieScBMKsoYh6+Dcf351D96FzS/rTtSYQeM/1iyXN/KrVjkSDdS6XAker9rJqszgB9/e+W7t6", + "CW3t8h+PDwEvKdyjleB69FoSHf6kP6u/vTAYA4g1tvg5/R05QNbtoJWrWb072r2EsUKVcjYCpcUE0Gr6", + "BOyKkJHSDA5PftU5Zc+WJXeVltJRuZ95m5lcXPno+nJb2vv3ZWyNU9eFCN2nQfDsMJR6uaInJeS9dDvv", + "GZW4UYh5VnwQx4HvUowe/omY9sjWUaOtBkkSJbymYTEiYgECggXoOVHiTIEnHjowMN6tHQwdFJ+iZOp7", + "HmS2bEbfjE6qyExQPC/qddvt/OjJIt/UMcY+dDWEcUsPUdjVpM5ixvsqJM5G+DVInNLDx4jJzrUQg1o+", + "v4A4+VKmTCaV2MKRkwqc57HxohfRa1mIdgk62HNigAHaigFLMcCoZXNiQFWQsd/D0QMMiVYUf1NtGEdI", + "YzSM4GP0AB0QEgvMoa157I+csSAmYn9CWgn3AOluIyXk8AaZIGDdKXWX0OVxOqfQ/dpEjZpQNScdsrET", + "vnOCjLPfqihZbnmOgt0gSr1D9ShrtnZLiZ3EcYIO4vghwiB0YYmIz8hnEaxgNoI3j1sKiJOG8tHhzhBY", + "jdXOEKze/vKt/6rc1/zoiSF6UcxCJ7hGU/abOVcPf9L/vlTtN5FSslxpfkOpj5VtZK0kokMYjRP6datC", + "aH2bzTOh1ChvlljzkYs1hg26Y61sy5G4gpmMvBmKK6Qao59bM4Uf1ok1ui1SqtXQ/LkUYG+d7s8pCbe0", + "v1u0v4BL63Cj9t6e4uY5lJrQlFSJe6LI16HCyRiH1KHNdgkZd/zCR+QAFDi51qYNJq2H+YYb220yF99x", + "ZcqGmy9ybuRWt0uEILeebkRhE8r7n9vkKPRxRKT54U/G8S+HcRJNoflwKW7pHJBdBOPIoX5diq/8e3Az", + "w8upryOER2l4Tee1902ZlJ6UXFvWehUExXMnMHqi+D3Yqla4jLADUjyPEv//CBSRyKLCsjywN38lNydm", + "ZeSZ396h2+N84vJ8mG2rXnHkyAwFwH04/En/Y+HFd8akoXhaX6Ic+pWno7F32ufGNBIPBXEnvfN5nOyS", + "aXO8HTBuwoyE2cQftjMxy3JEk8WBIIieyPS6G4Ei1QrRS3+vMrEY0eU5JkSHP1GIrLjlcqxK/TK/hKgB", + "m+QHMzMK19w7xyYFZLSMsoOMUiJYySqX40pGCZGGTYThonib9KYLmVcciUss0vhu7NXsj67ZEfAAn5f1", + "BCgwnHz4kAPieB02UJxE5B/Qa3XYDrGm6RBJ0607II4FtZfVGmtT4EcMpgE89MAMHcpMzcZDI6KnRtrO", + "wXOAnSkMonCmvlGXiYPBrHyk/HZ8DmgNkgmvq1XvLhM1V7J0HyzJL2WZv1KYPGc844HZne9Vq7lNvTew", + "kjsFeF/r4GNNvWsrjHYOZrKgnDYDU4UcIlOK2z8669v2EnY7H7Yl/Mgp1F/EAVzAEJdsA+q8kNVgxdU5", + "QA9aCUMbHv4k/6m5XmKJ6afPjG+KAoRMYOlqZ4XqTEqfALpllZ+vyGcQCqKmnwpL6WXNJv34hRT8jVxv", + "FKtvnT/fs7PP5medqEXZiKVwH6Us5c+OiIiMn0siwnxmwDYi5DCIZnW2ShDNnMAPocijw+EoSpSLaHbh", + "h6zCwo5Llc2yvYqIBkqZv8tq7+7ymlFSn0L6F9Fsdcon/9/LHsOZb3iU2i5G4pelW/aB/LsVKbNw5KAH", + "PzYo1ej+HsG8TlWf2Pz2Xps9q3o6mlrOmT4bpqSfG864ebWe7fUSl/St6d2q9pyM00mY1dU8baG4CS1k", + "HXNM5MGRxdGKMq+ZvNtF3+GvJPO65fSawsnwAJ8RmZVlODRPS9p1tE6e2ngl+mSu5JDYrBy2l8GtlVVn", + 
"ZUFR980yPMpK4DB6s5E4vGWtyPlE27UyZ1dlziSXJcGzkjrZ02WD8KmcwgEIRa5PH/A8+XjOAjvYnOuo", + "Dm/hnkZuFEO6VJROEcSOC0LPp49kBF0bF8/62q6btq5csXODoEfZiMGC/XCmgQcQjgdByq66jdXmtynN", + "FdZuIM6FiGnleV6eC7xkAp3ht0qidw23x6zOngOcED7xgY2imbXdm2C29b+l/HbMUMDQUfN8kpXlxpFC", + "yqyMHK/cubWHk2L7almPZ2BlufGTKHT+jKZKOCQLkWyPt9s63l5mJ9qcDJAsK9nVXgzYG3aHPx+Pe+xv", + "m3AtUCc8Gici2C3LjjOwT+NMPbEWDXgSa3vr4m6lxf5LC1uJ0FXolRgJFTfO0tR3fIz0985sNtub551m", + "8zfO3LMIK3ZLa4EXr2sbq948o1moYXqDtciSrFZwJgdOhBw5SRo6vGd1hgR23UHOFCyXr8jpuq/cS9Oo", + "86KkkTNjb8YEGkyHdJ89+LAAtCIfuyF8VZaTtoUmDbEfdNYzu+pIoa6a0JMVc2rcNTxC8HUiAlm2VUrK", + "/4HU4EYD0Dw7K2l/J1rf0dYbJbYs2JJFKdNYT5kxPov0MYQ1soZ+OLtjuWc3A/nmL0tHaSjERvMwKFVU", + "tSGLuxOPRPdmIbWB3XWlvVqLIz/Elspt4YcphsTsFH8lEDx40VMo9V0DXfcZ4msy+b5rOqpVwD3myYvl", + "LTg/lBULbh/3jsj/JkdHp/R//2OQSqIGzz0zbtahhSikU3gfJbAAakTgWwFYUSLnIx28Obibl405UltC", + "OlI+aeXjjsrH/O6sXUqiQ5fWiTC//GJ1JGR4uU7esSZv22VPUUBNlZp8h+zJTuS4AmlWPvoqFxkdKGgf", + "ZrTRYRpBUmDftYuPBMYBeK5KqEi+V4oP1uRNiw+GgibiIxFIW1V8sIFa6dFKD430KHDvGqWHcAL1kjSs", + "i+/KVSGvjfAqlkJtw7x2OrSU5TyCVoFeom3zEFP+lM5QTM7gWAVJ4ENE875BK/A26OUNAG4CyrpcvH1Z", + "5DVLTPfQe3Ri4CdWW5ZVib1TUqtpdm8pL245UnBP3M9PtIyKdJrXxzfCZMPeZlmmKnL80A1Sj77vR0Qp", + "R2HwrP4un5zrBFIYPN+JBmZGKJe8qnHS5/IPWODslfz1miwJRse9qtZ+dQc+f8TcNDiztUd37aFRzhZT", + "TEJhdTm0zPcabcNDXgu9R1iizlLkbcmwtLwkzaVoNh+rrcfzrAg72mtLUhFKqFiIjCGFXx1z9HHUmVWo", + "IoVeJcfLZuWVngRa0dWKrqaiS8Qn1mWE5ZHxBYOgQjS1QfLHHHUKUmpcZyp2qQNe4HCbQfK6OvsNQmBz", + "FNLGyxWj1QsMtAYGz/Pzz8fjnvpLXf6kHMmB0KNBrfIcjSOpcKOQbu//djxKFP/bcWIwg9UywDICNgcD", + "OzvNaE995Km6vL2NP12Cy1rNvUe5zSwZulsi6CVY3D7LS5aRkZ0x7NW5zAdifc54O2+AM7Glde79mgKs", + "WYKYVna1sqtCdmF/4YczVFtjibdrLL0+QzzhU+ytPaKVQR6M8ZxdtrOoOccVqVMNaW9Jh53LNMU2p5Uk", + "ey9Jqvhz3eIFxlymiD9fDkHizv1HWGcF8VYcTNJdK0LGGMb8ir4vBrYQH2I8c3UFDm97Xb+b2e/4vvM9", + "bxPg7UWQkuS6QqBSWUjl2F9hflmmgvw0Sisr4UoWrpdJjbNv2sijgUhl1UqjNyKN2mScv6IsUhh/85Jo", + "iTTYAqhyFGPDTNitGHrdGMYAPsLAKhyOtczNXFmFk9MB6fXJh4FnfJQOieJ16GwKHBUv0mmHpoCMWS9t", + "+BigeaiixKtaP/388ZmtpeHkV2pfAx7Y9J6fQJenwKqA4lxptgwkWf/NKqk2Hfwrp4PXqwF++V/xdI7e", + "SiJ+u28IOZrQn8/Uy+h1X5azwdlEde9LWLjA61yPMwgbXYhzpP7aNL7ETbiS3Js/2eB33EUi11G0DGeh", + "pA2wOy/TNqv5R8vx//ARTRJaSeD7UyNwQxErDAFNmDBOCCKxz95GpAKBLT/uEz9yPrHmR20wSinMDASE", + "MMJZDy6AH/RmSZTGlRc8RAnJinGMvOgYDh3A4QMUWbdPmgxIi8+kwb4Ewd5unCd0iGmYesG4CS3v5G89", + "KqjVnpm6DUy08lx1jPHmwzFVC7OAGztdV0J5IxP0eLPsvYQG1NBQy9daG1XLbevVkoc8jX2NhiS7J7o4", + "okv1exGFXPxwNuZ99iQ36ZbUpIKYFXSkuictK+XzBJnQtDY+iv0ejh5gzUN9p389dFi7aq7px/6ENGvt", + "SXRI4x+uhxQfaMRnacgnIo6j9fUVjUdCkQy1CjPIH1cpZRFm1G5H7K2NSBEgaF0xCzfpwihO2vLXmp/c", + "ZMzUkMGqFM566wy2VQbbKoPVVQYrYJJhlsPzRrXIlgBQhG4Oz5cEMUnD1dPp2EA4SkOWSoc7vl7lSpru", + "5+tcSNOpd+A6WoVDvYyuIJYsiw98dh5BkEJ9Lh+ZLPffhN2OT2nT406X/OuE/euEiPfqnD9f15vyJ1sG", + "S6piWR+QNh5uJ9vPJs8KbbnS1yxXaudCpuMabJD2CEARQHFR4xbmSZNeJQyBUUITny9kPd56FOjJf21n", + "1hHnT26ewh8uhB40VLBje7NcWWLDweRwmgYP5rCfj2nwwMkDZTIBVQoF0ucNCway/IbCAb2mdEDNxUMb", + "Jb5j8oGyqSok0JqlhF1mfebIUFKT5Uxck9RgYSVvPvE+Q4C9QcEPDM2SZ1tDmgVskX89ZYdlcvbYYIJR", + "8UM0/RO6FpYLRRrMcim0QmpnhRRPwL0R+UTdaJY+Vuabs/CzfoHP7bVe5mxc6rROkd2e2HUndof7ftfJ", + "B3YlLFAz1fzmi1owBOyKal6PWy1XK6NVmG9GYfrho49h0wBr0UsfNDakX1tdKWLFFHwsFSUmsN3GhunC", + "pzNa3FDMNJugktZb97cSJc1QYhcczXD7qhHRDNxlAqE5YbRsqY9+lnyznlBNzufihx779wtj4gBiWGbn", + "c/o7kgc7G1Zmffa3sG2Or6ph60l07LtureVeRiG7zL05RmJEmJGr6fV2fh+pXqt609qME/bnXeu+cMJm", + "n94up3df7fGtJecy+PaGc/mj2MacW6X5FnAxpczX6IwmeulZ/Cv92p7RBDUq+FjqjCaw3RqDujNaRovr", + "sQX5eIc/2R8WRqADOBDOfRIt6p69MWr4NUxBvmwTbOzzVnn3/UZ4dxkb8G1w7Q5lubs0JLWTTJrbmLXJ", + 
"i79SmMLegghutz5bOG3t8NbyFrlSYHyG+J+k11c+xT7KjL16GbBPwd6bt15ytLfcCzDnESbIj0JB961M", + "fG2ZSMSR3J2FFCzFdOjLysQEYNijF042oRKkNbueqouVGAEML0jD9l3aLlehWccbJovKy5t7qSTpbAde", + "KxVh2Vb6zDyvNQjGUdi5jcYpnFlV3GTilqDauWC/LitxeY9eHAW++1yfskV0cFgHm4QtIpTgmvZo07Uc", + "6tCynIunsButq2frWY9QANyH6kQtY9LEeYLTeRQ9lJ2f9PN39rV1frIcLSpOmpweCqjeJXbYUmWjmxCk", + "eB4l/v9Bj038YTsTf4V4Hnm0RAAIguhJX1WJbRC1AxkLqPqMflyJEQ8RBgk2suOYfGV67Kqf4rlDDytF", + "hrxBMGF3JhSgK4JQ2nMfOfPd0YkGDyr3UJRxtZLDyhwCj9/xBBEjmDytFOemVIGgmyY+fqb4caPowYdk", + "UJr8+FalB4rS/IyCEMgOLE0HdXmzxpfjIgEWBHKIWjnM5fDleKiiqoEkLmK5lcU7J4vLjCAl8eV4hXRd", + "hYF1DNZGJ1IE5PmrMkvX+mg2P6l1lGFxV1uG3iGGNnKeJUdXalRej6O3jSsrXiJs326uNu8u0CGmmc9A", + "1q3K7Ux7qbILlypyb8qXKiv6JzTV0ypZNyuU5kyfGUNpSzfuiR+vu6sV3LZQZ3FJ+dBKhJ0rsKiKiLUU", + "VbSSE7U5NfoYw0XMk8PQthY1X/ctmUYrQaoC2HxEw/u5CGFEEOzeAeGVL/HqGGVbDJ1A0rHi7T1NUmLL", + "w7R5y8K7mA0gSUO+VTWPL/wwTmk8BLvc1S33ZScslTYXQIV8oRv+GgIlW1OlL4A1sywK/xniMRu2FS2v", + "Zx00y3Jl8DTw4doDxS4fKMQubURq8Lv43lOUPFQ9GMvCOo2BEm2MRBaizlDxnSKVIKSq1gZBhgyjZx0d", + "sR2tE3/XbuUU8l8+VQgfxMRCb/72Lcc/DBtbKpGjmdlrlOhDbG3Lubt3/aYy3jLOeiaVq93zREMy4V0d", + "e5vphjevLDNMtJWoVj5qiidA+bfTDMfLXlIJRLPjZfMMkWpNHk2iSKWQTpsuUkkXqeAF1biJclWPXi95", + "pA5u6yJzigcpRzDt8XQnk0rm96j8yLD6gNpE4PxU/1l3O57jhFoNzMl0ny/LC6yvB03F4B6bCXy7ln2v", + "3F6em18L5/3S9S+Fu3maWp6fD+kVR62Lml2EMIZWgT6o4eshHb1l7tdn7iw3wrVSGoLBuIo3O48jut2t", + "Q3tLDu3vKu5Dm6wE2SY1NRnWJ3HQHMRwQ3bEmI7dypu9MSbYhrUWxS9kUciIeIvS2bmq2UEgb92Qxtao", + "Yn36HItdkA9Euv1WBqwdwAuAsDM8p0kr59AJgNhBU/ITgPDQM2Y/eXeiy36yhci9JmU2VMnTxtbs6I39", + "ErLE/jrfThYiq5sJ2tLOonmT6Zg8eA/SAHdOj7o5UbGNxExy7g/LTM7KvzvTZ4dOoJ+UfzK/Et+G2dVe", + "9qzf3lpnojc5pmXZTgc4U4Ddeemyp8pievP1OtV7EoYM22BgHqNevip500U8g/b2qCbpEiObbdzcoEM3", + "icJ6i4S0cv6MphlQOPFns9rwibMkCt+0mbI3WSPlxvoemXYGsTSJD2qSA5sObhs465KZm4J3WWdKaaek", + "FN9kOtKh+VT7mfe4IhPn9Nm559k+15YQVJUiyD4p6PR5c3lBFaNgy5lBc8hYwUJv1a7GSi/puQ2Z60Tp", + "Hv4k/+mJX+1KXZQVsfXFByGcPS98IVdvAiuH0e2XvrCsUaHdxDbraLFmhB5Nze4q8gRx+9Ktukxckbn2", + "OTxphzlrQ6qzVZv74NhvpKzXIB/s9DelAVsvvnq1UB+b0J6Sd/mULMr+2x4Kafstno938fAeg4QgzXBf", + "XQCLNf6uejC3BJ/mtbkWNn4zvFm4+tpHGQ7CAKcIWpVuEm2XOdKOaV9+uLQB7sEPPSuoaMPGIH3xQ68e", + "mr33oGB/AR1wTwAtRUw+ASQeMKpL6JwcnRz3jsj/JkdHp/R//2P0UNHufTKBnng9gGGPQNGxrURIIJ7C", + "+yiBmwT5I51hnTBXYPneD300Xx5m0X+reF4X0GvF9OY8gmX325v1BxZtx/ZYs5EYyc04AmlYpE0qYOBw", + "0Iiiy7O/mhvYMvp5n4tZtmZ4a4Zv3wxvbcvWtnyVdw9oxeKvVAC1Scrr9fsGCrFmep6A6qUBUY81XkPZ", + "chn/4Vh0br2Iu+xF3Ny5SBLAXoVLtMZUa0ztjTGVLSMT1WvxzVpV1ZcMLr20Wy5LX5YwrddhvVaJwQLY", + "rF1y+FP+2SvlcamNStKD3NBm2fPYJA0OjHmLtaje2XAl/e628UrFeCUDnpoFJBhooyZyaS0MuNe1iPaK", + "+zapjltVvO9xTZuVI3aGgUzV8JK9EKqsVgqcED6Z3wnZPxOasA77k1y5/sVKdW6GStC2WkdVsw1N6p4Y", + "N3+ryS2bBXmqOaHN8LdicfvFHXcuoSYXdFVUvpknmooszvmR9fJYWARcItvbgyVTYpSGrRTephQWO6Bs", + "QBP5a7QbtliIqrk5qkrgN3nSbMWvlfjlBkmdTbx2kcuytPfcKA1xTYgObSNyXonyAuAR+AGYBpBKX0Xc", + "6E/jnyFmWeDRGZ1x70VvXWqyPU9NmNusJY/ejFQY+bTecMMdfQ5JyyUszLN/imCCDt00SWA1ZyN2OmAN", + "HdKtxL03CCafIT7jg22Q7shMDemMQtwWunn9QjfQTRMfP1Mx7kbRgw/7KZFd/74loqrwuC1PboLc6fZr", + "yHjm43k6PXRBEEyB+2Ak57NoEQcQQ0bTV2R+R6uPyESszMdnOvQVweWZGL5A4O+OTmruE1w+r1eedw6B", + "x2vaBRHbDG0NRSnWXwrIzOFOLDA/hyX6EAaJWRSMydflEEe7NscahWfzOKPQNURYFM0CuBl6o0P/4vTG", + "0LdmessQ98vRmx8++hjaFL4U1jDrQI1uK/VNRpjQvkM+1wa1uDqRVfxE4COxMfkFtvaitVqluV8L2Mso", + "b6I5IeZo7xC4Loyx2fPWp9+R9LDxSUrUpm4+69PZjD+JDc4mqi/MWEF9bOU6+mujACR5MWyX9t6evhJI", + "syhWVGwj35vRF+vT2VT9MzL4GuiLrbylr5rq9ARJS9BXEM380ExWF9EMOX7oAKobDyoMjAs60GZoiapg", + "Mv6WKshanaODaDaDnuOH7fF5p47PebVOqMb2nBxEsyjFNcwQpdiOG6L09X09nEajHaun1BJpjTFKqceW", + 
"bBdwMYUJmvtxgyOQ0snuGMRUyNesG39GtFEC10/a/Dykoqg9Ey1zJlIxWE+SMUDoKUoqIhGYmOSS1BHt", + "q0TqtRhzczbG2RyEMznRLhkbLoXMk4hqxfkeiXNGVnlKt2CiBM6IIEuqDn2sBaq0SGSczqbYRoCxSwwj", + "kNdec+2FnS5IyNbmQQFwHzZywzAmI+/wBUONqGl44/AIE8RBqCzdy9uJ+BUEk0eNjTgM76PPEH/jg661", + "cIkCaZbR4fjg6OBIlzNCCRv5t+x6a1GTZFKx2EKoXAU5f4dOAnGahDnkFexsIqXSMPTDWTbFj54YshfF", + "7IlqNpvYtCc4nUfRQ49HER3+5D9YvMcjmoK3LkcZsd/tn9rxgcxRPHKiLQfxWL5dE/C1euH19ULxvZxK", + "psbQHd7i1oo5DjmebQ7Joqko+lfNMdzuQbaJNXaWb9YT/MagZ7FvHDUEMyM+oUnqyryhHDtyu1r23CH2", + "pD6B0hY15VHJm/SPF4s63hprg1GY5cNUHiFYFXCq0fH7E27aOPCPr7j1hpUiSkuvdYjRXB1ASs1qQoXY", + "nVf4uioJmbXaG1regCuBIiCnN0y6gmMgFSjb3iMWS15jkLWcpuc0zhCrMFtBmxRfZlhlJpHh41apEBqc", + "i3byeUOTrB4SwPZ11fZfV+mOQwrFLPm4oVtnYdlzQgOT6y288lnyZU/LW6/NW+oTolUYy8bss+euZnbg", + "TjDY5upqM2TYPnRmVleey7ZtHFpJhKJ52MoDo4G4GnPWmIlW6fXJJuXz6EvGe5Q3HUZN2SCd/i7wsyal", + "JUtIuYZ6Q8tXG9IDNkuiNKZ5QjMQxEYZQaGdvsDnTm0Ohw0LiRVzd4tLpTZ99w5aE0vlC28kuEReGWNs", + "iEiJ0DTTy1IJXnZSck007HLgDO+pdxulhDqg16VcFQAMEZY85SPnHmJ3Dj1TNulM8O+4IcXJYMmsMa+W", + "K0aBt1GSmDY1TJsaZgOpYRqJZi4bkMWtVk6TW4llHluzRy6YX0Eub1jKiYCp1UzBVt7tlAmYkeKyJmAx", + "8G8KQQITGfjX1YYC0kgyJg/SJOicdjovty//LwAA//+yZDXAbJwCAA==", } // GetSwagger returns the content of the embedded swagger specification file diff --git a/api/v1/server/oas/transformers/v1/filters.go b/api/v1/server/oas/transformers/v1/filters.go new file mode 100644 index 000000000..2662b080d --- /dev/null +++ b/api/v1/server/oas/transformers/v1/filters.go @@ -0,0 +1,40 @@ +package transformers + +import ( + "github.com/google/uuid" + "github.com/hatchet-dev/hatchet/api/v1/server/oas/gen" + "github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1" +) + +func ToV1Filter(filter *sqlcv1.V1Filter) gen.V1Filter { + var payload map[string]interface{} + + if filter.Payload != nil { + payload = jsonToMap(filter.Payload) + } + + return gen.V1Filter{ + Expression: filter.Expression, + Metadata: gen.APIResourceMeta{ + CreatedAt: filter.InsertedAt.Time, + UpdatedAt: filter.UpdatedAt.Time, + Id: filter.ID.String(), + }, + Payload: payload, + Scope: filter.Scope, + TenantId: filter.TenantID.String(), + WorkflowId: uuid.MustParse(filter.WorkflowID.String()), + } +} + +func ToV1FilterList(filters []*sqlcv1.V1Filter) gen.V1FilterList { + rows := make([]gen.V1Filter, len(filters)) + + for i, filter := range filters { + rows[i] = ToV1Filter(filter) + } + + return gen.V1FilterList{ + Rows: &rows, + } +} diff --git a/api/v1/server/run/run.go b/api/v1/server/run/run.go index eaf7813e2..5ac20695b 100644 --- a/api/v1/server/run/run.go +++ b/api/v1/server/run/run.go @@ -27,6 +27,7 @@ import ( "github.com/hatchet-dev/hatchet/api/v1/server/handlers/tenants" "github.com/hatchet-dev/hatchet/api/v1/server/handlers/users" eventsv1 "github.com/hatchet-dev/hatchet/api/v1/server/handlers/v1/events" + filtersv1 "github.com/hatchet-dev/hatchet/api/v1/server/handlers/v1/filters" "github.com/hatchet-dev/hatchet/api/v1/server/handlers/v1/tasks" workflowrunsv1 "github.com/hatchet-dev/hatchet/api/v1/server/handlers/v1/workflow-runs" webhookworker "github.com/hatchet-dev/hatchet/api/v1/server/handlers/webhook-worker" @@ -61,6 +62,7 @@ type apiService struct { *tasks.TasksService *workflowrunsv1.V1WorkflowRunsService *eventsv1.V1EventsService + *filtersv1.V1FiltersService } func newAPIService(config *server.ServerConfig) *apiService { @@ -84,6 +86,7 @@ func newAPIService(config *server.ServerConfig) *apiService { TasksService: tasks.NewTasksService(config), V1WorkflowRunsService: workflowrunsv1.NewV1WorkflowRunsService(config), 
V1EventsService: eventsv1.NewV1EventsService(config), + V1FiltersService: filtersv1.NewV1FiltersService(config), } } @@ -390,6 +393,19 @@ func (t *APIServer) registerSpec(g *echo.Group, spec *openapi3.T) (*populator.Po return workflowRun, sqlchelpers.UUIDToStr(workflowRun.WorkflowRun.TenantID), nil }) + populatorMW.RegisterGetter("v1-filter", func(config *server.ServerConfig, parentId, id string) (result interface{}, uniqueParentId string, err error) { + filter, err := t.config.V1.Filters().GetFilter( + context.Background(), + parentId, + id, + ) + + if err != nil { + return nil, "", err + } + + return filter, sqlchelpers.UUIDToStr(filter.TenantID), nil + }) authnMW := authn.NewAuthN(t.config) authzMW := authz.NewAuthZ(t.config) diff --git a/cmd/hatchet-migrate/migrate/migrations/20250512141736_v1_0_20.sql b/cmd/hatchet-migrate/migrate/migrations/20250512141736_v1_0_20.sql new file mode 100644 index 000000000..70bf98289 --- /dev/null +++ b/cmd/hatchet-migrate/migrate/migrations/20250512141736_v1_0_20.sql @@ -0,0 +1,30 @@ +-- +goose Up +-- +goose StatementBegin +CREATE TABLE v1_filter ( + id UUID NOT NULL DEFAULT gen_random_uuid(), + tenant_id UUID NOT NULL, + workflow_id UUID NOT NULL, + scope TEXT NOT NULL, + expression TEXT NOT NULL, + payload JSONB NOT NULL DEFAULT '{}'::JSONB, + + inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP, + + PRIMARY KEY (tenant_id, id) +); + +CREATE UNIQUE INDEX v1_filter_unique_idx ON v1_filter ( + tenant_id ASC, + workflow_id ASC, + scope ASC, + expression ASC +); + + +-- +goose StatementEnd + +-- +goose Down +-- +goose StatementBegin +DROP TABLE v1_filter; +-- +goose StatementEnd diff --git a/frontend/app/src/lib/api/generated/Api.ts b/frontend/app/src/lib/api/generated/Api.ts index e6901a369..54af3ee6b 100644 --- a/frontend/app/src/lib/api/generated/Api.ts +++ b/frontend/app/src/lib/api/generated/Api.ts @@ -85,8 +85,11 @@ import { UserRegisterRequest, UserTenantMembershipsList, V1CancelTaskRequest, + V1CreateFilterRequest, V1DagChildren, V1EventList, + V1Filter, + V1FilterList, V1LogLineList, V1ReplayTaskRequest, V1TaskEventList, @@ -595,6 +598,107 @@ export class Api< format: "json", ...params, }); + /** + * @description Lists all filters for a tenant. 
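+   *
+   * @example
+   * // usage sketch; `tenantId` and `workflowId` below are placeholder UUID strings
+   * const { data } = await api.v1FilterList(tenantId, { workflowIds: [workflowId], limit: 50 });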
+ * + * @tags Filter + * @name V1FilterList + * @summary List filters + * @request GET:/api/v1/stable/tenants/{tenant}/filters + * @secure + */ + v1FilterList = ( + tenant: string, + query?: { + /** + * The number to skip + * @format int64 + */ + offset?: number; + /** + * The number to limit by + * @format int64 + */ + limit?: number; + /** The workflow ids to filter by */ + workflowIds?: string[]; + /** The scopes to subset candidate filters by */ + scopes?: string[]; + }, + params: RequestParams = {}, + ) => + this.request({ + path: `/api/v1/stable/tenants/${tenant}/filters`, + method: "GET", + query: query, + secure: true, + format: "json", + ...params, + }); + /** + * @description Create a new filter + * + * @tags Filter + * @name V1FilterCreate + * @summary Create a filter + * @request POST:/api/v1/stable/tenants/{tenant}/filters + * @secure + */ + v1FilterCreate = ( + tenant: string, + data: V1CreateFilterRequest, + params: RequestParams = {}, + ) => + this.request({ + path: `/api/v1/stable/tenants/${tenant}/filters`, + method: "POST", + body: data, + secure: true, + type: ContentType.Json, + format: "json", + ...params, + }); + /** + * @description Get a filter by its id + * + * @tags Filter + * @name V1FilterGet + * @summary Get a filter + * @request GET:/api/v1/stable/tenants/{tenant}/filters/{v1-filter} + * @secure + */ + v1FilterGet = ( + tenant: string, + v1Filter: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/api/v1/stable/tenants/${tenant}/filters/${v1Filter}`, + method: "GET", + secure: true, + format: "json", + ...params, + }); + /** + * @description Delete a filter + * + * @tags Filter + * @name V1FilterDelete + * @request DELETE:/api/v1/stable/tenants/{tenant}/filters/{v1-filter} + * @secure + */ + v1FilterDelete = ( + tenant: string, + v1Filter: string, + params: RequestParams = {}, + ) => + this.request({ + path: `/api/v1/stable/tenants/${tenant}/filters/${v1Filter}`, + method: "DELETE", + secure: true, + format: "json", + ...params, + }); /** * @description Gets the readiness status * diff --git a/frontend/app/src/lib/api/generated/data-contracts.ts b/frontend/app/src/lib/api/generated/data-contracts.ts index d49668fa2..56d632e0e 100644 --- a/frontend/app/src/lib/api/generated/data-contracts.ts +++ b/frontend/app/src/lib/api/generated/data-contracts.ts @@ -743,6 +743,46 @@ export interface V1EventList { rows?: V1Event[]; } +export interface V1Filter { + metadata: APIResourceMeta; + /** The ID of the tenant associated with this filter. */ + tenantId: string; + /** + * The workflow id associated with this filter. + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + workflowId: string; + /** The scope associated with this filter. Used for subsetting candidate filters at evaluation time */ + scope: string; + /** The expression associated with this filter. */ + expression: string; + /** Additional payload data associated with the filter */ + payload: object; +} + +export interface V1FilterList { + pagination?: PaginationResponse; + rows?: V1Filter[]; +} + +export interface V1CreateFilterRequest { + /** + * The workflow id + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + workflowId: string; + /** The expression for the filter */ + expression: string; + /** The scope associated with this filter. 
Used for subsetting candidate filters at evaluation time */ + scope: string; + /** The payload for the filter */ + payload?: object; +} + export interface APIMetaAuth { /** * the supported types of authentication @@ -1189,6 +1229,8 @@ export interface CreateEventRequest { * @format int32 */ priority?: number; + /** The scope for event filtering. */ + scope?: string; } export interface BulkCreateEventRequest { diff --git a/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts b/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts index cc37aadae..c4ef370ab 100644 --- a/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts +++ b/frontend/app/src/next/lib/docs/generated/sdks/python/feature-clients/_meta.ts @@ -7,6 +7,13 @@ const meta = { }, href: '/sdks/python/feature-clients/cron', }, + filters: { + title: 'Filters', + theme: { + toc: true, + }, + href: '/sdks/python/feature-clients/filters', + }, logs: { title: 'Logs', theme: { diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts b/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts index c6e75a58b..cf4cf9ce0 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/run/event.ts @@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\n\tv1_workflows 'github.com/hatchet-dev/hatchet/examples/go/workflows'\n\tv1 'github.com/hatchet-dev/hatchet/pkg/v1'\n\t'github.com/joho/godotenv'\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t'simple-event:create',\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: 'Hello, World!',\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\n\tv1_workflows 'github.com/hatchet-dev/hatchet/examples/go/workflows'\n\tv1 'github.com/hatchet-dev/hatchet/pkg/v1'\n\t'github.com/joho/godotenv'\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t'simple-event:create',\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: 'Hello, World!',\n\t\t},\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n", source: 'out/go/run/event.go', blocks: { pushing_an_event: { start: 23, - stop: 29, + stop: 31, }, }, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts index 1019392b5..f77f461b5 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-affinity/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc 
run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t'model': 'fancy-ai-model-v2',\n\t\t\t'memory': 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:affinity'),\n\t\t\tName: 'affinity',\n\t\t\tDescription: 'affinity',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()['model']\n\n\t\t\t\t\tif model != 'fancy-ai-model-v3' {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t'model': nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t'model': 'fancy-ai-model-v3',\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName('step-one').\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t'model': {\n\t\t\t\t\t\t\tValue: 'fancy-ai-model-v3',\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t'memory': {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:affinity',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t'model': 'fancy-ai-model-v2',\n\t\t\t'memory': 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:affinity'),\n\t\t\tName: 'affinity',\n\t\t\tDescription: 'affinity',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()['model']\n\n\t\t\t\t\tif model != 'fancy-ai-model-v3' {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t'model': nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something 
to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t'model': 'fancy-ai-model-v3',\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName('step-one').\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t'model': {\n\t\t\t\t\t\t\tValue: 'fancy-ai-model-v3',\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t'memory': {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:affinity',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/assignment-affinity/run.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts index 84cdab5d2..17f91a4ba 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/assignment-sticky/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:sticky'),\n\t\t\tName: 'sticky',\n\t\t\tDescription: 'sticky',\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = ctx.SpawnWorkflow('sticky-child', nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf('error spawning workflow: %w', err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn 
&stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-three').AddParents('step-two'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: 'sticky-child',\n\t\t\tDescription: 'sticky',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:sticky',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:sticky'),\n\t\t\tName: 'sticky',\n\t\t\tDescription: 'sticky',\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = ctx.SpawnWorkflow('sticky-child', nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf('error spawning workflow: %w', err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-three').AddParents('step-two'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err 
!= nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: 'sticky-child',\n\t\t\tDescription: 'sticky',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:sticky',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/assignment-sticky/run.go', blocks: { stickyworker: { diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts index 1326acbe9..906e0225f 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/cancellation/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/google/uuid'\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/rest'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:cancellation'),\n\t\t\tName: 'cancellation',\n\t\t\tDescription: 'cancellation',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- 'done'\n\t\t\t\t\t\tlog.Printf('context cancelled')\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf('workflow never cancelled')\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: 'done',\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := 
c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:cancellation',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := 'cancellation'\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error listing workflows: %w', err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf('no workflows found'))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf('no workflows found'))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error listing workflow runs: %w', err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf('no workflow runs found'))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error cancelling workflow run: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/google/uuid'\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/rest'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:cancellation'),\n\t\t\tName: 'cancellation',\n\t\t\tDescription: 'cancellation',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- 'done'\n\t\t\t\t\t\tlog.Printf('context cancelled')\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf('workflow never cancelled')\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: 'done',\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:cancellation',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', 
err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := 'cancellation'\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error listing workflows: %w', err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf('no workflows found'))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf('no workflows found'))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error listing workflow runs: %w', err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf('no workflow runs found'))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error cancelling workflow run: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/cancellation/run.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts index ccadee28b..3202ba7bc 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/compute/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/compute'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\tpool := 'test-pool'\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region('ewr')},\n\t}\n\n\tperformancePool := 
'performance-pool'\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region('ewr')},\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one').SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one').SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/compute'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\tpool := 
'test-pool'\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region('ewr')},\n\t}\n\n\tperformancePool := 'performance-pool'\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region('ewr')},\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one').SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one').SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/compute/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts index 565fe761b..79c6c50a9 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/dag/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOutput struct {\n\tMessage 
string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'post-user-update',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 1 got username: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 2 got username: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-one', step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-two', step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Username was: ' + input.Username + ', Step 3: has parents 1 and 2' + step1Out.Message + ', ' + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-three').AddParents('step-one', 'step-two'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-one', step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-three', step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 4: has parents 1 and 3' + step1Out.Message + ', ' + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-four').AddParents('step-one', 'step-three'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-four', step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 5: has parent 4' + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-five').AddParents('step-four'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 
'test',\n\t\t},\n\t}\n\n\tlog.Printf('pushing event user:create:simple')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create:simple',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'post-user-update',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 1 got username: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 2 got username: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-one', step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-two', step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Username was: ' + input.Username + ', Step 3: has parents 1 and 2' + step1Out.Message + ', ' + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-three').AddParents('step-one', 'step-two'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-one', step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-three', step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 4: has parents 1 and 3' + step1Out.Message + ', ' + step3Out.Message,\n\t\t\t\t\t}, 
nil\n\t\t\t\t}).SetName('step-four').AddParents('step-one', 'step-three'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput('step-four', step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: 'Step 5: has parent 4' + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-five').AddParents('step-four'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\tlog.Printf('pushing event user:create:simple')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create:simple',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 'out/go/z_v0/dag/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts index 3575ca6fe..aecd4847b 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/requeue/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction('requeue:requeue', func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'example:event',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport 
(\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction('requeue:requeue', func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'example:event',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 'out/go/z_v0/deprecated/requeue/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts index 42f1450ca..41c364562 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/joho/godotenv'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println('step should have timed out')\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/joho/godotenv'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println('step should have timed out')\n}\n", 
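The four Filter methods added to Api.ts above compose into a full CRUD round trip. A minimal usage sketch in TypeScript, assuming an already-constructed instance of the generated Api class; the api and tenantId bindings, and all literal values, are illustrative and not part of this patch:

// Create a filter tied to a workflow. `scope` subsets which filters are
// even considered when an event is evaluated against candidate filters.
const created = await api.v1FilterCreate(tenantId, {
  workflowId: "00000000-0000-0000-0000-000000000000", // placeholder UUID
  expression: "input.user_id == '1234'", // illustrative match expression
  scope: "user:1234",
  payload: { plan: "pro" }, // optional extra data carried on the filter
});

// List only the candidate filters for a scope, then fetch and delete by
// the filter's external id. `.data` assumes the axios-style response the
// generated client returns when format is "json".
const filters = await api.v1FilterList(tenantId, { scopes: ["user:1234"] });
const fetched = await api.v1FilterGet(tenantId, created.data.metadata.id);
await api.v1FilterDelete(tenantId, created.data.metadata.id);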
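Most of the snippet churn in this patch, above and below, is one mechanical change: the v0 Go client's c.Event().Push() grew two trailing arguments after the payload, which every regenerated example passes as nil. They carry the same information as the optional priority field and the newly added scope field on CreateEventRequest in data-contracts.ts. Setting them over REST would look roughly like the sketch below; eventCreate is assumed to be the pre-existing generated method for event creation (it is not shown in this diff), and field names other than priority and scope are assumed from the existing request shape:

// Push an event with an explicit priority and a filter scope. At
// evaluation time, the event's `scope` is matched against the `scope`
// stored on each filter to narrow the candidate set.
await api.eventCreate(tenantId, {
  key: "user:create",
  input: { test: "test" },
  priority: 1, // int32 in the contract; processing priority
  scope: "user:1234",
});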
source: 'out/go/z_v0/deprecated/schedule-timeout/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts index 652bca398..99d942159 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/timeout/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction('timeout:timeout', func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println('context cancelled in ', time.Since(timeStart).Seconds(), ' seconds')\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction('timeout:timeout', func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println('context cancelled in ', time.Since(timeStart).Seconds(), ' seconds')\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\tevent := 
sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 'out/go/z_v0/deprecated/timeout/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts index 86a65e1d4..d82118c78 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/deprecated/yaml/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype actionInput struct {\n\tMessage string `json:'message'`\n}\n\ntype actionOut struct {\n\tMessage string `json:'message'`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn &actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService('echo')\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up worker: %w', err))\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype actionInput struct {\n\tMessage string `json:'message'`\n}\n\ntype actionOut struct {\n\tMessage string `json:'message'`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn 
&actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService('echo')\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up worker: %w', err))\n\t}\n}\n", source: 'out/go/z_v0/deprecated/yaml/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts index 9bff5a47f..6aadefd66 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/errors-test/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'os'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/errors/sentry'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf('this is an error')\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv('SENTRY_DSN'),\n\t\tEnvironment: os.Getenv('SENTRY_ENVIRONMENT'),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event('user:create'), &worker.WorkflowJob{\n\t\tName: 'failing-workflow',\n\t\tDescription: 'This is a failing workflow.',\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction('echo:echo', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t'message': input.Message,\n\t// \t}, 
nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction('echo:object', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'os'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/errors/sentry'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf('this is an error')\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv('SENTRY_DSN'),\n\t\tEnvironment: os.Getenv('SENTRY_ENVIRONMENT'),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event('user:create'), &worker.WorkflowJob{\n\t\tName: 'failing-workflow',\n\t\tDescription: 'This is a failing workflow.',\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction('echo:echo', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t'message': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction('echo:object', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf('error starting worker: %w', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n", source: 'out/go/z_v0/errors-test/main.go', blocks: {}, highlights: {}, diff --git 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts index 4db5d75f5..0c9c0b612 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:'index'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('concurrency-test-event'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'concurrency-limit',\n\t\t\tDescription: 'This limits concurrency to 1 run at a time.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println('context done, returning')\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'concurrency-test-event',\n\t\t\tfirstEvent,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'concurrency-test-event',\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase 
<-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:'index'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('concurrency-test-event'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'concurrency-limit',\n\t\t\tDescription: 'This limits concurrency to 1 run at a time.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println('context done, returning')\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'concurrency-test-event',\n\t\t\tfirstEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'concurrency-test-event',\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 'out/go/z_v0/limit-concurrency/cancel-in-progress/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts index e24a32c98..d9dd8dc05 100644 --- 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:'user_id'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn '', fmt.Errorf('error getting input: %w', err)\n\t}\n\n\treturn fmt.Sprintf('%d', input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('concurrency-test-event-rr'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'concurrency-limit-round-robin',\n\t\t\tDescription: 'This limits concurrency to 2 runs at a time.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf('error getting input: %w', err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println('received event', input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println('processed event', input.UserId)\n\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(context.Background(), 'concurrency-test-event-rr', event)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase 
<-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\treturn fmt.Errorf('error cleaning up: %w', err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/client/types'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:'user_id'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn '', fmt.Errorf('error getting input: %w', err)\n\t}\n\n\treturn fmt.Sprintf('%d', input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('concurrency-test-event-rr'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'concurrency-limit-round-robin',\n\t\t\tDescription: 'This limits concurrency to 2 runs at a time.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf('error getting input: %w', err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println('received event', input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println('processed event', input.UserId)\n\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(\n\t\t\t\tcontext.Background(),\n\t\t\t\t'concurrency-test-event-rr',\n\t\t\t\tevent,\n\t\t\t\tnil,\n\t\t\t\tnil,\n\t\t\t)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println('interrupted')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil 
{\n\t\t\t\treturn fmt.Errorf('error cleaning up: %w', err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 'out/go/z_v0/limit-concurrency/group-round-robin/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts index 7b6403519..b0acbb4dc 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/logging/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:log:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf('step-one: %d', i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:log:simple',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport 
(\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:log:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf('step-one: %d', i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:log:simple',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/logging/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts index 5cdfd8280..bc69a40ac 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/middleware/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := 
client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf('1st-middleware')\n\t\tevents <- '1st-middleware'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'testkey', 'testvalue'))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf('2nd-middleware')\n\t\tevents <- '2nd-middleware'\n\n\t\t// time the function duration\n\t\tstart := time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf('step function took %s\\n', duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService('test')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- 'svc-middleware'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'svckey', 'svcvalue'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:middleware'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'middleware',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\ttestVal := ctx.Value('testkey').(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value('svckey').(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event user:create:middleware')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:middleware',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', 
err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf('1st-middleware')\n\t\tevents <- '1st-middleware'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'testkey', 'testvalue'))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf('2nd-middleware')\n\t\tevents <- '2nd-middleware'\n\n\t\t// time the function duration\n\t\tstart := time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf('step function took %s\\n', duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService('test')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- 'svc-middleware'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'svckey', 'svcvalue'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:middleware'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'middleware',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\ttestVal := ctx.Value('testkey').(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value('svckey').(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event user:create:middleware')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:middleware',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/middleware/run.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts index f362877f4..6941cfd6c 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts +++ 
b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/namespaced/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace('sample'),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport 
(\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace('sample'),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/namespaced/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts index 5dbc49ee0..15f15d8ca 100644 --- 
a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/register-action/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value('testkey').(string)\n\t// svcVal := ctx.Value('svckey').(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: 'Username is: ' + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: 'Above message is: ' + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'testkey', 'testvalue'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName('step-one'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName('step-two'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create', 'user:update'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'post-user-update',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call('test:step-one'),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call('step-two'),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction('echo:echo', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t'message': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction('echo:object', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil 
{\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'time'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserId string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value('testkey').(string)\n\t// svcVal := ctx.Value('svckey').(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: 'Username is: ' + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: 'Above message is: ' + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), 'testkey', 'testvalue'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName('step-one'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName('step-two'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create', 'user:update'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'post-user-update',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call('test:step-one'),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call('step-two'),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction('echo:echo', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t'message': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction('echo:object', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserId: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n", source: 
'out/go/z_v0/register-action/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts index 1f97e94ee..2e9c1845e 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/retries/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf('error')\n\t}\n\n\tlog.Printf('finished step-one')\n\treturn &stepOneOutput{\n\t\tMessage: 'Username is: ' + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName('step-one').SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\tlog.Printf('pushing event user:create:simple')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create:simple',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf('error cleaning up worker: %w', err)\n\t}\n\n\treturn nil\n}\n", + "package main\n\nimport 
(\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn 'user-create', nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf('error')\n\t}\n\n\tlog.Printf('finished step-one')\n\treturn &stepOneOutput{\n\t\tMessage: 'Username is: ' + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\ttestSvc := w.NewService('test')\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events('user:create:simple'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName('step-one').SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\tlog.Printf('pushing event user:create:simple')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t'user:create:simple',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf('error cleaning up worker: %w', err)\n\t}\n\n\treturn nil\n}\n", source: 'out/go/z_v0/retries/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts index 4656fa2a0..4358ad8a8 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/simple/main.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype 
userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\n\t'github.com/joho/godotenv'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/cmdutils'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:'username'`\n\tUserID string `json:'user_id'`\n\tData map[string]string `json:'data'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:'message'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif 
err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf('error cleaning up: %w', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events('user:create:simple'),\n\t\t\tName: 'simple',\n\t\t\tDescription: 'This runs after an update to the user model.',\n\t\t\tConcurrency: worker.Expression('input.user_id'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-one')\n\t\t\t\t\tevents <- 'step-one'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Username is: ' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName('step-one'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput('step-one', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf('step-two')\n\t\t\t\t\tevents <- 'step-two'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: 'Above message is: ' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName('step-two').AddParents('step-one'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf('pushing event user:create:simple')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:simple',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t'hello': 'world',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/simple/main.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts index 1c58a3cca..755d85cc0 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/timeout/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error 
creating worker: %w', err)\n\t}\n\n\terr = w.On(\n\t\tworker.Events('user:create:timeout'),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:timeout',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- 'done'\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'fmt'\n\t'log'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating client: %w', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error creating worker: %w', err)\n\t}\n\n\terr = w.On(\n\t\tworker.Events('user:create:timeout'),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error registering workflow: %w', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf('pushing event')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: 'echo-test',\n\t\t\tUserID: '1234',\n\t\t\tData: map[string]string{\n\t\t\t\t'test': 'test',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t'user:create:timeout',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf('error pushing event: %w', err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- 'done'\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf('error starting worker: %w', err)\n\t}\n\n\treturn cleanup, nil\n}\n", source: 'out/go/z_v0/timeout/run.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts index 1c16f57a3..1f166fe44 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/go/z_v0/webhook/run.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'go', content: - "package main\n\nimport (\n\t'context'\n\t'errors'\n\t'fmt'\n\t'log'\n\t'net/http'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the HelloHandler to the /hello route\n\tmux.HandleFunc('/webhook', handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: ':' + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tdefer func(server *http.Server, 
ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := 'secret'\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: 'test-' + name,\n\t\tURL: fmt.Sprintf('http://localhost:%s/webhook', port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf('error setting up webhook: %w', err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf('pushing event')\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n", + "package main\n\nimport (\n\t'context'\n\t'errors'\n\t'fmt'\n\t'log'\n\t'net/http'\n\t'time'\n\n\t'github.com/hatchet-dev/hatchet/pkg/client'\n\t'github.com/hatchet-dev/hatchet/pkg/worker'\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the HelloHandler to the /hello route\n\tmux.HandleFunc('/webhook', handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: ':' + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tdefer func(server *http.Server, ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := 'secret'\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: 'test-' + name,\n\t\tURL: fmt.Sprintf('http://localhost:%s/webhook', port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf('error setting up webhook: %w', err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf('pushing event')\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: 'echo-test',\n\t\tUserID: '1234',\n\t\tData: map[string]string{\n\t\t\t'test': 'test',\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf('error pushing event: %w', err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n", source: 'out/go/z_v0/webhook/run.go', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts index 04b92b579..9a9348bc9 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/dag/worker.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'python', content: - "import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, 
Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name='DAGWorkflow')\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = (await ctx.task_output(step2)).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n 'executed step4',\n time.strftime('%H:%M:%S', time.localtime()),\n input,\n ctx.task_output(step1),\n await ctx.task_output(step3),\n )\n return {\n 'step4': 'step4',\n }\n\n\ndef main() -> None:\n worker = hatchet.worker('dag-worker', workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n", + "import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name='DAGWorkflow')\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = ctx.task_output(step2).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n 'executed step4',\n time.strftime('%H:%M:%S', time.localtime()),\n input,\n ctx.task_output(step1),\n ctx.task_output(step3),\n )\n return {\n 'step4': 'step4',\n }\n\n\ndef main() -> None:\n worker = hatchet.worker('dag-worker', workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n", source: 'out/python/dag/worker.py', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts index ab2820368..074eb9d42 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/events/test_event.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'python', content: - "import pytest\n\nfrom hatchet_sdk.clients.events import BulkPushEventOptions, BulkPushEventWithMetadata\nfrom hatchet_sdk.hatchet import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push('user:create', {'test': 'test'})\n\n assert e.eventId is 
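A behavioral change rides along in the DAG hunk above: `ctx.task_output(...)` is now a plain synchronous call even when the parent task is async, where the old snippet had to `await` it for `step2` and `step3`. A condensed sketch of the new pattern, reusing the snippet's own definitions:

    import random

    from pydantic import BaseModel

    from hatchet_sdk import Context, EmptyModel, Hatchet

    hatchet = Hatchet()
    dag_workflow = hatchet.workflow(name='DAGWorkflow')


    class StepOutput(BaseModel):
        random_number: int


    @dag_workflow.task()
    def step1(input: EmptyModel, ctx: Context) -> StepOutput:
        return StepOutput(random_number=random.randint(1, 100))


    @dag_workflow.task()
    async def step2(input: EmptyModel, ctx: Context) -> StepOutput:
        return StepOutput(random_number=random.randint(1, 100))


    @dag_workflow.task(parents=[step1, step2])
    async def step3(input: EmptyModel, ctx: Context) -> None:
        # task_output is now synchronous for async parents too; no await.
        one = ctx.task_output(step1).random_number
        two = ctx.task_output(step2).random_number
        print(one + two)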
not None\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push('user:create', {'test': 'test'})\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n\n events = [\n BulkPushEventWithMetadata(\n key='event1',\n payload={'message': 'This is event 1'},\n additional_metadata={'source': 'test', 'user_id': 'user123'},\n ),\n BulkPushEventWithMetadata(\n key='event2',\n payload={'message': 'This is event 2'},\n additional_metadata={'source': 'test', 'user_id': 'user456'},\n ),\n BulkPushEventWithMetadata(\n key='event3',\n payload={'message': 'This is event 3'},\n additional_metadata={'source': 'test', 'user_id': 'user789'},\n ),\n ]\n opts = BulkPushEventOptions(namespace='bulk-test')\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = 'bulk-test'\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(sorted_events, sorted_returned_events):\n assert returned_event.key == namespace + original_event.key\n", + "import asyncio\nimport json\nfrom contextlib import asynccontextmanager\nfrom typing import AsyncGenerator, cast\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.events.worker import EventWorkflowInput, event_workflow\nfrom hatchet_sdk.clients.events import (\n BulkPushEventOptions,\n BulkPushEventWithMetadata,\n PushEventOptions,\n)\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.contracts.events_pb2 import Event\nfrom hatchet_sdk.hatchet import Hatchet\n\n\nclass ProcessedEvent(BaseModel):\n id: str\n payload: dict[str, str | bool]\n meta: dict[str, str | bool | int]\n should_have_runs: bool\n test_run_id: str\n\n def __hash__(self) -> int:\n return hash(self.model_dump_json())\n\n\n@asynccontextmanager\nasync def event_filter(\n hatchet: Hatchet,\n test_run_id: str,\n expression: str | None = None,\n payload: dict[str, str] = {},\n) -> AsyncGenerator[None, None]:\n expression = (\n expression\n or f'input.should_skip == false && payload.testRunId == '{test_run_id}''\n )\n\n f = await hatchet.filters.aio_create(\n workflow_id=event_workflow.id,\n expression=expression,\n scope=test_run_id,\n payload={'testRunId': test_run_id, **payload},\n )\n\n yield\n\n await hatchet.filters.aio_delete(f.metadata.id)\n\n\nasync def fetch_runs_for_event(\n hatchet: Hatchet, event: Event\n) -> tuple[ProcessedEvent, list[V1TaskSummary]]:\n runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId)\n\n meta = (\n cast(dict[str, str | int | bool], json.loads(event.additionalMetadata))\n if event.additionalMetadata\n else {}\n )\n payload = (\n cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {}\n )\n\n return (\n ProcessedEvent(\n id=event.eventId,\n payload=payload,\n meta=meta,\n should_have_runs=meta.get('should_have_runs', False) is True,\n test_run_id=cast(str, meta['test_run_id']),\n ),\n runs.rows or [],\n )\n\n\nasync def wait_for_result(\n hatchet: Hatchet, events: list[Event]\n) -> dict[ProcessedEvent, list[V1TaskSummary]]:\n await 
asyncio.sleep(3)\n\n persisted = (await hatchet.event.aio_list(limit=100)).rows or []\n\n assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted})\n\n iters = 0\n while True:\n print('Waiting for event runs to complete...')\n if iters > 15:\n print('Timed out waiting for event runs to complete.')\n return {}\n\n iters += 1\n\n event_runs = await asyncio.gather(\n *[fetch_runs_for_event(hatchet, event) for event in events]\n )\n\n all_empty = all(not event_run for _, event_run in event_runs)\n\n if all_empty:\n await asyncio.sleep(1)\n continue\n\n event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs}\n\n any_queued_or_running = any(\n run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING]\n for runs in event_id_to_runs.values()\n for run in runs\n )\n\n if any_queued_or_running:\n await asyncio.sleep(1)\n continue\n\n break\n\n return event_id_to_runs\n\n\nasync def assert_event_runs_processed(\n event: ProcessedEvent,\n runs: list[V1TaskSummary],\n) -> None:\n if event.should_have_runs:\n assert len(runs) > 0\n else:\n assert len(runs) == 0\n\n\ndef bpi(\n index: int = 1,\n test_run_id: str = '',\n should_skip: bool = False,\n should_have_runs: bool = True,\n key: str = 'user:create',\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> BulkPushEventWithMetadata:\n return BulkPushEventWithMetadata(\n key=key,\n payload={\n 'should_skip': should_skip,\n **payload,\n },\n additional_metadata={\n 'should_have_runs': should_have_runs,\n 'test_run_id': test_run_id,\n 'key': index,\n },\n scope=scope,\n )\n\n\ndef cp(should_skip: bool) -> dict[str, bool]:\n return EventWorkflowInput(should_skip=should_skip).model_dump()\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push('user:create', cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push('user:create', cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n events = [\n BulkPushEventWithMetadata(\n key='event1',\n payload={'message': 'This is event 1', 'should_skip': False},\n additional_metadata={'source': 'test', 'user_id': 'user123'},\n ),\n BulkPushEventWithMetadata(\n key='event2',\n payload={'message': 'This is event 2', 'should_skip': False},\n additional_metadata={'source': 'test', 'user_id': 'user456'},\n ),\n BulkPushEventWithMetadata(\n key='event3',\n payload={'message': 'This is event 3', 'should_skip': False},\n additional_metadata={'source': 'test', 'user_id': 'user789'},\n ),\n ]\n opts = BulkPushEventOptions(namespace='bulk-test')\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = 'bulk-test'\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(sorted_events, sorted_returned_events):\n assert returned_event.key == namespace + original_event.key\n\n\n@pytest.fixture(scope='function')\ndef test_run_id() -> str:\n return str(uuid4())\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_engine_behavior(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n events = [\n bpi(\n 
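The `wait_for_result` helper above encodes the test's polling strategy: look up runs by the external id of the event that triggered them, then loop until nothing is queued or running, or a retry budget runs out. A minimal standalone sketch of that loop, assuming a configured `Hatchet` client:

    import asyncio

    from hatchet_sdk import Hatchet
    from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus


    async def wait_until_settled(hatchet: Hatchet, event_ids: list[str]) -> dict:
        # Poll until every triggered run has left QUEUED/RUNNING, or give up.
        for _ in range(15):
            results = {}
            for event_id in event_ids:
                rows = (
                    await hatchet.runs.aio_list(triggering_event_external_id=event_id)
                ).rows or []
                results[event_id] = rows

            statuses = [r.status for rows in results.values() for r in rows]
            if statuses and not any(
                s in (V1TaskStatus.QUEUED, V1TaskStatus.RUNNING) for s in statuses
            ):
                return results

            await asyncio.sleep(1)

        return {}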
test_run_id=test_run_id,\n ),\n bpi(\n test_run_id=test_run_id,\n key='thisisafakeeventfoobarbaz',\n should_have_runs=False,\n ),\n ]\n\n print('Events:', events)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n print('Result:', result)\n\n runs = await wait_for_result(hatchet, result)\n\n for event, r in runs.items():\n await assert_event_runs_processed(event, r)\n\n\ndef gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]:\n return [\n bpi(\n index=1,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n ),\n bpi(\n index=2,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=True,\n ),\n bpi(\n index=3,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n scope=test_run_id,\n ),\n bpi(\n index=4,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n ),\n bpi(\n index=5,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n key='thisisafakeeventfoobarbaz',\n ),\n bpi(\n index=6,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n scope=test_run_id,\n key='thisisafakeeventfoobarbaz',\n ),\n ]\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id):\n events = gen_bulk_events(test_run_id)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n runs = await wait_for_result(hatchet, result)\n for e, r in runs.items():\n await assert_event_runs_processed(e, r)\n\n\nasync def bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event:\n return await hatchet.event.aio_push(\n event_key=event.key,\n payload=event.payload,\n options=PushEventOptions(\n scope=event.scope,\n additional_metadata=event.additional_metadata,\n priority=event.priority,\n ),\n )\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_skipping_filtering_no_bulk(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(hatchet, test_run_id):\n raw_events = gen_bulk_events(test_run_id)\n events = await asyncio.gather(\n *[bulk_to_single(hatchet, event) for event in raw_events]\n )\n\n result = await wait_for_result(hatchet, events)\n for event, runs in result.items():\n await assert_event_runs_processed(event, runs)\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n 'input.should_skip == false && payload.foobar == 'baz'',\n {'foobar': 'qux'},\n ):\n event = await hatchet.event.aio_push(\n event_key='user:create',\n payload={'message': 'This is event 1', 'should_skip': False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n 'should_have_runs': False,\n 'test_run_id': test_run_id,\n 'key': 1,\n },\n ),\n )\n\n runs = await wait_for_result(hatchet, [event])\n assert len(runs) == 0\n\n\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_event_payload_filtering_with_payload_match(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n 'input.should_skip == false && payload.foobar == 'baz'',\n {'foobar': 'baz'},\n ):\n event = await hatchet.event.aio_push(\n event_key='user:create',\n payload={'message': 'This is event 1', 'should_skip': False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n 'should_have_runs': True,\n 
'test_run_id': test_run_id,\n 'key': 1,\n },\n ),\n )\n runs = await wait_for_result(hatchet, [event])\n assert len(runs) == 1\n",
   source: 'out/python/events/test_event.py',
   blocks: {},
   highlights: {},
diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts
index 711d24f6c..55f942b46 100644
--- a/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts
+++ b/frontend/app/src/next/lib/docs/generated/snips/python/events/worker.ts
@@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
 const snippet: Snippet = {
   language: 'python',
   content:
-    "from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n# > Event trigger\nevent_workflow = hatchet.workflow(name='EventWorkflow', on_events=['user:create'])\n\n\n@event_workflow.task()\ndef task(input: EmptyModel, ctx: Context) -> None:\n print('event received')\n\n\ndef main() -> None:\n worker = hatchet.worker(name='EventWorker', workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n",
+    "from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet()\nEVENT_KEY = 'user:create'\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\n# > Event trigger\nevent_workflow = hatchet.workflow(\n name='EventWorkflow',\n on_events=[EVENT_KEY],\n input_validator=EventWorkflowInput,\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> None:\n print('event received')\n\n\ndef main() -> None:\n worker = hatchet.worker(name='EventWorker', workflows=[event_workflow])\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n",
   source: 'out/python/events/worker.py',
   blocks: {
     event_trigger: {
-      start: 6,
-      stop: 6,
+      start: 14,
+      stop: 18,
     },
   },
   highlights: {},
diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts b/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts
index 31b421796..bd09ce5f8 100644
--- a/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts
+++ b/frontend/app/src/next/lib/docs/generated/snips/python/priority/test_priority.ts
@@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types';
 const snippet: Snippet = {
   language: 'python',
   content:
-    "import asyncio\nfrom datetime import datetime, timedelta\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, AsyncGenerator, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal['low', 'medium', 'high', 'default']\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case 'high':\n return 3\n case 'medium':\n return 2\n case 'low':\n return 1\n case 'default':\n return DEFAULT_PRIORITY\n case _:\n raise ValueError(f'Invalid priority: {priority}')\n\n\n@pytest_asyncio.fixture(loop_scope='session', scope='function')\nasync def dummy_runs() -> None:\n priority: Priority = 'high'\n\n await priority_workflow.aio_run_many_no_wait(\n [\n
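The `events/worker.ts` hunk above carries the doc-facing change: the event-triggered workflow now declares an `input_validator`, and the new tests pair such workflows with dynamically created filters. A minimal sketch of the filter lifecycle, assuming a configured client; the `abc123` scope value is a placeholder:

    from pydantic import BaseModel

    from hatchet_sdk import Context, Hatchet

    hatchet = Hatchet()


    class EventWorkflowInput(BaseModel):
        should_skip: bool


    event_workflow = hatchet.workflow(
        name='EventWorkflow',
        on_events=['user:create'],
        input_validator=EventWorkflowInput,
    )


    async def filter_lifecycle() -> None:
        # In the CEL expression, `input` is the event payload and
        # `payload` is the payload stored on the filter itself.
        f = await hatchet.filters.aio_create(
            workflow_id=event_workflow.id,
            expression="input.should_skip == false && payload.testRunId == 'abc123'",
            scope='abc123',
            payload={'testRunId': 'abc123'},
        )

        # ... push events with PushEventOptions(scope='abc123') here ...

        await hatchet.filters.aio_delete(f.metadata.id)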
priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'type': 'dummy',\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return None\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = ['low', 'medium', 'high', 'default']\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'test_run_id': test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = ['low', 'medium', 'high', 'default']\n run_at = datetime.now() + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'test_run_id': test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError('Timed out waiting for runs to finish')\n\n attempts += 1\n 
await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError('One or more runs failed or were cancelled')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope='session', scope='function')\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = ['low', 'medium', 'high']\n n = 30\n\n crons = await asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f'{test_run_id}-cron-{i}',\n expression='* * * * *',\n input={},\n additional_metadata={\n 'trigger': 'cron',\n 'test_run_id': test_run_id,\n 'priority': (priority := choice(choices)),\n 'key': str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now()\n next_minute = now.replace(second=0, microsecond=0, minute=now.minute + 1)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError('Timed out waiting for runs to finish')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError('One or more runs failed or were cancelled')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n 
finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n", + "import asyncio\nfrom datetime import datetime, timedelta\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, AsyncGenerator, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal['low', 'medium', 'high', 'default']\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case 'high':\n return 3\n case 'medium':\n return 2\n case 'low':\n return 1\n case 'default':\n return DEFAULT_PRIORITY\n case _:\n raise ValueError(f'Invalid priority: {priority}')\n\n\n@pytest_asyncio.fixture(loop_scope='session', scope='function')\nasync def dummy_runs() -> None:\n priority: Priority = 'high'\n\n await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'type': 'dummy',\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return None\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = ['low', 'medium', 'high', 'default']\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'test_run_id': test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n 
finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = ['low', 'medium', 'high', 'default']\n run_at = datetime.now() + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n 'priority': priority,\n 'key': ix,\n 'test_run_id': test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError('Timed out waiting for runs to finish')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError('One or more runs failed or were cancelled')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope='session', scope='function')\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = ['low', 'medium', 'high']\n n = 30\n\n crons = await asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f'{test_run_id}-cron-{i}',\n 
expression='* * * * *',\n input={},\n additional_metadata={\n 'trigger': 'cron',\n 'test_run_id': test_run_id,\n 'priority': (priority := choice(choices)),\n 'key': str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now()\n next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.parametrize(\n 'on_demand_worker',\n [\n (\n ['poetry', 'run', 'python', 'examples/priority/worker.py', '--slots', '1'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError('Timed out waiting for runs to finish')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n 'test_run_id': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError('One or more runs failed or were cancelled')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get('priority') or 'low',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n '''Run start times should be in order of priority'''\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n '''Runs should proceed one at a time'''\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n '''Runs should finish after starting (this is mostly a test for engine datetime handling bugs)'''\n assert curr.finished_at >= curr.started_at\n", source: 'out/python/priority/test_priority.py', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts index 245a4ee68..20dbfdc2e 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/simple/index.ts @@ -1,5 +1,7 @@ +import test_simple_workflow from './test_simple_workflow'; import trigger from './trigger'; import worker from './worker'; +export { test_simple_workflow }; export { trigger }; export { worker }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts new file mode 100644 index 000000000..a8522ade3 --- /dev/null +++ b/frontend/app/src/next/lib/docs/generated/snips/python/simple/test_simple_workflow.ts @@ -0,0 +1,12 @@ +import { Snippet } from '@/next/lib/docs/generated/snips/types'; + +const 
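Buried in this hunk is a genuine bugfix to `time_until_next_minute`: the old `now.replace(minute=now.minute + 1)` raises `ValueError` whenever `now.minute == 59`, while the new version adds a `timedelta` first so the hour and day roll over correctly. The two variants side by side:

    from datetime import datetime, timedelta


    def time_until_next_minute_old(now: datetime) -> float:
        # Raises ValueError('minute must be in 0..59') when now.minute == 59.
        next_minute = now.replace(second=0, microsecond=0, minute=now.minute + 1)
        return (next_minute - now).total_seconds()


    def time_until_next_minute_new(now: datetime) -> float:
        # Adding a timedelta first lets the hour/day roll over cleanly.
        next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0)
        return (next_minute - now).total_seconds()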
snippet: Snippet = { + language: 'python', + content: + "import pytest\n\nfrom examples.simple.worker import simple, simple_durable\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.standalone import Standalone\n\n\n@pytest.mark.parametrize('task', [simple, simple_durable])\n@pytest.mark.asyncio(loop_scope='session')\nasync def test_simple_workflow_running_options(\n task: Standalone[EmptyModel, dict[str, str]]\n) -> None:\n x1 = task.run()\n x2 = await task.aio_run()\n\n x3 = task.run_many([task.create_bulk_run_item()])[0]\n x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0]\n\n x5 = task.run_no_wait().result()\n x6 = (await task.aio_run_no_wait()).result()\n x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0]\n x8 = [\n x.result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n x9 = await task.run_no_wait().aio_result()\n x10 = await (await task.aio_run_no_wait()).aio_result()\n x11 = [\n await x.aio_result()\n for x in task.run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n x12 = [\n await x.aio_result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n assert all(\n x == {'result': 'Hello, world!'}\n for x in [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12]\n )\n", + source: 'out/python/simple/test_simple_workflow.py', + blocks: {}, + highlights: {}, +}; // Then replace double quotes with single quotes + +export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts index 8b98f634d..8b60eb2b0 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/simple/trigger.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'python', - content: 'from examples.simple.worker import step1\n\nstep1.run()\n', + content: 'from examples.simple.worker import simple\n\nsimple.run()\n', source: 'out/python/simple/trigger.py', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts index 55f24a824..e653a836c 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/simple/worker.ts @@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'python', content: - "# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task(name='SimpleWorkflow')\ndef step1(input: EmptyModel, ctx: Context) -> None:\n print('executed step1')\n\n\ndef main() -> None:\n worker = hatchet.worker('test-worker', slots=1, workflows=[step1])\n worker.start()\n\n\n\nif __name__ == '__main__':\n main()\n", + "# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task()\ndef simple(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {'result': 'Hello, world!'}\n\n\n@hatchet.durable_task()\ndef simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {'result': 'Hello, world!'}\n\n\ndef main() -> None:\n worker = hatchet.worker('test-worker', workflows=[simple, simple_durable])\n worker.start()\n\n\n\nif __name__ 
== '__main__':\n main()\n", source: 'out/python/simple/worker.py', blocks: { simple: { start: 2, - stop: 17, + stop: 22, }, }, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts b/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts index 9dffd8cef..66c931ba4 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/python/worker.ts @@ -3,7 +3,7 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'python', content: - "from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n 'e2e-test-worker',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n lifespan_task,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n", + "from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.events.worker import event_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, 
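The new `test_simple_workflow` snippet above enumerates twelve equivalent ways to invoke a standalone task, all asserting the same `{'result': 'Hello, world!'}` output. A trimmed sketch of the main forms, assuming the `simple` task from `examples/simple/worker.py`:

    import asyncio

    from examples.simple.worker import simple


    async def main() -> None:
        r1 = simple.run()                                  # blocking
        r2 = await simple.aio_run()                        # async
        r3 = simple.run_no_wait().result()                 # fire, then collect
        r4 = await (await simple.aio_run_no_wait()).aio_result()
        assert r1 == r2 == r3 == r4 == {'result': 'Hello, world!'}


    if __name__ == '__main__':
        asyncio.run(main())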
sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.simple.worker import simple, simple_durable\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n 'e2e-test-worker',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n event_workflow,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n lifespan_task,\n simple,\n simple_durable,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == '__main__':\n main()\n", source: 'out/python/worker.py', blocks: {}, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts new file mode 100644 index 000000000..40e5dfcb2 --- /dev/null +++ b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.e2e.ts @@ -0,0 +1,12 @@ +import { Snippet } from '@/next/lib/docs/generated/snips/types'; + +const snippet: Snippet = { + language: 'typescript ', + content: + "import sleep from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/util/sleep';\nimport { randomUUID } from 'crypto';\nimport { Event } from '@hatchet-dev/typescript-sdk-dev/typescript-sdk/protoc/events';\nimport { SIMPLE_EVENT, lower, Input } from './workflow';\nimport { hatchet } from '../hatchet-client';\nimport { Worker } from '../../client/worker/worker';\n\nxdescribe('events-e2e', () => {\n let worker: Worker;\n let testRunId: string;\n\n beforeEach(async () => {\n testRunId = randomUUID();\n\n worker = await hatchet.worker('event-worker');\n await worker.registerWorkflow(lower);\n\n void worker.start();\n });\n\n afterAll(async () => {\n await worker.stop();\n await sleep(2000);\n });\n\n async function setupEventFilter(expression?: string, payload: Record = {}) {\n const finalExpression =\n expression || `input.ShouldSkip == false && payload.testRunId == '${testRunId}'`;\n\n const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id;\n\n const filter = await hatchet.filters.create({\n workflowId,\n expression: finalExpression,\n scope: testRunId,\n payload: { testRunId, ...payload },\n });\n\n return async () => {\n await hatchet.filters.delete(filter.metadata.id);\n };\n }\n\n // Helper function to wait for events to process and fetch runs\n async function waitForEventsToProcess(events: Event[]): Promise> {\n await sleep(3000);\n\n const persisted = (await hatchet.events.list({ limit: 100 })).rows || [];\n\n // Ensure all our events are persisted\n const eventIds = new Set(events.map((e) => e.eventId));\n const persistedIds = new Set(persisted.map((e) => e.metadata.id));\n expect(Array.from(eventIds).every((id) => 
persistedIds.has(id))).toBeTruthy();\n\n let attempts = 0;\n const maxAttempts = 15;\n const eventToRuns: Record = {};\n\n while (true) {\n console.log('Waiting for event runs to complete...');\n if (attempts > maxAttempts) {\n console.log('Timed out waiting for event runs to complete.');\n return {};\n }\n\n attempts += 1;\n\n // For each event, fetch its runs\n const runsPromises = events.map(async (event) => {\n const runs = await hatchet.runs.list({\n triggeringEventExternalId: event.eventId,\n });\n\n // Extract metadata from event\n const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {};\n\n const payload = event.payload ? JSON.parse(event.payload) : {};\n\n return {\n event: {\n id: event.eventId,\n payload,\n meta,\n shouldHaveRuns: Boolean(meta.should_have_runs),\n testRunId: meta.test_run_id,\n },\n runs: runs.rows || [],\n };\n });\n\n const eventRuns = await Promise.all(runsPromises);\n\n // If all events have no runs yet, wait and retry\n if (eventRuns.every(({ runs }) => runs.length === 0)) {\n await sleep(1000);\n\n continue;\n }\n\n // Store runs by event ID\n for (const { event, runs } of eventRuns) {\n eventToRuns[event.id] = runs;\n }\n\n // Check if any runs are still in progress\n const anyInProgress = Object.values(eventToRuns).some((runs) =>\n runs.some((run) => run.status === 'QUEUED' || run.status === 'RUNNING')\n );\n\n if (anyInProgress) {\n await sleep(1000);\n\n continue;\n }\n\n break;\n }\n\n return eventToRuns;\n }\n\n // Helper to verify runs match expectations\n function verifyEventRuns(eventData: any, runs: any[]) {\n if (eventData.shouldHaveRuns) {\n expect(runs.length).toBeGreaterThan(0);\n } else {\n expect(runs.length).toBe(0);\n }\n }\n\n // Helper to create bulk push event objects\n function createBulkPushEvent({\n index = 1,\n ShouldSkip = false,\n shouldHaveRuns = true,\n key = SIMPLE_EVENT,\n payload = {},\n scope = null,\n }: {\n index?: number;\n ShouldSkip?: boolean;\n shouldHaveRuns?: boolean;\n key?: string;\n payload?: Record;\n scope?: string | null;\n }) {\n return {\n key,\n payload: {\n ShouldSkip,\n Message: `This is event ${index}`,\n ...payload,\n },\n additionalMetadata: {\n should_have_runs: shouldHaveRuns,\n test_run_id: testRunId,\n key,\n index,\n },\n scope: scope || undefined,\n };\n }\n\n // Helper to create payload object\n function createEventPayload(ShouldSkip: boolean): Input {\n return { ShouldSkip, Message: 'This is event 1' };\n }\n\n it('should push an event', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it('should push an event asynchronously', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it('should bulk push events', async () => {\n const events = [\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 1', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user123' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 2', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user456' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: 'This is event 3', ShouldSkip: false },\n additionalMetadata: { source: 'test', user_id: 'user789' },\n },\n ];\n\n const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events);\n\n expect(result.events.length).toBe(3);\n\n // Sort and verify namespacing\n const sortedEvents = 
[...events].sort((a, b) => a.key.localeCompare(b.key));\n const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key));\n\n sortedEvents.forEach((originalEvent, index) => {\n const returnedEvent = sortedResults[index];\n expect(returnedEvent.key).toBe(originalEvent.key);\n });\n }, 15000);\n\n it('should process events according to event engine behavior', async () => {\n const eventPromises = [\n createBulkPushEvent({}),\n createBulkPushEvent({\n key: 'thisisafakeeventfoobarbaz',\n shouldHaveRuns: false,\n }),\n ].map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n }, 30000);\n\n function generateBulkEvents() {\n return [\n createBulkPushEvent({\n index: 1,\n ShouldSkip: false,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 2,\n ShouldSkip: true,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 3,\n ShouldSkip: false,\n shouldHaveRuns: true,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 4,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 5,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n createBulkPushEvent({\n index: 6,\n ShouldSkip: false,\n shouldHaveRuns: false,\n scope: testRunId,\n key: 'thisisafakeeventfoobarbaz',\n }),\n ];\n }\n\n async function convertBulkToSingle(event: any) {\n return hatchet.events.push(event.key, event.payload, {\n scope: event.scope,\n additionalMetadata: event.additionalMetadata,\n priority: event.priority,\n });\n }\n\n it('should handle event skipping and filtering without bulk push', async () => {\n const cleanup = await setupEventFilter();\n\n try {\n const rawEvents = generateBulkEvents();\n const eventPromises = rawEvents.map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event's runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || '{}');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n } finally {\n await cleanup();\n }\n }, 30000);\n\n it('should filter events by payload expression not matching', async () => {\n const cleanup = await setupEventFilter('input.ShouldSkip == false && payload.foobar == 'baz'', {\n foobar: 'qux',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'false',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n expect(Object.keys(eventToRuns).length).toBe(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n\n it('should filter events by payload expression matching', async () => {\n const cleanup = await 
setupEventFilter('input.ShouldSkip == false && payload.foobar == 'baz'', {\n foobar: 'baz',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: 'This is event 1', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: 'true',\n test_run_id: testRunId,\n key: '1',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n const runs = Object.values(eventToRuns)[0] || [];\n expect(runs.length).toBeGreaterThan(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n});\n", + source: 'out/typescript/on_event/event.e2e.ts', + blocks: {}, + highlights: {}, +}; // Then replace double quotes with single quotes + +export default snippet; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts index ef9af985a..537993f6c 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/event.ts @@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'typescript ', content: - "import { hatchet } from '../hatchet-client';\nimport { Input } from './workflow';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push('simple-event:create', {\n Message: 'hello',\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n", + "import { hatchet } from '../hatchet-client';\nimport { Input } from './workflow';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push('simple-event:create', {\n Message: 'hello',\n ShouldSkip: false,\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n", source: 'out/typescript/on_event/event.ts', blocks: { pushing_an_event: { start: 6, - stop: 8, + stop: 9, }, }, highlights: {}, diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts index f3801bc52..060a81c93 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/index.ts @@ -1,7 +1,9 @@ +import evente2e from './event.e2e'; import event from './event'; import worker from './worker'; import workflow from './workflow'; +export { evente2e }; export { event }; export { worker }; export { workflow }; diff --git a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts index 7aef6781b..0e53e3e77 100644 --- a/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts +++ b/frontend/app/src/next/lib/docs/generated/snips/typescript/on_event/workflow.ts @@ -3,12 +3,12 @@ import { Snippet } from '@/next/lib/docs/generated/snips/types'; const snippet: Snippet = { language: 'typescript ', content: - "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n};\n\nexport const SIMPLE_EVENT = 'simple-event:create';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run workflow on event\nexport const lower = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n});\n\nlower.task({\n name: 
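The TypeScript e2e above mirrors the Python tests: a filter whose stored payload fails the expression's `payload.*` clause suppresses runs for scoped events, while a matching payload lets runs through. A compact Python sketch of the two cases, assuming a configured client and a known workflow id; the scope value is a placeholder:

    from hatchet_sdk import Hatchet
    from hatchet_sdk.clients.events import PushEventOptions

    hatchet = Hatchet()

    EXPR = "input.should_skip == false && payload.foobar == 'baz'"


    async def demo(workflow_id: str, scope: str) -> None:
        # First pass: stored payload fails EXPR, so no runs are expected.
        # Second pass: stored payload matches, so runs should be created.
        for stored, expect_runs in (({'foobar': 'qux'}, False), ({'foobar': 'baz'}, True)):
            f = await hatchet.filters.aio_create(
                workflow_id=workflow_id,
                expression=EXPR,
                scope=scope,
                payload=stored,  # compared against `payload.foobar` in EXPR
            )
            event = await hatchet.event.aio_push(
                'user:create',
                {'should_skip': False},
                options=PushEventOptions(scope=scope),
            )
            # `expect_runs` is whether runs should eventually appear for event.eventId.
            await hatchet.filters.aio_delete(f.metadata.id)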
'lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: 'upper',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: 'upper',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n", + "import { hatchet } from '../hatchet-client';\n\nexport type Input = {\n Message: string;\n ShouldSkip: boolean;\n};\n\nexport const SIMPLE_EVENT = 'simple-event:create';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run workflow on event\nexport const lower = hatchet.workflow({\n name: 'lower',\n // 👀 Declare the event that will trigger the workflow\n onEvents: ['simple-event:create'],\n});\n\nlower.task({\n name: 'lower',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: 'upper',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: 'upper',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n", source: 'out/typescript/on_event/workflow.ts', blocks: { run_workflow_on_event: { - start: 16, - stop: 20, + start: 17, + stop: 21, }, }, highlights: {}, diff --git a/frontend/docs/lib/generated/snips/go/run/event.ts b/frontend/docs/lib/generated/snips/go/run/event.ts index a2287b548..b1017b391 100644 --- a/frontend/docs/lib/generated/snips/go/run/event.ts +++ b/frontend/docs/lib/generated/snips/go/run/event.ts @@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\n\tv1_workflows \'github.com/hatchet-dev/hatchet/examples/go/workflows\'\n\tv1 \'github.com/hatchet-dev/hatchet/pkg/v1\'\n\t\'github.com/joho/godotenv\'\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t\'simple-event:create\',\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: \'Hello, World!\',\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\n\tv1_workflows \'github.com/hatchet-dev/hatchet/examples/go/workflows\'\n\tv1 \'github.com/hatchet-dev/hatchet/pkg/v1\'\n\t\'github.com/joho/godotenv\'\n)\n\nfunc event() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\thatchet, err := v1.NewHatchetClient()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\t// > Pushing an Event\n\terr = hatchet.Events().Push(\n\t\tcontext.Background(),\n\t\t\'simple-event:create\',\n\t\tv1_workflows.SimpleInput{\n\t\t\tMessage: \'Hello, World!\',\n\t\t},\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n}\n', 'source': 'out/go/run/event.go', 'blocks': { 'pushing_an_event': { 'start': 23, - 'stop': 29 + 'stop': 31 } }, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts index 472de9213..1fa29574a 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/assignment-affinity/run.ts @@ -2,7 
+2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t\'model\': \'fancy-ai-model-v2\',\n\t\t\t\'memory\': 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:affinity\'),\n\t\t\tName: \'affinity\',\n\t\t\tDescription: \'affinity\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()[\'model\']\n\n\t\t\t\t\tif model != \'fancy-ai-model-v3\' {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\'model\': nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\'model\': \'fancy-ai-model-v3\',\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName(\'step-one\').\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t\'model\': {\n\t\t\t\t\t\t\tValue: \'fancy-ai-model-v3\',\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\'memory\': {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:affinity\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithLabels(map[string]interface{}{\n\t\t\t\'model\': \'fancy-ai-model-v2\',\n\t\t\t\'memory\': 1024,\n\t\t}),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: 
worker.Events(\'user:create:affinity\'),\n\t\t\tName: \'affinity\',\n\t\t\tDescription: \'affinity\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tmodel := ctx.Worker().GetLabels()[\'model\']\n\n\t\t\t\t\tif model != \'fancy-ai-model-v3\' {\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\'model\': nil,\n\t\t\t\t\t\t})\n\t\t\t\t\t\t// Do something to load the model\n\t\t\t\t\t\tctx.Worker().UpsertLabels(map[string]interface{}{\n\t\t\t\t\t\t\t\'model\': \'fancy-ai-model-v3\',\n\t\t\t\t\t\t})\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).\n\t\t\t\t\tSetName(\'step-one\').\n\t\t\t\t\tSetDesiredLabels(map[string]*types.DesiredWorkerLabel{\n\t\t\t\t\t\t\'model\': {\n\t\t\t\t\t\t\tValue: \'fancy-ai-model-v3\',\n\t\t\t\t\t\t\tWeight: 10,\n\t\t\t\t\t\t},\n\t\t\t\t\t\t\'memory\': {\n\t\t\t\t\t\t\tValue: 512,\n\t\t\t\t\t\t\tRequired: true,\n\t\t\t\t\t\t\tComparator: types.ComparatorPtr(types.WorkerLabelComparator_GREATER_THAN),\n\t\t\t\t\t\t},\n\t\t\t\t\t}),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:affinity\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/assignment-affinity/run.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts index 317090687..5add613bd 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/assignment-sticky/run.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:sticky\'),\n\t\t\tName: \'sticky\',\n\t\t\tDescription: \'sticky\',\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = 
ctx.SpawnWorkflow(\'sticky-child\', nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\'error spawning workflow: %w\', err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-three\').AddParents(\'step-two\'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: \'sticky-child\',\n\t\t\tDescription: \'sticky\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:sticky\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run() (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\t// > StickyWorker\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:sticky\'),\n\t\t\tName: \'sticky\',\n\t\t\tDescription: \'sticky\',\n\t\t\t// 👀 Specify a sticky strategy when declaring the workflow\n\t\t\tStickyStrategy: types.StickyStrategyPtr(types.StickyStrategy_HARD),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\n\t\t\t\t\tsticky := true\n\n\t\t\t\t\t_, err = ctx.SpawnWorkflow(\'sticky-child\', nil, &worker.SpawnWorkflowOpts{\n\t\t\t\t\t\tSticky: &sticky,\n\t\t\t\t\t})\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\'error spawning workflow: %w\', err)\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, 
nil\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-three\').AddParents(\'step-two\'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\t// > StickyChild\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.NoTrigger(),\n\t\t\tName: \'sticky-child\',\n\t\t\tDescription: \'sticky\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: ctx.Worker().ID(),\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:sticky\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/assignment-sticky/run.go', 'blocks': { 'stickyworker': { diff --git a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts index c051ab2c3..3be4f6e55 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/cancellation/run.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/google/uuid\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/rest\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:cancellation\'),\n\t\t\tName: \'cancellation\',\n\t\t\tDescription: \'cancellation\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- \'done\'\n\t\t\t\t\t\tlog.Printf(\'context cancelled\')\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf(\'workflow never 
cancelled\')\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: \'done\',\n\t\t\t\t\t\t}, nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:cancellation\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := \'cancellation\'\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error listing workflows: %w\', err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\'no workflows found\'))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf(\'no workflows found\'))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error listing workflow runs: %w\', err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\'no workflow runs found\'))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error cancelling workflow run: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/google/uuid\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/rest\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:cancellation\'),\n\t\t\tName: \'cancellation\',\n\t\t\tDescription: \'cancellation\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tselect {\n\t\t\t\t\tcase <-ctx.Done():\n\t\t\t\t\t\tevents <- \'done\'\n\t\t\t\t\t\tlog.Printf(\'context cancelled\')\n\t\t\t\t\t\treturn nil, nil\n\t\t\t\t\tcase <-time.After(30 * time.Second):\n\t\t\t\t\t\tlog.Printf(\'workflow never cancelled\')\n\t\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\t\tMessage: \'done\',\n\t\t\t\t\t\t}, 
nil\n\t\t\t\t\t}\n\t\t\t\t}).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:cancellation\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(10 * time.Second)\n\n\t\tworkflowName := \'cancellation\'\n\n\t\tworkflows, err := c.API().WorkflowListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowListParams{\n\t\t\tName: &workflowName,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error listing workflows: %w\', err))\n\t\t}\n\n\t\tif workflows.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\'no workflows found\'))\n\t\t}\n\n\t\trows := *workflows.JSON200.Rows\n\n\t\tif len(rows) == 0 {\n\t\t\tpanic(fmt.Errorf(\'no workflows found\'))\n\t\t}\n\n\t\tworkflowId := uuid.MustParse(rows[0].Metadata.Id)\n\n\t\tworkflowRuns, err := c.API().WorkflowRunListWithResponse(context.Background(), uuid.MustParse(c.TenantId()), &rest.WorkflowRunListParams{\n\t\t\tWorkflowId: &workflowId,\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error listing workflow runs: %w\', err))\n\t\t}\n\n\t\tif workflowRuns.JSON200 == nil {\n\t\t\tpanic(fmt.Errorf(\'no workflow runs found\'))\n\t\t}\n\n\t\tworkflowRunsRows := *workflowRuns.JSON200.Rows\n\n\t\t_, err = c.API().WorkflowRunCancelWithResponse(context.Background(), uuid.MustParse(c.TenantId()), rest.WorkflowRunsCancelRequest{\n\t\t\tWorkflowRunIds: []uuid.UUID{uuid.MustParse(workflowRunsRows[0].Metadata.Id)},\n\t\t})\n\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error cancelling workflow run: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/cancellation/run.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts index 8ea621316..9c56a19a1 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/compute/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/compute\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', 
err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\tpool := \'test-pool\'\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region(\'ewr\')},\n\t}\n\n\tperformancePool := \'performance-pool\'\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region(\'ewr\')},\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\').SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\').SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/compute\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() 
{\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\tpool := \'test-pool\'\n\tbasicCompute := compute.Compute{\n\t\tPool: &pool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 1,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindSharedCPU,\n\t\tRegions: []compute.Region{compute.Region(\'ewr\')},\n\t}\n\n\tperformancePool := \'performance-pool\'\n\tperformanceCompute := compute.Compute{\n\t\tPool: &performancePool,\n\t\tNumReplicas: 1,\n\t\tCPUs: 2,\n\t\tMemoryMB: 1024,\n\t\tCPUKind: compute.ComputeKindPerformanceCPU,\n\t\tRegions: []compute.Region{compute.Region(\'ewr\')},\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\').SetCompute(&basicCompute),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\').SetCompute(&performanceCompute),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/compute/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts index 
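Where a call site already passed variadic push options, those options simply move after the two new arguments, as the compute example just above shows. Continuing the sketch given earlier (same `c`; `testEvent` stands in for the payload struct used in that example):

	err = c.Event().Push(
		context.Background(),
		"user:create:simple",
		testEvent,
		nil, // assumed: optional event priority
		nil, // assumed: optional filter scope / hint
		client.WithEventMetadata(map[string]string{
			"hello": "world",
		}),
	)
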
92dd209ca..1bb80ecbc 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/dag/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'post-user-update\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 1 got username: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 2 got username: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-one\', step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-two\', step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Username was: \' + input.Username + \', Step 3: has parents 1 and 2\' + step1Out.Message + \', \' + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-three\').AddParents(\'step-one\', \'step-two\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-one\', step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-three\', step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 4: has parents 1 and 3\' + step1Out.Message + \', \' + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-four\').AddParents(\'step-one\', \'step-three\'),\n\t\t\t\tworker.Fn(func(ctx 
worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-four\', step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 5: has parent 4\' + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-five\').AddParents(\'step-four\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\tlog.Printf(\'pushing event user:create:simple\')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create:simple\',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'post-user-update\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(1 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 1 got username: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\ttime.Sleep(2 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 2 got username: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\t\t\t\t\tctx.WorkflowInput(input)\n\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-one\', 
step1Out)\n\n\t\t\t\t\tstep2Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-two\', step2Out)\n\n\t\t\t\t\ttime.Sleep(3 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Username was: \' + input.Username + \', Step 3: has parents 1 and 2\' + step1Out.Message + \', \' + step2Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-three\').AddParents(\'step-one\', \'step-two\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep1Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-one\', step1Out)\n\n\t\t\t\t\tstep3Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-three\', step3Out)\n\n\t\t\t\t\ttime.Sleep(4 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 4: has parents 1 and 3\' + step1Out.Message + \', \' + step3Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-four\').AddParents(\'step-one\', \'step-three\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOutput, err error) {\n\t\t\t\t\tstep4Out := &stepOutput{}\n\t\t\t\t\tctx.StepOutput(\'step-four\', step4Out)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\treturn &stepOutput{\n\t\t\t\t\t\tMessage: \'Step 5: has parent 4\' + step4Out.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-five\').AddParents(\'step-four\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\tlog.Printf(\'pushing event user:create:simple\')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create:simple\',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn cleanup()\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/dag/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts index ad361ffa0..b8eee7251 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/requeue/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\'requeue:requeue\', func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn 
map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'example:event\',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype sampleEvent struct{}\n\ntype requeueInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\'requeue:requeue\', func(ctx context.Context, input *requeueInput) (result any, err error) {\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'example:event\',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// wait to register the worker for 10 seconds, to let the requeuer kick in\n\ttime.Sleep(10 * time.Second)\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/deprecated/requeue/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts index 8cb0a8dd3..27a3c3dfa 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/schedule-timeout/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/joho/godotenv\'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = 
client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println(\'step should have timed out\')\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/joho/godotenv\'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttime.Sleep(35 * time.Second)\n\n\tfmt.Println(\'step should have timed out\')\n}\n', 'source': 'out/go/z_v0/deprecated/schedule-timeout/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts index 81a78dd38..85191a280 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/timeout/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\'timeout:timeout\', func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println(\'context cancelled in \', time.Since(timeStart).Seconds(), \' seconds\')\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\tevent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype sampleEvent struct{}\n\ntype timeoutInput struct{}\n\nfunc main() {\n\terr := 
godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = worker.RegisterAction(\'timeout:timeout\', func(ctx context.Context, input *timeoutInput) (result any, err error) {\n\t\t// wait for context done signal\n\t\ttimeStart := time.Now().UTC()\n\t\t<-ctx.Done()\n\t\tfmt.Println(\'context cancelled in \', time.Since(timeStart).Seconds(), \' seconds\')\n\n\t\treturn map[string]interface{}{}, nil\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(cmdutils.InterruptChan())\n\tdefer cancel()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\tevent := sampleEvent{}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\tevent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/deprecated/timeout/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts index 529411e78..8e2fa7a95 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/deprecated/yaml/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype actionInput struct {\n\tMessage string `json:\'message\'`\n}\n\ntype actionOut struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn &actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService(\'echo\')\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': 
\'test\',\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up worker: %w\', err))\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype actionInput struct {\n\tMessage string `json:\'message\'`\n}\n\ntype actionOut struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc echo(ctx context.Context, input *actionInput) (result *actionOut, err error) {\n\treturn &actionOut{\n\t\tMessage: input.Message,\n\t}, nil\n}\n\nfunc object(ctx context.Context, input *userCreateEvent) error {\n\treturn nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New(\n\t\tclient.InitWorkflows(),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tworker, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\techoSvc := worker.NewService(\'echo\')\n\n\terr = echoSvc.RegisterAction(echo)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = echoSvc.RegisterAction(object)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := worker.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\ttime.Sleep(1 * time.Second)\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up worker: %w\', err))\n\t}\n}\n', 'source': 'out/go/z_v0/deprecated/yaml/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts index 73f7bcb88..0b2d1cde5 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/errors-test/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'os\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/errors/sentry\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf(\'this is an error\')\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != 
nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv(\'SENTRY_DSN\'),\n\t\tEnvironment: os.Getenv(\'SENTRY_ENVIRONMENT\'),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event(\'user:create\'), &worker.WorkflowJob{\n\t\tName: \'failing-workflow\',\n\t\tDescription: \'This is a failing workflow.\',\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\'echo:echo\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\'message\': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\'echo:object\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'os\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/errors/sentry\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc StepOne(ctx context.Context) (result *stepOneOutput, err error) {\n\treturn nil, fmt.Errorf(\'this is an error\')\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tsentryAlerter, err := sentry.NewSentryAlerter(&sentry.SentryAlerterOpts{\n\t\tDSN: os.Getenv(\'SENTRY_DSN\'),\n\t\tEnvironment: os.Getenv(\'SENTRY_ENVIRONMENT\'),\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t\tworker.WithErrorAlerter(sentryAlerter),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = w.On(worker.Event(\'user:create\'), &worker.WorkflowJob{\n\t\tName: \'failing-workflow\',\n\t\tDescription: \'This is a failing workflow.\',\n\t\tSteps: []*worker.WorkflowStep{\n\t\t\t{\n\t\t\t\tFunction: StepOne,\n\t\t\t},\n\t\t},\n\t})\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\'echo:echo\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\'message\': 
input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\'echo:object\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tch := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(fmt.Errorf(\'error starting worker: %w\', err))\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n', 'source': 'out/go/z_v0/errors-test/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts index 7c07e668b..9dca443e8 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/cancel-in-progress/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:\'index\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'concurrency-test-event\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'concurrency-limit\',\n\t\t\tDescription: \'This limits concurrency to 1 run at a time.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println(\'context done, returning\')\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase 
<-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'concurrency-test-event\',\n\t\t\tfirstEvent,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'concurrency-test-event\',\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype concurrencyLimitEvent struct {\n\tIndex int `json:\'index\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'concurrency-test-event\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'concurrency-limit\',\n\t\t\tDescription: \'This limits concurrency to 1 run at a time.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(1),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\t<-ctx.Done()\n\t\t\t\t\tfmt.Println(\'context done, returning\')\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfirstEvent := concurrencyLimitEvent{\n\t\t\tIndex: 0,\n\t\t}\n\n\t\t// push an event\n\t\terr = 
c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'concurrency-test-event\',\n\t\t\tfirstEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): // timeout\n\t\t}\n\n\t\t// push a second event\n\t\terr = c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'concurrency-test-event\',\n\t\t\tconcurrencyLimitEvent{\n\t\t\t\tIndex: 1,\n\t\t\t},\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/limit-concurrency/cancel-in-progress/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts index ac80c01cf..aff2a46ac 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/limit-concurrency/group-round-robin/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:\'user_id\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn \'\', fmt.Errorf(\'error getting input: %w\', err)\n\t}\n\n\treturn fmt.Sprintf(\'%d\', input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'concurrency-test-event-rr\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'concurrency-limit-round-robin\',\n\t\t\tDescription: \'This limits concurrency to 2 runs at a time.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\'error getting input: %w\', err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println(\'received event\', input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println(\'processed event\', 
input.UserId)\n\n\t\t\t\t\treturn nil, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(context.Background(), \'concurrency-test-event-rr\', event)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\treturn fmt.Errorf(\'error cleaning up: %w\', err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/client/types\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype concurrencyLimitEvent struct {\n\tUserId int `json:\'user_id\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\tinput := &concurrencyLimitEvent{}\n\terr := ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn \'\', fmt.Errorf(\'error getting input: %w\', err)\n\t}\n\n\treturn fmt.Sprintf(\'%d\', input.UserId), nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'concurrency-test-event-rr\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'concurrency-limit-round-robin\',\n\t\t\tDescription: \'This limits concurrency to 2 runs at a time.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey).MaxRuns(2).LimitStrategy(types.GroupRoundRobin),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &concurrencyLimitEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\'error getting input: %w\', err)\n\t\t\t\t\t}\n\n\t\t\t\t\tfmt.Println(\'received event\', input.UserId)\n\n\t\t\t\t\ttime.Sleep(5 * time.Second)\n\n\t\t\t\t\tfmt.Println(\'processed event\', input.UserId)\n\n\t\t\t\t\treturn nil, 
nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tinterruptCtx, cancel := cmdutils.InterruptContextFromChan(ch)\n\tdefer cancel()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\tgo func() {\n\t\t// sleep with interrupt context\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(2 * time.Second): // timeout\n\t\t}\n\n\t\tfor i := 0; i < 20; i++ {\n\t\t\tvar event concurrencyLimitEvent\n\n\t\t\tif i < 10 {\n\t\t\t\tevent = concurrencyLimitEvent{0}\n\t\t\t} else {\n\t\t\t\tevent = concurrencyLimitEvent{1}\n\t\t\t}\n\n\t\t\tc.Event().Push(\n\t\t\t\tcontext.Background(),\n\t\t\t\t\'concurrency-test-event-rr\',\n\t\t\t\tevent,\n\t\t\t\tnil,\n\t\t\t\tnil,\n\t\t\t)\n\t\t}\n\n\t\tselect {\n\t\tcase <-interruptCtx.Done(): // context cancelled\n\t\t\tfmt.Println(\'interrupted\')\n\t\t\treturn\n\t\tcase <-time.After(10 * time.Second): //timeout\n\t\t}\n\t}()\n\n\tfor {\n\t\tselect {\n\t\tcase <-interruptCtx.Done():\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\treturn fmt.Errorf(\'error cleaning up: %w\', err)\n\t\t\t}\n\t\t\treturn nil\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/limit-concurrency/group-round-robin/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts index 23b15aeb0..05fb357ce 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/logging/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:log:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = 
ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf(\'step-one: %d\', i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:log:simple\',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:log:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\tfor i := 0; i < 1000; i++ {\n\t\t\t\t\t\tctx.Log(fmt.Sprintf(\'step-one: %d\', i))\n\t\t\t\t\t}\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': 
\'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:log:simple\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/logging/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts index 5770716c2..eaa89c4d0 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/middleware/run.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\'1st-middleware\')\n\t\tevents <- \'1st-middleware\'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'testkey\', \'testvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\'2nd-middleware\')\n\t\tevents <- \'2nd-middleware\'\n\n\t\t// time the function duration\n\t\tstart := time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf(\'step function took %s\\n\', duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService(\'test\')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- \'svc-middleware\'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'svckey\', \'svcvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:middleware\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'middleware\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\ttestVal := ctx.Value(\'testkey\').(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value(\'svckey\').(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil 
{\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event user:create:middleware\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:middleware\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\'1st-middleware\')\n\t\tevents <- \'1st-middleware\'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'testkey\', \'testvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\tw.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tlog.Printf(\'2nd-middleware\')\n\t\tevents <- \'2nd-middleware\'\n\n\t\t// time the function duration\n\t\tstart := time.Now()\n\t\terr := next(ctx)\n\t\tduration := time.Since(start)\n\t\tfmt.Printf(\'step function took %s\\n\', duration)\n\t\treturn err\n\t})\n\n\ttestSvc := w.NewService(\'test\')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tevents <- \'svc-middleware\'\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'svckey\', \'svcvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:middleware\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'middleware\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\ttestVal := ctx.Value(\'testkey\').(string)\n\t\t\t\t\tevents <- testVal\n\t\t\t\t\tsvcVal := ctx.Value(\'svckey\').(string)\n\t\t\t\t\tevents <- svcVal\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err 
!= nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event user:create:middleware\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:middleware\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/middleware/run.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts index 7d86d2db5..8fd39d0bd 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/namespaced/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace(\'sample\'),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- 
\'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New(\n\t\tclient.WithNamespace(\'sample\'),\n\t)\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result 
*stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/namespaced/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts index adc2bf1d4..4436e1e06 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/register-action/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value(\'testkey\').(string)\n\t// svcVal := ctx.Value(\'svckey\').(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: \'Username is: \' + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: \'Above message is: \' + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'testkey\', \'testvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName(\'step-one\'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName(\'step-two\'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create\', 
\'user:update\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'post-user-update\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call(\'test:step-one\'),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call(\'step-two\'),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\'echo:echo\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\'message\': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\'echo:object\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'time\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserId string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc StepOne(ctx context.Context, input *userCreateEvent) (result *stepOneOutput, err error) {\n\t// could get from context\n\t// testVal := ctx.Value(\'testkey\').(string)\n\t// svcVal := ctx.Value(\'svckey\').(string)\n\n\treturn &stepOneOutput{\n\t\tMessage: \'Username is: \' + input.Username,\n\t}, nil\n}\n\nfunc StepTwo(ctx context.Context, input *stepOneOutput) (result *stepOneOutput, err error) {\n\treturn &stepOneOutput{\n\t\tMessage: \'Above message is: \' + input.Message,\n\t}, nil\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tclient, err := client.New()\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tclient,\n\t\t),\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\ttestSvc.Use(func(ctx worker.HatchetContext, next func(worker.HatchetContext) error) error {\n\t\tctx.SetContext(context.WithValue(ctx.GetContext(), \'testkey\', \'testvalue\'))\n\t\treturn next(ctx)\n\t})\n\n\terr = testSvc.RegisterAction(StepOne, worker.WithActionName(\'step-one\'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.RegisterAction(StepTwo, worker.WithActionName(\'step-two\'))\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create\', \'user:update\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'post-user-update\',\n\t\t\tDescription: 
\'This runs after an update to the user model.\',\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\t// example of calling a registered action from the worker (includes service name)\n\t\t\t\tw.Call(\'test:step-one\'),\n\t\t\t\t// example of calling a registered action from a service\n\t\t\t\ttestSvc.Call(\'step-two\'),\n\t\t\t},\n\t\t},\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t// err = worker.RegisterAction(\'echo:echo\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn map[string]interface{}{\n\t// \t\t\'message\': input.Message,\n\t// \t}, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\t// err = worker.RegisterAction(\'echo:object\', func(ctx context.Context, input *actionInput) (result any, err error) {\n\t// \treturn nil, nil\n\t// })\n\n\t// if err != nil {\n\t// \tpanic(err)\n\t// }\n\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserId: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr = client.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create\',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tfor {\n\t\tselect {\n\t\tcase <-interrupt:\n\t\t\tif err := cleanup(); err != nil {\n\t\t\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t\t\t}\n\t\tdefault:\n\t\t\ttime.Sleep(time.Second)\n\t\t}\n\t}\n}\n', 'source': 'out/go/z_v0/register-action/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts index 44a4c7fb7..307c164c6 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/retries/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf(\'error\')\n\t}\n\n\tlog.Printf(\'finished step-one\')\n\treturn &stepOneOutput{\n\t\tMessage: \'Username is: \' + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := 
worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName(\'step-one\').SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\tlog.Printf(\'pushing event user:create:simple\')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create:simple\',\n\t\ttestEvent,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf(\'error cleaning up worker: %w\', err)\n\t}\n\n\treturn nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tif err := run(cmdutils.InterruptChan(), events); err != nil {\n\t\tpanic(err)\n\t}\n}\n\nfunc getConcurrencyKey(ctx worker.HatchetContext) (string, error) {\n\treturn \'user-create\', nil\n}\n\ntype retryWorkflow struct {\n\tretries int\n}\n\nfunc (r *retryWorkflow) StepOne(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\tinput := &userCreateEvent{}\n\n\terr = ctx.WorkflowInput(input)\n\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tif r.retries < 2 {\n\t\tr.retries++\n\t\treturn nil, fmt.Errorf(\'error\')\n\t}\n\n\tlog.Printf(\'finished step-one\')\n\treturn &stepOneOutput{\n\t\tMessage: \'Username is: \' + input.Username,\n\t}, nil\n}\n\nfunc run(ch <-chan interface{}, events chan<- string) error {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t\tworker.WithMaxRuns(1),\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\ttestSvc := w.NewService(\'test\')\n\n\twk := &retryWorkflow{}\n\n\terr = testSvc.On(\n\t\tworker.Events(\'user:create:simple\'),\n\t\t&worker.WorkflowJob{\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Concurrency(getConcurrencyKey),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(wk.StepOne).SetName(\'step-one\').SetRetries(4),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error registering workflow: %w\', 
err)\n\t}\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\tlog.Printf(\'pushing event user:create:simple\')\n\n\t// push an event\n\terr = c.Event().Push(\n\t\tcontext.Background(),\n\t\t\'user:create:simple\',\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\t<-ch\n\n\tif err := cleanup(); err != nil {\n\t\treturn fmt.Errorf(\'error cleaning up worker: %w\', err)\n\t}\n\n\treturn nil\n}\n', 'source': 'out/go/z_v0/retries/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts b/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts index 91371f607..33baf0e75 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/simple/main.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + 
input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\n\t\'github.com/joho/godotenv\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/cmdutils\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\ntype userCreateEvent struct {\n\tUsername string `json:\'username\'`\n\tUserID string `json:\'user_id\'`\n\tData map[string]string `json:\'data\'`\n}\n\ntype stepOneOutput struct {\n\tMessage string `json:\'message\'`\n}\n\nfunc main() {\n\terr := godotenv.Load()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\tevents := make(chan string, 50)\n\tinterrupt := cmdutils.InterruptChan()\n\n\tcleanup, err := run(events)\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\t<-interrupt\n\n\tif err := cleanup(); err != nil {\n\t\tpanic(fmt.Errorf(\'error cleaning up: %w\', err))\n\t}\n}\n\nfunc run(events chan<- string) (func() error, error) {\n\tc, err := client.New()\n\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.RegisterWorkflow(\n\t\t&worker.WorkflowJob{\n\t\t\tOn: worker.Events(\'user:create:simple\'),\n\t\t\tName: \'simple\',\n\t\t\tDescription: \'This runs after an update to the user model.\',\n\t\t\tConcurrency: worker.Expression(\'input.user_id\'),\n\t\t\tSteps: []*worker.WorkflowStep{\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &userCreateEvent{}\n\n\t\t\t\t\terr = ctx.WorkflowInput(input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-one\')\n\t\t\t\t\tevents <- \'step-one\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Username is: \' + input.Username,\n\t\t\t\t\t}, nil\n\t\t\t\t},\n\t\t\t\t).SetName(\'step-one\'),\n\t\t\t\tworker.Fn(func(ctx worker.HatchetContext) (result *stepOneOutput, err error) {\n\t\t\t\t\tinput := &stepOneOutput{}\n\t\t\t\t\terr = ctx.StepOutput(\'step-one\', input)\n\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, err\n\t\t\t\t\t}\n\n\t\t\t\t\tlog.Printf(\'step-two\')\n\t\t\t\t\tevents <- \'step-two\'\n\n\t\t\t\t\treturn &stepOneOutput{\n\t\t\t\t\t\tMessage: \'Above message is: \' + input.Message,\n\t\t\t\t\t}, nil\n\t\t\t\t}).SetName(\'step-two\').AddParents(\'step-one\'),\n\t\t\t},\n\t\t},\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: 
\'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\tlog.Printf(\'pushing event user:create:simple\')\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:simple\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t\tclient.WithEventMetadata(map[string]string{\n\t\t\t\t\'hello\': \'world\',\n\t\t\t}),\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\tpanic(err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/simple/main.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts index 60cfe661a..5b702bc39 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/timeout/run.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.On(\n\t\tworker.Events(\'user:create:timeout\'),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:timeout\',\n\t\t\ttestEvent,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- \'done\'\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'fmt\'\n\t\'log\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(done chan<- string, job worker.WorkflowJob) (func() error, error) {\n\tc, err := client.New()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating client: %w\', err)\n\t}\n\n\tw, err := worker.NewWorker(\n\t\tworker.WithClient(\n\t\t\tc,\n\t\t),\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error creating worker: %w\', err)\n\t}\n\n\terr = w.On(\n\t\tworker.Events(\'user:create:timeout\'),\n\t\t&job,\n\t)\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error registering workflow: %w\', err)\n\t}\n\n\tgo func() {\n\t\tlog.Printf(\'pushing event\')\n\n\t\ttestEvent := userCreateEvent{\n\t\t\tUsername: \'echo-test\',\n\t\t\tUserID: \'1234\',\n\t\t\tData: map[string]string{\n\t\t\t\t\'test\': \'test\',\n\t\t\t},\n\t\t}\n\n\t\t// push an event\n\t\terr := 
c.Event().Push(\n\t\t\tcontext.Background(),\n\t\t\t\'user:create:timeout\',\n\t\t\ttestEvent,\n\t\t\tnil,\n\t\t\tnil,\n\t\t)\n\t\tif err != nil {\n\t\t\tpanic(fmt.Errorf(\'error pushing event: %w\', err))\n\t\t}\n\n\t\ttime.Sleep(20 * time.Second)\n\n\t\tdone <- \'done\'\n\t}()\n\n\tcleanup, err := w.Start()\n\tif err != nil {\n\t\treturn nil, fmt.Errorf(\'error starting worker: %w\', err)\n\t}\n\n\treturn cleanup, nil\n}\n', 'source': 'out/go/z_v0/timeout/run.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts b/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts index 2a93d4a6b..768e6ab2d 100644 --- a/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts +++ b/frontend/docs/lib/generated/snips/go/z_v0/webhook/run.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'go', - 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'errors\'\n\t\'fmt\'\n\t\'log\'\n\t\'net/http\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the HelloHandler to the /hello route\n\tmux.HandleFunc(\'/webhook\', handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: \':\' + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * time.Second,\n\t}\n\n\tdefer func(server *http.Server, ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := \'secret\'\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: \'test-\' + name,\n\t\tURL: fmt.Sprintf(\'http://localhost:%s/webhook\', port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf(\'error setting up webhook: %w\', err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf(\'pushing event\')\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n', + 'content': 'package main\n\nimport (\n\t\'context\'\n\t\'errors\'\n\t\'fmt\'\n\t\'log\'\n\t\'net/http\'\n\t\'time\'\n\n\t\'github.com/hatchet-dev/hatchet/pkg/client\'\n\t\'github.com/hatchet-dev/hatchet/pkg/worker\'\n)\n\nfunc run(\n\tname string,\n\tw *worker.Worker,\n\tport string,\n\thandler func(w http.ResponseWriter, r *http.Request), c client.Client, workflow string, event string,\n) error {\n\t// create webserver to handle webhook requests\n\tmux := http.NewServeMux()\n\n\t// Register the HelloHandler to the /hello route\n\tmux.HandleFunc(\'/webhook\', handler)\n\n\t// Create a custom server\n\tserver := &http.Server{\n\t\tAddr: \':\' + port,\n\t\tHandler: mux,\n\t\tReadTimeout: 10 * time.Second,\n\t\tWriteTimeout: 10 * time.Second,\n\t\tIdleTimeout: 15 * 
time.Second,\n\t}\n\n\tdefer func(server *http.Server, ctx context.Context) {\n\t\terr := server.Shutdown(ctx)\n\t\tif err != nil {\n\t\t\tpanic(err)\n\t\t}\n\t}(server, context.Background())\n\n\tgo func() {\n\t\tif err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {\n\t\t\tpanic(err)\n\t\t}\n\t}()\n\n\tsecret := \'secret\'\n\tif err := w.RegisterWebhook(worker.RegisterWebhookWorkerOpts{\n\t\tName: \'test-\' + name,\n\t\tURL: fmt.Sprintf(\'http://localhost:%s/webhook\', port),\n\t\tSecret: &secret,\n\t}); err != nil {\n\t\treturn fmt.Errorf(\'error setting up webhook: %w\', err)\n\t}\n\n\ttime.Sleep(30 * time.Second)\n\n\tlog.Printf(\'pushing event\')\n\n\ttestEvent := userCreateEvent{\n\t\tUsername: \'echo-test\',\n\t\tUserID: \'1234\',\n\t\tData: map[string]string{\n\t\t\t\'test\': \'test\',\n\t\t},\n\t}\n\n\t// push an event\n\terr := c.Event().Push(\n\t\tcontext.Background(),\n\t\tevent,\n\t\ttestEvent,\n\t\tnil,\n\t\tnil,\n\t)\n\tif err != nil {\n\t\treturn fmt.Errorf(\'error pushing event: %w\', err)\n\t}\n\n\ttime.Sleep(5 * time.Second)\n\n\treturn nil\n}\n', 'source': 'out/go/z_v0/webhook/run.go', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/dag/worker.ts b/frontend/docs/lib/generated/snips/python/dag/worker.ts index dc84bb3f8..cf8892420 100644 --- a/frontend/docs/lib/generated/snips/python/dag/worker.ts +++ b/frontend/docs/lib/generated/snips/python/dag/worker.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name=\'DAGWorkflow\')\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = (await ctx.task_output(step2)).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n \'executed step4\',\n time.strftime(\'%H:%M:%S\', time.localtime()),\n input,\n ctx.task_output(step1),\n await ctx.task_output(step3),\n )\n return {\n \'step4\': \'step4\',\n }\n\n\ndef main() -> None:\n worker = hatchet.worker(\'dag-worker\', workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n', + 'content': 'import random\nimport time\nfrom datetime import timedelta\n\nfrom pydantic import BaseModel\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\n\nclass StepOutput(BaseModel):\n random_number: int\n\n\nclass RandomSum(BaseModel):\n sum: int\n\n\nhatchet = Hatchet(debug=True)\n\ndag_workflow = hatchet.workflow(name=\'DAGWorkflow\')\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\ndef step1(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 
100))\n\n\n@dag_workflow.task(execution_timeout=timedelta(seconds=5))\nasync def step2(input: EmptyModel, ctx: Context) -> StepOutput:\n return StepOutput(random_number=random.randint(1, 100))\n\n\n@dag_workflow.task(parents=[step1, step2])\nasync def step3(input: EmptyModel, ctx: Context) -> RandomSum:\n one = ctx.task_output(step1).random_number\n two = ctx.task_output(step2).random_number\n\n return RandomSum(sum=one + two)\n\n\n@dag_workflow.task(parents=[step1, step3])\nasync def step4(input: EmptyModel, ctx: Context) -> dict[str, str]:\n print(\n \'executed step4\',\n time.strftime(\'%H:%M:%S\', time.localtime()),\n input,\n ctx.task_output(step1),\n ctx.task_output(step3),\n )\n return {\n \'step4\': \'step4\',\n }\n\n\ndef main() -> None:\n worker = hatchet.worker(\'dag-worker\', workflows=[dag_workflow])\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n', 'source': 'out/python/dag/worker.py', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/events/test_event.ts b/frontend/docs/lib/generated/snips/python/events/test_event.ts index ef50c7926..6f46a186b 100644 --- a/frontend/docs/lib/generated/snips/python/events/test_event.ts +++ b/frontend/docs/lib/generated/snips/python/events/test_event.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'import pytest\n\nfrom hatchet_sdk.clients.events import BulkPushEventOptions, BulkPushEventWithMetadata\nfrom hatchet_sdk.hatchet import Hatchet\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push(\'user:create\', {\'test\': \'test\'})\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push(\'user:create\', {\'test\': \'test\'})\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n\n events = [\n BulkPushEventWithMetadata(\n key=\'event1\',\n payload={\'message\': \'This is event 1\'},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user123\'},\n ),\n BulkPushEventWithMetadata(\n key=\'event2\',\n payload={\'message\': \'This is event 2\'},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user456\'},\n ),\n BulkPushEventWithMetadata(\n key=\'event3\',\n payload={\'message\': \'This is event 3\'},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user789\'},\n ),\n ]\n opts = BulkPushEventOptions(namespace=\'bulk-test\')\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = \'bulk-test\'\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(sorted_events, sorted_returned_events):\n assert returned_event.key == namespace + original_event.key\n', + 'content': 'import asyncio\nimport json\nfrom contextlib import asynccontextmanager\nfrom typing import AsyncGenerator, cast\nfrom uuid import uuid4\n\nimport pytest\nfrom pydantic import BaseModel\n\nfrom examples.events.worker import EventWorkflowInput, event_workflow\nfrom hatchet_sdk.clients.events import (\n BulkPushEventOptions,\n BulkPushEventWithMetadata,\n 
PushEventOptions,\n)\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\nfrom hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary\nfrom hatchet_sdk.contracts.events_pb2 import Event\nfrom hatchet_sdk.hatchet import Hatchet\n\n\nclass ProcessedEvent(BaseModel):\n id: str\n payload: dict[str, str | bool]\n meta: dict[str, str | bool | int]\n should_have_runs: bool\n test_run_id: str\n\n def __hash__(self) -> int:\n return hash(self.model_dump_json())\n\n\n@asynccontextmanager\nasync def event_filter(\n hatchet: Hatchet,\n test_run_id: str,\n expression: str | None = None,\n payload: dict[str, str] = {},\n) -> AsyncGenerator[None, None]:\n expression = (\n expression\n or f\'input.should_skip == false && payload.testRunId == \'{test_run_id}\'\'\n )\n\n f = await hatchet.filters.aio_create(\n workflow_id=event_workflow.id,\n expression=expression,\n scope=test_run_id,\n payload={\'testRunId\': test_run_id, **payload},\n )\n\n yield\n\n await hatchet.filters.aio_delete(f.metadata.id)\n\n\nasync def fetch_runs_for_event(\n hatchet: Hatchet, event: Event\n) -> tuple[ProcessedEvent, list[V1TaskSummary]]:\n runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId)\n\n meta = (\n cast(dict[str, str | int | bool], json.loads(event.additionalMetadata))\n if event.additionalMetadata\n else {}\n )\n payload = (\n cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {}\n )\n\n return (\n ProcessedEvent(\n id=event.eventId,\n payload=payload,\n meta=meta,\n should_have_runs=meta.get(\'should_have_runs\', False) is True,\n test_run_id=cast(str, meta[\'test_run_id\']),\n ),\n runs.rows or [],\n )\n\n\nasync def wait_for_result(\n hatchet: Hatchet, events: list[Event]\n) -> dict[ProcessedEvent, list[V1TaskSummary]]:\n await asyncio.sleep(3)\n\n persisted = (await hatchet.event.aio_list(limit=100)).rows or []\n\n assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted})\n\n iters = 0\n while True:\n print(\'Waiting for event runs to complete...\')\n if iters > 15:\n print(\'Timed out waiting for event runs to complete.\')\n return {}\n\n iters += 1\n\n event_runs = await asyncio.gather(\n *[fetch_runs_for_event(hatchet, event) for event in events]\n )\n\n all_empty = all(not event_run for _, event_run in event_runs)\n\n if all_empty:\n await asyncio.sleep(1)\n continue\n\n event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs}\n\n any_queued_or_running = any(\n run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING]\n for runs in event_id_to_runs.values()\n for run in runs\n )\n\n if any_queued_or_running:\n await asyncio.sleep(1)\n continue\n\n break\n\n return event_id_to_runs\n\n\nasync def assert_event_runs_processed(\n event: ProcessedEvent,\n runs: list[V1TaskSummary],\n) -> None:\n if event.should_have_runs:\n assert len(runs) > 0\n else:\n assert len(runs) == 0\n\n\ndef bpi(\n index: int = 1,\n test_run_id: str = \'\',\n should_skip: bool = False,\n should_have_runs: bool = True,\n key: str = \'user:create\',\n payload: dict[str, str] = {},\n scope: str | None = None,\n) -> BulkPushEventWithMetadata:\n return BulkPushEventWithMetadata(\n key=key,\n payload={\n \'should_skip\': should_skip,\n **payload,\n },\n additional_metadata={\n \'should_have_runs\': should_have_runs,\n \'test_run_id\': test_run_id,\n \'key\': index,\n },\n scope=scope,\n )\n\n\ndef cp(should_skip: bool) -> dict[str, bool]:\n return 
EventWorkflowInput(should_skip=should_skip).model_dump()\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_push(hatchet: Hatchet) -> None:\n e = hatchet.event.push(\'user:create\', cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_async_event_push(hatchet: Hatchet) -> None:\n e = await hatchet.event.aio_push(\'user:create\', cp(False))\n\n assert e.eventId is not None\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_async_event_bulk_push(hatchet: Hatchet) -> None:\n events = [\n BulkPushEventWithMetadata(\n key=\'event1\',\n payload={\'message\': \'This is event 1\', \'should_skip\': False},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user123\'},\n ),\n BulkPushEventWithMetadata(\n key=\'event2\',\n payload={\'message\': \'This is event 2\', \'should_skip\': False},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user456\'},\n ),\n BulkPushEventWithMetadata(\n key=\'event3\',\n payload={\'message\': \'This is event 3\', \'should_skip\': False},\n additional_metadata={\'source\': \'test\', \'user_id\': \'user789\'},\n ),\n ]\n opts = BulkPushEventOptions(namespace=\'bulk-test\')\n\n e = await hatchet.event.aio_bulk_push(events, opts)\n\n assert len(e) == 3\n\n # Sort both lists of events by their key to ensure comparison order\n sorted_events = sorted(events, key=lambda x: x.key)\n sorted_returned_events = sorted(e, key=lambda x: x.key)\n namespace = \'bulk-test\'\n\n # Check that the returned events match the original events\n for original_event, returned_event in zip(sorted_events, sorted_returned_events):\n assert returned_event.key == namespace + original_event.key\n\n\n@pytest.fixture(scope=\'function\')\ndef test_run_id() -> str:\n return str(uuid4())\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_engine_behavior(hatchet: Hatchet) -> None:\n test_run_id = str(uuid4())\n events = [\n bpi(\n test_run_id=test_run_id,\n ),\n bpi(\n test_run_id=test_run_id,\n key=\'thisisafakeeventfoobarbaz\',\n should_have_runs=False,\n ),\n ]\n\n print(\'Events:\', events)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n print(\'Result:\', result)\n\n runs = await wait_for_result(hatchet, result)\n\n for event, r in runs.items():\n await assert_event_runs_processed(event, r)\n\n\ndef gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]:\n return [\n bpi(\n index=1,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n ),\n bpi(\n index=2,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=True,\n ),\n bpi(\n index=3,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=True,\n scope=test_run_id,\n ),\n bpi(\n index=4,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n ),\n bpi(\n index=5,\n test_run_id=test_run_id,\n should_skip=True,\n should_have_runs=False,\n scope=test_run_id,\n key=\'thisisafakeeventfoobarbaz\',\n ),\n bpi(\n index=6,\n test_run_id=test_run_id,\n should_skip=False,\n should_have_runs=False,\n scope=test_run_id,\n key=\'thisisafakeeventfoobarbaz\',\n ),\n ]\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(hatchet, test_run_id):\n events = gen_bulk_events(test_run_id)\n\n result = await hatchet.event.aio_bulk_push(events)\n\n runs = await wait_for_result(hatchet, result)\n for e, r in runs.items():\n await 
assert_event_runs_processed(e, r)\n\n\nasync def bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event:\n return await hatchet.event.aio_push(\n event_key=event.key,\n payload=event.payload,\n options=PushEventOptions(\n scope=event.scope,\n additional_metadata=event.additional_metadata,\n priority=event.priority,\n ),\n )\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_skipping_filtering_no_bulk(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(hatchet, test_run_id):\n raw_events = gen_bulk_events(test_run_id)\n events = await asyncio.gather(\n *[bulk_to_single(hatchet, event) for event in raw_events]\n )\n\n result = await wait_for_result(hatchet, events)\n for event, runs in result.items():\n await assert_event_runs_processed(event, runs)\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n \'input.should_skip == false && payload.foobar == \'baz\'\',\n {\'foobar\': \'qux\'},\n ):\n event = await hatchet.event.aio_push(\n event_key=\'user:create\',\n payload={\'message\': \'This is event 1\', \'should_skip\': False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \'should_have_runs\': False,\n \'test_run_id\': test_run_id,\n \'key\': 1,\n },\n ),\n )\n\n runs = await wait_for_result(hatchet, [event])\n assert len(runs) == 0\n\n\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_event_payload_filtering_with_payload_match(\n hatchet: Hatchet, test_run_id: str\n) -> None:\n async with event_filter(\n hatchet,\n test_run_id,\n \'input.should_skip == false && payload.foobar == \'baz\'\',\n {\'foobar\': \'baz\'},\n ):\n event = await hatchet.event.aio_push(\n event_key=\'user:create\',\n payload={\'message\': \'This is event 1\', \'should_skip\': False},\n options=PushEventOptions(\n scope=test_run_id,\n additional_metadata={\n \'should_have_runs\': True,\n \'test_run_id\': test_run_id,\n \'key\': 1,\n },\n ),\n )\n runs = await wait_for_result(hatchet, [event])\n assert len(runs) == 1\n', 'source': 'out/python/events/test_event.py', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/events/worker.ts b/frontend/docs/lib/generated/snips/python/events/worker.ts index d51e7d373..e7926fdae 100644 --- a/frontend/docs/lib/generated/snips/python/events/worker.ts +++ b/frontend/docs/lib/generated/snips/python/events/worker.ts @@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'from hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet()\n\n# > Event trigger\nevent_workflow = hatchet.workflow(name=\'EventWorkflow\', on_events=[\'user:create\'])\n\n\n@event_workflow.task()\ndef task(input: EmptyModel, ctx: Context) -> None:\n print(\'event received\')\n', + 'content': 'from pydantic import BaseModel\n\nfrom hatchet_sdk import Context, Hatchet\n\nhatchet = Hatchet()\nEVENT_KEY = \'user:create\'\n\n\nclass EventWorkflowInput(BaseModel):\n should_skip: bool\n\n\n# > Event trigger\nevent_workflow = hatchet.workflow(\n name=\'EventWorkflow\',\n on_events=[EVENT_KEY],\n input_validator=EventWorkflowInput,\n)\n\n\n@event_workflow.task()\ndef task(input: EventWorkflowInput, ctx: Context) -> None:\n print(\'event received\')\n\n\ndef main() -> None:\n worker = hatchet.worker(name=\'EventWorker\', workflows=[event_workflow])\n\n 
worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n', 'source': 'out/python/events/worker.py', 'blocks': { 'event_trigger': { - 'start': 6, - 'stop': 6 + 'start': 14, + 'stop': 18 } }, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/priority/test_priority.ts b/frontend/docs/lib/generated/snips/python/priority/test_priority.ts index 36cb57c90..46abd7599 100644 --- a/frontend/docs/lib/generated/snips/python/priority/test_priority.ts +++ b/frontend/docs/lib/generated/snips/python/priority/test_priority.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'import asyncio\nfrom datetime import datetime, timedelta\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, AsyncGenerator, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal[\'low\', \'medium\', \'high\', \'default\']\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case \'high\':\n return 3\n case \'medium\':\n return 2\n case \'low\':\n return 1\n case \'default\':\n return DEFAULT_PRIORITY\n case _:\n raise ValueError(f\'Invalid priority: {priority}\')\n\n\n@pytest_asyncio.fixture(loop_scope=\'session\', scope=\'function\')\nasync def dummy_runs() -> None:\n priority: Priority = \'high\'\n\n await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'type\': \'dummy\',\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return None\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\'low\', \'medium\', \'high\', \'default\']\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'test_run_id\': test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or 
\'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = [\'low\', \'medium\', \'high\', \'default\']\n run_at = datetime.now() + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'test_run_id\': test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\'Timed out waiting for runs to finish\')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\'One or more runs failed or were cancelled\')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or \'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope=\'session\', scope=\'function\')\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\'low\', \'medium\', \'high\']\n n = 30\n\n crons 
= await asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f\'{test_run_id}-cron-{i}\',\n expression=\'* * * * *\',\n input={},\n additional_metadata={\n \'trigger\': \'cron\',\n \'test_run_id\': test_run_id,\n \'priority\': (priority := choice(choices)),\n \'key\': str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now()\n next_minute = now.replace(second=0, microsecond=0, minute=now.minute + 1)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\'Timed out waiting for runs to finish\')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\'One or more runs failed or were cancelled\')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or \'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n', + 'content': 'import asyncio\nfrom datetime import datetime, timedelta\nfrom random import choice\nfrom subprocess import Popen\nfrom typing import Any, AsyncGenerator, Literal\nfrom uuid import uuid4\n\nimport pytest\nimport pytest_asyncio\nfrom pydantic import BaseModel\n\nfrom examples.priority.worker import DEFAULT_PRIORITY, SLEEP_TIME, priority_workflow\nfrom hatchet_sdk import Hatchet, ScheduleTriggerWorkflowOptions, TriggerWorkflowOptions\nfrom hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus\n\nPriority = Literal[\'low\', \'medium\', \'high\', \'default\']\n\n\nclass RunPriorityStartedAt(BaseModel):\n priority: Priority\n started_at: datetime\n finished_at: datetime\n\n\ndef priority_to_int(priority: Priority) -> int:\n match priority:\n case \'high\':\n return 3\n case \'medium\':\n return 2\n case \'low\':\n return 1\n case 
\'default\':\n return DEFAULT_PRIORITY\n case _:\n raise ValueError(f\'Invalid priority: {priority}\')\n\n\n@pytest_asyncio.fixture(loop_scope=\'session\', scope=\'function\')\nasync def dummy_runs() -> None:\n priority: Priority = \'high\'\n\n await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority)),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'type\': \'dummy\',\n },\n )\n )\n for ix in range(40)\n ]\n )\n\n await asyncio.sleep(3)\n\n return None\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\'low\', \'medium\', \'high\', \'default\']\n N = 30\n\n run_refs = await priority_workflow.aio_run_many_no_wait(\n [\n priority_workflow.create_bulk_run_item(\n options=TriggerWorkflowOptions(\n priority=(priority_to_int(priority := choice(choices))),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'test_run_id\': test_run_id,\n },\n )\n )\n for ix in range(N)\n ]\n )\n\n await asyncio.gather(*[r.aio_result() for r in run_refs])\n\n workflows = (\n await hatchet.workflows.aio_list(workflow_name=priority_workflow.name)\n ).rows\n\n assert workflows\n\n workflow = next((w for w in workflows if w.name == priority_workflow.name), None)\n\n assert workflow\n\n assert workflow.name == priority_workflow.name\n\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow.metadata.id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or \'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(run_refs)\n assert len(runs_ids_started_ats) == N\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority_via_scheduling(\n hatchet: Hatchet, dummy_runs: None, on_demand_worker: Popen[Any]\n) -> None:\n test_run_id = str(uuid4())\n sleep_time = 3\n n = 30\n choices: list[Priority] = [\'low\', \'medium\', \'high\', \'default\']\n run_at = datetime.now() + timedelta(seconds=sleep_time)\n\n versions = await asyncio.gather(\n *[\n priority_workflow.aio_schedule(\n run_at=run_at,\n options=ScheduleTriggerWorkflowOptions(\n priority=(priority_to_int(priority := 
choice(choices))),\n additional_metadata={\n \'priority\': priority,\n \'key\': ix,\n \'test_run_id\': test_run_id,\n },\n ),\n )\n for ix in range(n)\n ]\n )\n\n await asyncio.sleep(sleep_time * 2)\n\n workflow_id = versions[0].workflow_id\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\'Timed out waiting for runs to finish\')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n if not runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\'One or more runs failed or were cancelled\')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or \'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == len(versions)\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n\n\n@pytest_asyncio.fixture(loop_scope=\'session\', scope=\'function\')\nasync def crons(\n hatchet: Hatchet, dummy_runs: None\n) -> AsyncGenerator[tuple[str, str, int], None]:\n test_run_id = str(uuid4())\n choices: list[Priority] = [\'low\', \'medium\', \'high\']\n n = 30\n\n crons = await asyncio.gather(\n *[\n hatchet.cron.aio_create(\n workflow_name=priority_workflow.name,\n cron_name=f\'{test_run_id}-cron-{i}\',\n expression=\'* * * * *\',\n input={},\n additional_metadata={\n \'trigger\': \'cron\',\n \'test_run_id\': test_run_id,\n \'priority\': (priority := choice(choices)),\n \'key\': str(i),\n },\n priority=(priority_to_int(priority)),\n )\n for i in range(n)\n ]\n )\n\n yield crons[0].workflow_id, test_run_id, n\n\n await asyncio.gather(*[hatchet.cron.aio_delete(cron.metadata.id) for cron in crons])\n\n\ndef time_until_next_minute() -> float:\n now = datetime.now()\n next_minute = (now + timedelta(minutes=1)).replace(second=0, microsecond=0)\n\n return (next_minute - now).total_seconds()\n\n\n@pytest.mark.parametrize(\n \'on_demand_worker\',\n [\n (\n [\'poetry\', \'run\', \'python\', \'examples/priority/worker.py\', \'--slots\', \'1\'],\n 8003,\n )\n ],\n indirect=True,\n)\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_priority_via_cron(\n hatchet: Hatchet, crons: tuple[str, str, int], on_demand_worker: Popen[Any]\n) -> None:\n workflow_id, test_run_id, n = crons\n\n await asyncio.sleep(time_until_next_minute() + 10)\n\n attempts = 0\n\n while True:\n if attempts >= SLEEP_TIME * n * 2:\n raise TimeoutError(\'Timed out waiting for runs to finish\')\n\n attempts += 1\n await asyncio.sleep(1)\n runs = await hatchet.runs.aio_list(\n workflow_ids=[workflow_id],\n additional_metadata={\n \'test_run_id\': test_run_id,\n },\n limit=1_000,\n )\n\n if not 
runs.rows:\n continue\n\n if any(\n r.status in [V1TaskStatus.FAILED, V1TaskStatus.CANCELLED] for r in runs.rows\n ):\n raise ValueError(\'One or more runs failed or were cancelled\')\n\n if all(r.status == V1TaskStatus.COMPLETED for r in runs.rows):\n break\n\n runs_ids_started_ats: list[RunPriorityStartedAt] = sorted(\n [\n RunPriorityStartedAt(\n priority=(r.additional_metadata or {}).get(\'priority\') or \'low\',\n started_at=r.started_at or datetime.min,\n finished_at=r.finished_at or datetime.min,\n )\n for r in runs.rows\n ],\n key=lambda x: x.started_at,\n )\n\n assert len(runs_ids_started_ats) == n\n\n for i in range(len(runs_ids_started_ats) - 1):\n curr = runs_ids_started_ats[i]\n nxt = runs_ids_started_ats[i + 1]\n\n \'\'\'Run start times should be in order of priority\'\'\'\n assert priority_to_int(curr.priority) >= priority_to_int(nxt.priority)\n\n \'\'\'Runs should proceed one at a time\'\'\'\n assert curr.finished_at <= nxt.finished_at\n assert nxt.finished_at >= nxt.started_at\n\n \'\'\'Runs should finish after starting (this is mostly a test for engine datetime handling bugs)\'\'\'\n assert curr.finished_at >= curr.started_at\n', 'source': 'out/python/priority/test_priority.py', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/simple/index.ts b/frontend/docs/lib/generated/snips/python/simple/index.ts index c443f556e..8bdb03230 100644 --- a/frontend/docs/lib/generated/snips/python/simple/index.ts +++ b/frontend/docs/lib/generated/snips/python/simple/index.ts @@ -1,5 +1,7 @@ +import test_simple_workflow from './test_simple_workflow'; import trigger from './trigger'; import worker from './worker'; +export { test_simple_workflow } export { trigger } export { worker } diff --git a/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts b/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts new file mode 100644 index 000000000..3718a6ec9 --- /dev/null +++ b/frontend/docs/lib/generated/snips/python/simple/test_simple_workflow.ts @@ -0,0 +1,11 @@ +import { Snippet } from '@/lib/generated/snips/types'; + +const snippet: Snippet = { + 'language': 'python', + 'content': 'import pytest\n\nfrom examples.simple.worker import simple, simple_durable\nfrom hatchet_sdk import EmptyModel\nfrom hatchet_sdk.runnables.standalone import Standalone\n\n\n@pytest.mark.parametrize(\'task\', [simple, simple_durable])\n@pytest.mark.asyncio(loop_scope=\'session\')\nasync def test_simple_workflow_running_options(\n task: Standalone[EmptyModel, dict[str, str]]\n) -> None:\n x1 = task.run()\n x2 = await task.aio_run()\n\n x3 = task.run_many([task.create_bulk_run_item()])[0]\n x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0]\n\n x5 = task.run_no_wait().result()\n x6 = (await task.aio_run_no_wait()).result()\n x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0]\n x8 = [\n x.result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n x9 = await task.run_no_wait().aio_result()\n x10 = await (await task.aio_run_no_wait()).aio_result()\n x11 = [\n await x.aio_result()\n for x in task.run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n x12 = [\n await x.aio_result()\n for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()])\n ][0]\n\n assert all(\n x == {\'result\': \'Hello, world!\'}\n for x in [x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12]\n )\n', + 'source': 'out/python/simple/test_simple_workflow.py', + 'blocks': {}, + 'highlights': 
{} +}; // Then replace double quotes with single quotes + +export default snippet; diff --git a/frontend/docs/lib/generated/snips/python/simple/trigger.ts b/frontend/docs/lib/generated/snips/python/simple/trigger.ts index f3b9ae9cd..09988e00a 100644 --- a/frontend/docs/lib/generated/snips/python/simple/trigger.ts +++ b/frontend/docs/lib/generated/snips/python/simple/trigger.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'from examples.simple.worker import step1\n\nstep1.run()\n', + 'content': 'from examples.simple.worker import simple\n\nsimple.run()\n', 'source': 'out/python/simple/trigger.py', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/simple/worker.ts b/frontend/docs/lib/generated/snips/python/simple/worker.ts index 22d105e47..193b0c062 100644 --- a/frontend/docs/lib/generated/snips/python/simple/worker.ts +++ b/frontend/docs/lib/generated/snips/python/simple/worker.ts @@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': '# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task(name=\'SimpleWorkflow\')\ndef step1(input: EmptyModel, ctx: Context) -> None:\n print(\'executed step1\')\n\n\ndef main() -> None:\n worker = hatchet.worker(\'test-worker\', slots=1, workflows=[step1])\n worker.start()\n\n\n\nif __name__ == \'__main__\':\n main()\n', + 'content': '# > Simple\n\nfrom hatchet_sdk import Context, EmptyModel, Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\n@hatchet.task()\ndef simple(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\'result\': \'Hello, world!\'}\n\n\n@hatchet.durable_task()\ndef simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]:\n return {\'result\': \'Hello, world!\'}\n\n\ndef main() -> None:\n worker = hatchet.worker(\'test-worker\', workflows=[simple, simple_durable])\n worker.start()\n\n\n\nif __name__ == \'__main__\':\n main()\n', 'source': 'out/python/simple/worker.py', 'blocks': { 'simple': { 'start': 2, - 'stop': 17 + 'stop': 22 } }, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/python/worker.ts b/frontend/docs/lib/generated/snips/python/worker.ts index 7d65d851b..ab3c4993e 100644 --- a/frontend/docs/lib/generated/snips/python/worker.ts +++ b/frontend/docs/lib/generated/snips/python/worker.ts @@ -2,7 +2,7 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'python', - 'content': 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom 
examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'e2e-test-worker\',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n lifespan_task,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n', + 'content': 'from examples.affinity_workers.worker import affinity_worker_workflow\nfrom examples.bulk_fanout.worker import bulk_child_wf, bulk_parent_wf\nfrom examples.cancellation.worker import cancellation_workflow\nfrom examples.concurrency_limit.worker import concurrency_limit_workflow\nfrom examples.concurrency_limit_rr.worker import concurrency_limit_rr_workflow\nfrom examples.concurrency_multiple_keys.worker import concurrency_multiple_keys_workflow\nfrom examples.concurrency_workflow_level.worker import (\n concurrency_workflow_level_workflow,\n)\nfrom examples.dag.worker import dag_workflow\nfrom examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf\nfrom examples.durable.worker import durable_workflow\nfrom examples.events.worker import event_workflow\nfrom examples.fanout.worker import child_wf, parent_wf\nfrom examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent\nfrom examples.lifespans.simple import lifespan, lifespan_task\nfrom examples.logger.workflow import logging_workflow\nfrom examples.non_retryable.worker import non_retryable_workflow\nfrom examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details\nfrom examples.simple.worker import simple, simple_durable\nfrom examples.timeout.worker import refresh_timeout_wf, timeout_wf\nfrom examples.waits.worker import task_condition_workflow\nfrom hatchet_sdk import Hatchet\n\nhatchet = Hatchet(debug=True)\n\n\ndef main() -> None:\n worker = hatchet.worker(\n \'e2e-test-worker\',\n slots=100,\n workflows=[\n affinity_worker_workflow,\n bulk_child_wf,\n bulk_parent_wf,\n concurrency_limit_workflow,\n concurrency_limit_rr_workflow,\n concurrency_multiple_keys_workflow,\n dag_workflow,\n dedupe_child_wf,\n dedupe_parent_wf,\n durable_workflow,\n child_wf,\n event_workflow,\n parent_wf,\n on_failure_wf,\n on_failure_wf_with_details,\n logging_workflow,\n timeout_wf,\n refresh_timeout_wf,\n task_condition_workflow,\n cancellation_workflow,\n sync_fanout_parent,\n sync_fanout_child,\n non_retryable_workflow,\n concurrency_workflow_level_workflow,\n lifespan_task,\n simple,\n simple_durable,\n ],\n lifespan=lifespan,\n )\n\n worker.start()\n\n\nif __name__ == \'__main__\':\n main()\n', 'source': 'out/python/worker.py', 'blocks': {}, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts 
b/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts new file mode 100644 index 000000000..adb0b4a93 --- /dev/null +++ b/frontend/docs/lib/generated/snips/typescript/on_event/event.e2e.ts @@ -0,0 +1,11 @@ +import { Snippet } from '@/lib/generated/snips/types'; + +const snippet: Snippet = { + 'language': 'typescript ', + 'content': 'import sleep from \'@hatchet-dev/typescript-sdk-dev/typescript-sdk/util/sleep\';\nimport { randomUUID } from \'crypto\';\nimport { Event } from \'@hatchet-dev/typescript-sdk-dev/typescript-sdk/protoc/events\';\nimport { SIMPLE_EVENT, lower, Input } from \'./workflow\';\nimport { hatchet } from \'../hatchet-client\';\nimport { Worker } from \'../../client/worker/worker\';\n\nxdescribe(\'events-e2e\', () => {\n let worker: Worker;\n let testRunId: string;\n\n beforeEach(async () => {\n testRunId = randomUUID();\n\n worker = await hatchet.worker(\'event-worker\');\n await worker.registerWorkflow(lower);\n\n void worker.start();\n });\n\n afterAll(async () => {\n await worker.stop();\n await sleep(2000);\n });\n\n async function setupEventFilter(expression?: string, payload: Record<string, string> = {}) {\n const finalExpression =\n expression || `input.ShouldSkip == false && payload.testRunId == \'${testRunId}\'`;\n\n const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id;\n\n const filter = await hatchet.filters.create({\n workflowId,\n expression: finalExpression,\n scope: testRunId,\n payload: { testRunId, ...payload },\n });\n\n return async () => {\n await hatchet.filters.delete(filter.metadata.id);\n };\n }\n\n // Helper function to wait for events to process and fetch runs\n async function waitForEventsToProcess(events: Event[]): Promise<Record<string, any[]>> {\n await sleep(3000);\n\n const persisted = (await hatchet.events.list({ limit: 100 })).rows || [];\n\n // Ensure all our events are persisted\n const eventIds = new Set(events.map((e) => e.eventId));\n const persistedIds = new Set(persisted.map((e) => e.metadata.id));\n expect(Array.from(eventIds).every((id) => persistedIds.has(id))).toBeTruthy();\n\n let attempts = 0;\n const maxAttempts = 15;\n const eventToRuns: Record<string, any[]> = {};\n\n while (true) {\n console.log(\'Waiting for event runs to complete...\');\n if (attempts > maxAttempts) {\n console.log(\'Timed out waiting for event runs to complete.\');\n return {};\n }\n\n attempts += 1;\n\n // For each event, fetch its runs\n const runsPromises = events.map(async (event) => {\n const runs = await hatchet.runs.list({\n triggeringEventExternalId: event.eventId,\n });\n\n // Extract metadata from event\n const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {};\n\n const payload = event.payload ?
JSON.parse(event.payload) : {};\n\n return {\n event: {\n id: event.eventId,\n payload,\n meta,\n shouldHaveRuns: Boolean(meta.should_have_runs),\n testRunId: meta.test_run_id,\n },\n runs: runs.rows || [],\n };\n });\n\n const eventRuns = await Promise.all(runsPromises);\n\n // If all events have no runs yet, wait and retry\n if (eventRuns.every(({ runs }) => runs.length === 0)) {\n await sleep(1000);\n\n continue;\n }\n\n // Store runs by event ID\n for (const { event, runs } of eventRuns) {\n eventToRuns[event.id] = runs;\n }\n\n // Check if any runs are still in progress\n const anyInProgress = Object.values(eventToRuns).some((runs) =>\n runs.some((run) => run.status === \'QUEUED\' || run.status === \'RUNNING\')\n );\n\n if (anyInProgress) {\n await sleep(1000);\n\n continue;\n }\n\n break;\n }\n\n return eventToRuns;\n }\n\n // Helper to verify runs match expectations\n function verifyEventRuns(eventData: any, runs: any[]) {\n if (eventData.shouldHaveRuns) {\n expect(runs.length).toBeGreaterThan(0);\n } else {\n expect(runs.length).toBe(0);\n }\n }\n\n // Helper to create bulk push event objects\n function createBulkPushEvent({\n index = 1,\n ShouldSkip = false,\n shouldHaveRuns = true,\n key = SIMPLE_EVENT,\n payload = {},\n scope = null,\n }: {\n index?: number;\n ShouldSkip?: boolean;\n shouldHaveRuns?: boolean;\n key?: string;\n payload?: Record<string, string>;\n scope?: string | null;\n }) {\n return {\n key,\n payload: {\n ShouldSkip,\n Message: `This is event ${index}`,\n ...payload,\n },\n additionalMetadata: {\n should_have_runs: shouldHaveRuns,\n test_run_id: testRunId,\n key,\n index,\n },\n scope: scope || undefined,\n };\n }\n\n // Helper to create payload object\n function createEventPayload(ShouldSkip: boolean): Input {\n return { ShouldSkip, Message: \'This is event 1\' };\n }\n\n it(\'should push an event\', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it(\'should push an event asynchronously\', async () => {\n const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));\n expect(event.eventId).toBeTruthy();\n }, 10000);\n\n it(\'should bulk push events\', async () => {\n const events = [\n {\n key: SIMPLE_EVENT,\n payload: { Message: \'This is event 1\', ShouldSkip: false },\n additionalMetadata: { source: \'test\', user_id: \'user123\' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: \'This is event 2\', ShouldSkip: false },\n additionalMetadata: { source: \'test\', user_id: \'user456\' },\n },\n {\n key: SIMPLE_EVENT,\n payload: { Message: \'This is event 3\', ShouldSkip: false },\n additionalMetadata: { source: \'test\', user_id: \'user789\' },\n },\n ];\n\n const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events);\n\n expect(result.events.length).toBe(3);\n\n // Sort and verify namespacing\n const sortedEvents = [...events].sort((a, b) => a.key.localeCompare(b.key));\n const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key));\n\n sortedEvents.forEach((originalEvent, index) => {\n const returnedEvent = sortedResults[index];\n expect(returnedEvent.key).toBe(originalEvent.key);\n });\n }, 15000);\n\n it(\'should process events according to event engine behavior\', async () => {\n const eventPromises = [\n createBulkPushEvent({}),\n createBulkPushEvent({\n key: \'thisisafakeeventfoobarbaz\',\n shouldHaveRuns: false,\n }),\n ].map((event) => convertBulkToSingle(event));\n const events = await
Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event\'s runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || \'{}\');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n }, 30000);\n\n function generateBulkEvents() {\n return [\n createBulkPushEvent({\n index: 1,\n ShouldSkip: false,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 2,\n ShouldSkip: true,\n shouldHaveRuns: true,\n }),\n createBulkPushEvent({\n index: 3,\n ShouldSkip: false,\n shouldHaveRuns: true,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 4,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n }),\n createBulkPushEvent({\n index: 5,\n ShouldSkip: true,\n shouldHaveRuns: false,\n scope: testRunId,\n key: \'thisisafakeeventfoobarbaz\',\n }),\n createBulkPushEvent({\n index: 6,\n ShouldSkip: false,\n shouldHaveRuns: false,\n scope: testRunId,\n key: \'thisisafakeeventfoobarbaz\',\n }),\n ];\n }\n\n async function convertBulkToSingle(event: any) {\n return hatchet.events.push(event.key, event.payload, {\n scope: event.scope,\n additionalMetadata: event.additionalMetadata,\n priority: event.priority,\n });\n }\n\n it(\'should handle event skipping and filtering without bulk push\', async () => {\n const cleanup = await setupEventFilter();\n\n try {\n const rawEvents = generateBulkEvents();\n const eventPromises = rawEvents.map((event) => convertBulkToSingle(event));\n const events = await Promise.all(eventPromises);\n\n const eventToRuns = await waitForEventsToProcess(events);\n\n // Verify each event\'s runs\n Object.keys(eventToRuns).forEach((eventId) => {\n const runs = eventToRuns[eventId];\n const eventInfo = events.find((e) => e.eventId === eventId);\n\n if (eventInfo) {\n const meta = JSON.parse(eventInfo.additionalMetadata || \'{}\');\n verifyEventRuns(\n {\n shouldHaveRuns: Boolean(meta.should_have_runs),\n },\n runs\n );\n }\n });\n } finally {\n await cleanup();\n }\n }, 30000);\n\n it(\'should filter events by payload expression not matching\', async () => {\n const cleanup = await setupEventFilter(\'input.ShouldSkip == false && payload.foobar == \'baz\'\', {\n foobar: \'qux\',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: \'This is event 1\', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: \'false\',\n test_run_id: testRunId,\n key: \'1\',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n expect(Object.keys(eventToRuns).length).toBe(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n\n it(\'should filter events by payload expression matching\', async () => {\n const cleanup = await setupEventFilter(\'input.ShouldSkip == false && payload.foobar == \'baz\'\', {\n foobar: \'baz\',\n });\n\n try {\n const event = await hatchet.events.push(\n SIMPLE_EVENT,\n { Message: \'This is event 1\', ShouldSkip: false },\n {\n scope: testRunId,\n additionalMetadata: {\n should_have_runs: \'true\',\n test_run_id: testRunId,\n key: \'1\',\n },\n }\n );\n\n const eventToRuns = await waitForEventsToProcess([event]);\n const runs = Object.values(eventToRuns)[0] || [];\n expect(runs.length).toBeGreaterThan(0);\n } finally {\n await cleanup();\n }\n }, 20000);\n});\n', + 'source': 
'out/typescript/on_event/event.e2e.ts', + 'blocks': {}, + 'highlights': {} +}; // Then replace double quotes with single quotes + +export default snippet; diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/event.ts b/frontend/docs/lib/generated/snips/typescript/on_event/event.ts index 07e033507..1c6406692 100644 --- a/frontend/docs/lib/generated/snips/typescript/on_event/event.ts +++ b/frontend/docs/lib/generated/snips/typescript/on_event/event.ts @@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'typescript ', - 'content': 'import { hatchet } from \'../hatchet-client\';\nimport { Input } from \'./workflow\';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push(\'simple-event:create\', {\n Message: \'hello\',\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n', + 'content': 'import { hatchet } from \'../hatchet-client\';\nimport { Input } from \'./workflow\';\n\nasync function main() {\n // > Pushing an Event\n const res = await hatchet.events.push(\'simple-event:create\', {\n Message: \'hello\',\n ShouldSkip: false,\n });\n\n console.log(res.eventId);\n}\n\nif (require.main === module) {\n main();\n}\n', 'source': 'out/typescript/on_event/event.ts', 'blocks': { 'pushing_an_event': { 'start': 6, - 'stop': 8 + 'stop': 9 } }, 'highlights': {} diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/index.ts b/frontend/docs/lib/generated/snips/typescript/on_event/index.ts index 8d9400c37..34c1d571a 100644 --- a/frontend/docs/lib/generated/snips/typescript/on_event/index.ts +++ b/frontend/docs/lib/generated/snips/typescript/on_event/index.ts @@ -1,7 +1,9 @@ +import evente2e from './event.e2e'; import event from './event'; import worker from './worker'; import workflow from './workflow'; +export { evente2e } export { event } export { worker } export { workflow } diff --git a/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts b/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts index 60b72cc44..325e085aa 100644 --- a/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts +++ b/frontend/docs/lib/generated/snips/typescript/on_event/workflow.ts @@ -2,12 +2,12 @@ import { Snippet } from '@/lib/generated/snips/types'; const snippet: Snippet = { 'language': 'typescript ', - 'content': 'import { hatchet } from \'../hatchet-client\';\n\nexport type Input = {\n Message: string;\n};\n\nexport const SIMPLE_EVENT = \'simple-event:create\';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run workflow on event\nexport const lower = hatchet.workflow({\n name: \'lower\',\n // 👀 Declare the event that will trigger the workflow\n onEvents: [\'simple-event:create\'],\n});\n\nlower.task({\n name: \'lower\',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: \'upper\',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: \'upper\',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n', + 'content': 'import { hatchet } from \'../hatchet-client\';\n\nexport type Input = {\n Message: string;\n ShouldSkip: boolean;\n};\n\nexport const SIMPLE_EVENT = \'simple-event:create\';\n\ntype LowerOutput = {\n lower: {\n TransformedMessage: string;\n };\n};\n\n// > Run 
workflow on event\nexport const lower = hatchet.workflow({\n name: \'lower\',\n // 👀 Declare the event that will trigger the workflow\n onEvents: [\'simple-event:create\'],\n});\n\nlower.task({\n name: \'lower\',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toLowerCase(),\n };\n },\n});\n\ntype UpperOutput = {\n upper: {\n TransformedMessage: string;\n };\n};\n\nexport const upper = hatchet.workflow({\n name: \'upper\',\n on: {\n event: SIMPLE_EVENT,\n },\n});\n\nupper.task({\n name: \'upper\',\n fn: (input) => {\n return {\n TransformedMessage: input.Message.toUpperCase(),\n };\n },\n});\n', 'source': 'out/typescript/on_event/workflow.ts', 'blocks': { 'run_workflow_on_event': { - 'start': 16, - 'stop': 20 + 'start': 17, + 'stop': 21 } }, 'highlights': {} diff --git a/frontend/docs/pages/sdks/python/client.mdx b/frontend/docs/pages/sdks/python/client.mdx index 1cd893c5c..6f026d5c9 100644 --- a/frontend/docs/pages/sdks/python/client.mdx +++ b/frontend/docs/pages/sdks/python/client.mdx @@ -73,14 +73,14 @@ Create a Hatchet worker on which to run workflows. Parameters: -| Name | Type | Description | Default | -| --------------- | ---------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ | ---------- | -| `name` | `str` | The name of the worker. | _required_ | -| `slots` | `int` | The number of workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time | `100` | -| `durable_slots` | `int` | The number of durable workflow slots on the worker. In other words, the number of concurrent tasks the worker can run at any point in time that are durable. | `1000` | -| `labels` | `dict[str, Union[str, int]]` | A dictionary of labels to assign to the worker. For more details, view examples on affinity and worker labels. | `{}` | -| `workflows` | `list[BaseWorkflow[Any]]` | A list of workflows to register on the worker, as a shorthand for calling `register_workflow` on each or `register_workflows` on all of them. | `[]` | -| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. This function will be called when the worker is started, and can be used to perform any setup or teardown tasks. | `None` | +| Name | Type | Description | Default | +| --------------- | ---------------------------- | --------------------------------------------------- | ---------- | +| `name` | `str` | The name of the worker. | _required_ | +| `slots` | `int` | The number of workflow slots on the worker. | `100` | +| `durable_slots` | `int` | The number of durable workflow slots on the worker. | `1000` | +| `labels` | `dict[str, Union[str, int]]` | A dictionary of labels to assign to the worker. | `{}` | +| `workflows` | `list[BaseWorkflow[Any]]` | A list of workflows to register on the worker. | `[]` | +| `lifespan` | `LifespanFn \| None` | A lifespan function to run on the worker. | `None` | Returns: @@ -94,18 +94,18 @@ Define a Hatchet workflow, which can then declare `task`s and be `run`, `schedul Parameters: -| Name | Type | Description | Default | -| ------------------ | -------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------- | ---------------- | -| `name` | `str` | The name of the workflow. 
| _required_ | -| `description` | `str \| None` | A description for the workflow | `None` | -| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the `input` to the tasks in the workflow. If no validator is provided, defaults to an `EmptyModel` under the hood. | `None` | -| `on_events` | `list[str]` | A list of event triggers for the workflow - events which cause the workflow to be run. | `[]` | -| `on_crons` | `list[str]` | A list of cron triggers for the workflow. | `[]` | -| `version` | `str \| None` | A version for the workflow | `None` | -| `sticky` | `StickyStrategy \| None` | A sticky strategy for the workflow | `None` | -| `default_priority` | `int` | The priority of the workflow. Higher values will cause this workflow to have priority in scheduling over other, lower priority ones. | `1` | -| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this workflow. | `None` | -| `task_defaults` | `TaskDefaults` | A `TaskDefaults` object controlling the default task settings for this workflow. | `TaskDefaults()` | +| Name | Type | Description | Default | +| ------------------ | -------------------------------------------------------------- | ---------------------------------------------------------------------------------------------------- | ---------------- | +| `name` | `str` | The name of the workflow. | _required_ | +| `description` | `str \| None` | A description for the workflow | `None` | +| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the `input` to the tasks in the workflow. | `None` | +| `on_events` | `list[str]` | A list of event triggers for the workflow - events which cause the workflow to be run. | `[]` | +| `on_crons` | `list[str]` | A list of cron triggers for the workflow. | `[]` | +| `version` | `str \| None` | A version for the workflow | `None` | +| `sticky` | `StickyStrategy \| None` | A sticky strategy for the workflow | `None` | +| `default_priority` | `int` | The priority of the workflow. Higher values will cause this workflow to have priority in scheduling. | `1` | +| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this workflow. | `None` | +| `task_defaults` | `TaskDefaults` | A `TaskDefaults` object controlling the default task settings for this workflow. | `TaskDefaults()` | Returns: @@ -119,30 +119,30 @@ A decorator to transform a function into a standalone Hatchet task that runs as Parameters: -| Name | Type | Description | Default | -| ----------------------- | -------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | ----------------------- | -| `name` | `str` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | _required_ | -| `description` | `str \| None` | An optional description for the task. | `None` | -| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` | -| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` | -| `on_crons` | `list[str]` | A list of cron triggers for the task. 
| `[]` | -| `version` | `str \| None` | A version for the task. | `None` | -| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` | -| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` | -| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this task. | `None` | -| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` | -| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` | -| `retries` | `int` | The number of times to retry the task before failing. | `0` | -| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` | -| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` | -| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` | -| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` | +| Name | Type | Description | Default | +| ----------------------- | -------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ----------------------- | +| `name` | `str \| None` | The name of the task. | `None` | +| `description` | `str \| None` | An optional description for the task. | `None` | +| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. | `None` | +| `on_events` | `list[str]` | A list of event triggers for the task. | `[]` | +| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` | +| `version` | `str \| None` | A version for the task. | `None` | +| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` | +| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` | +| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this task. | `None` | +| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` | +| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` | +| `retries` | `int` | The number of times to retry the task before failing. | `0` | +| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` | +| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` | +| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` | +| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. 
| `None` | Returns: -| Type | Description | -| ---------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------- | -| `Callable[[Callable[[EmptyModel, Context], R]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, Context], R]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. | +| Type | Description | +| -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------- | +| `Callable[[Callable[[EmptyModel, Context], R \| CoroutineLike[R]]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, Context], R \| CoroutineLike[R]]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. | #### `durable_task` @@ -150,27 +150,27 @@ A decorator to transform a function into a standalone Hatchet _durable_ task tha Parameters: -| Name | Type | Description | Default | -| ----------------------- | ------------------------------- | --------------------------------------------------------------------------------------------------------------------------- | ----------------------- | -| `name` | `str` | The name of the task. If not specified, defaults to the name of the function being wrapped by the `task` decorator. | _required_ | -| `description` | `str \| None` | An optional description for the task. | `None` | -| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. If no validator is provided, defaults to an `EmptyModel`. | `None` | -| `on_events` | `list[str]` | A list of event triggers for the task - events which cause the task to be run. | `[]` | -| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` | -| `version` | `str \| None` | A version for the task. | `None` | -| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` | -| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` | -| `concurrency` | `ConcurrencyExpression \| None` | A concurrency object controlling the concurrency settings for this task. | `None` | -| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` | -| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` | -| `retries` | `int` | The number of times to retry the task before failing. | `0` | -| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` | -| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` | -| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` | -| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. 
| `None` | +| Name | Type | Description | Default | +| ----------------------- | -------------------------------------------------------------- | ------------------------------------------------------------------------------------------------- | ----------------------- | +| `name` | `str \| None` | The name of the task. | `None` | +| `description` | `str \| None` | An optional description for the task. | `None` | +| `input_validator` | `Type[TWorkflowInput] \| None` | A Pydantic model to use as a validator for the input to the task. | `None` | +| `on_events` | `list[str]` | A list of event triggers for the task. | `[]` | +| `on_crons` | `list[str]` | A list of cron triggers for the task. | `[]` | +| `version` | `str \| None` | A version for the task. | `None` | +| `sticky` | `StickyStrategy \| None` | A sticky strategy for the task. | `None` | +| `default_priority` | `int` | The priority of the task. Higher values will cause this task to have priority in scheduling. | `1` | +| `concurrency` | `ConcurrencyExpression \| list[ConcurrencyExpression] \| None` | A concurrency object controlling the concurrency settings for this task. | `None` | +| `schedule_timeout` | `Duration` | The maximum time allowed for scheduling the task. | `timedelta(minutes=5)` | +| `execution_timeout` | `Duration` | The maximum time allowed for executing the task. | `timedelta(seconds=60)` | +| `retries` | `int` | The number of times to retry the task before failing. | `0` | +| `rate_limits` | `list[RateLimit]` | A list of rate limit configurations for the task. | `[]` | +| `desired_worker_labels` | `dict[str, DesiredWorkerLabel]` | A dictionary of desired worker labels that determine to which worker the task should be assigned. | `{}` | +| `backoff_factor` | `float \| None` | The backoff factor for controlling exponential backoff in retries. | `None` | +| `backoff_max_seconds` | `int \| None` | The maximum number of seconds to allow retries with exponential backoff to continue. | `None` | Returns: -| Type | Description | -| ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | ----------------------------------------------------- | -| `Callable[[Callable[[EmptyModel, DurableContext], R]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, DurableContext], R]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. | +| Type | Description | +| ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | ----------------------------------------------------- | +| `Callable[[Callable[[EmptyModel, DurableContext], R \| CoroutineLike[R]]], Standalone[EmptyModel, R]] \| Callable[[Callable[[TWorkflowInput, DurableContext], R \| CoroutineLike[R]]], Standalone[TWorkflowInput, R]]` | A decorator which creates a `Standalone` task object. 
| diff --git a/frontend/docs/pages/sdks/python/feature-clients/_meta.js b/frontend/docs/pages/sdks/python/feature-clients/_meta.js index cfb6c7994..fcbb64fad 100644 --- a/frontend/docs/pages/sdks/python/feature-clients/_meta.js +++ b/frontend/docs/pages/sdks/python/feature-clients/_meta.js @@ -6,6 +6,13 @@ export default { }, }, + filters: { + title: "Filters", + theme: { + toc: true, + }, + }, + logs: { title: "Logs", theme: { diff --git a/frontend/docs/pages/sdks/python/feature-clients/filters.mdx b/frontend/docs/pages/sdks/python/feature-clients/filters.mdx new file mode 100644 index 000000000..3f614cb02 --- /dev/null +++ b/frontend/docs/pages/sdks/python/feature-clients/filters.mdx @@ -0,0 +1,158 @@ +# Filters Client + +Bases: `BaseRestClient` + +The filters client is a client for interacting with Hatchet's filters API. + +Methods: + +| Name | Description | +| ------------ | -------------------------------- | +| `aio_create` | Create a new filter. | +| `aio_delete` | Delete a filter by its ID. | +| `aio_get` | Get a filter by its ID. | +| `aio_list` | List filters for a given tenant. | +| `create` | Create a new filter. | +| `delete` | Delete a filter by its ID. | +| `get` | Get a filter by its ID. | +| `list` | List filters for a given tenant. | + +### Functions + +#### `aio_create` + +Create a new filter. + +Parameters: + +| Name | Type | Description | Default | +| ------------- | ------------------------- | ---------------------------------------------------- | ---------- | +| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ | +| `expression` | `str` | The expression to evaluate for the filter. | _required_ | +| `scope` | `str` | The scope for the filter. | _required_ | +| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` | + +Returns: + +| Type | Description | +| ---------- | ------------------- | +| `V1Filter` | The created filter. | + +#### `aio_delete` + +Delete a filter by its ID. + +Parameters: + +| Name | Type | Description | Default | +| ----------- | ----- | ------------------------------- | ---------- | +| `filter_id` | `str` | The ID of the filter to delete. | _required_ | + +Returns: + +| Type | Description | +| ---------- | ------------------- | +| `V1Filter` | The deleted filter. | + +#### `aio_get` + +Get a filter by its ID. + +Parameters: + +| Name | Type | Description | Default | +| ----------- | ----- | --------------------------------- | ---------- | +| `filter_id` | `str` | The ID of the filter to retrieve. | _required_ | + +Returns: + +| Type | Description | +| ---------- | --------------------------------- | +| `V1Filter` | The filter with the specified ID. | + +#### `aio_list` + +List filters for a given tenant. + +Parameters: + +| Name | Type | Description | Default | +| ------------------------- | ------------------------------- | --------------------------------------------------------------------------------------------------------------------- | ------- | +| `limit` | `int \| None` | The maximum number of filters to return. | `None` | +| `offset` | `int \| None` | The number of filters to skip before starting to collect the result set. | `None` | +| `workflow_id_scope_pairs` | `list[tuple[str, str]] \| None` | A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second. 
| `None` | + +Returns: + +| Type | Description | +| -------------- | -------------------------------------------------- | +| `V1FilterList` | A list of filters matching the specified criteria. | + +#### `create` + +Create a new filter. + +Parameters: + +| Name | Type | Description | Default | +| ------------- | ------------------------- | ---------------------------------------------------- | ---------- | +| `workflow_id` | `str` | The ID of the workflow to associate with the filter. | _required_ | +| `expression` | `str` | The expression to evaluate for the filter. | _required_ | +| `scope` | `str` | The scope for the filter. | _required_ | +| `payload` | `JSONSerializableMapping` | The payload to send with the filter. | `{}` | + +Returns: + +| Type | Description | +| ---------- | ------------------- | +| `V1Filter` | The created filter. | + +#### `delete` + +Delete a filter by its ID. + +Parameters: + +| Name | Type | Description | Default | +| ----------- | ----- | ------------------------------- | ---------- | +| `filter_id` | `str` | The ID of the filter to delete. | _required_ | + +Returns: + +| Type | Description | +| ---------- | ------------------- | +| `V1Filter` | The deleted filter. | + +#### `get` + +Get a filter by its ID. + +Parameters: + +| Name | Type | Description | Default | +| ----------- | ----- | --------------------------------- | ---------- | +| `filter_id` | `str` | The ID of the filter to retrieve. | _required_ | + +Returns: + +| Type | Description | +| ---------- | --------------------------------- | +| `V1Filter` | The filter with the specified ID. | + +#### `list` + +List filters for a given tenant. + +Parameters: + +| Name | Type | Description | Default | +| ------------------------- | ------------------------------- | --------------------------------------------------------------------------------------------------------------------- | ------- | +| `limit` | `int \| None` | The maximum number of filters to return. | `None` | +| `offset` | `int \| None` | The number of filters to skip before starting to collect the result set. | `None` | +| `workflow_id_scope_pairs` | `list[tuple[str, str]] \| None` | A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second. | `None` | + +Returns: + +| Type | Description | +| -------------- | -------------------------------------------------- | +| `V1FilterList` | A list of filters matching the specified criteria. | diff --git a/frontend/docs/pages/sdks/python/feature-clients/metrics.mdx b/frontend/docs/pages/sdks/python/feature-clients/metrics.mdx index 338d7a847..262ba3c32 100644 --- a/frontend/docs/pages/sdks/python/feature-clients/metrics.mdx +++ b/frontend/docs/pages/sdks/python/feature-clients/metrics.mdx @@ -9,10 +9,10 @@ Methods: | Name | Description | | -------------------------- | ------------------------------------------------------------------------- | | `aio_get_queue_metrics` | Retrieve queue metrics for a set of workflow ids and additional metadata. | -| `aio_get_task_metrics` | Retrieve queue metrics. | +| `aio_get_task_metrics` | Retrieve queue metrics | | `aio_get_workflow_metrics` | Retrieve workflow metrics for a given workflow ID. | | `get_queue_metrics` | Retrieve queue metrics for a set of workflow ids and additional metadata. | -| `get_task_metrics` | Retrieve queue metrics. | +| `get_task_metrics` | Retrieve queue metrics | | `get_workflow_metrics` | Retrieve workflow metrics for a given workflow ID. 
| ### Functions @@ -36,13 +36,13 @@ Returns: #### `aio_get_task_metrics` -Retrieve queue metrics. +Retrieve queue metrics Returns: -| Type | Description | -| --------------------------- | -------------------------------------- | -| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant. | +| Type | Description | +| --------------------------- | ------------------------------------- | +| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant | #### `aio_get_workflow_metrics` @@ -81,13 +81,13 @@ Returns: #### `get_task_metrics` -Retrieve queue metrics. +Retrieve queue metrics Returns: -| Type | Description | -| --------------------------- | -------------------------------------- | -| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant. | +| Type | Description | +| --------------------------- | ------------------------------------- | +| `TenantStepRunQueueMetrics` | Step run queue metrics for the tenant | #### `get_workflow_metrics` diff --git a/frontend/docs/pages/sdks/python/feature-clients/runs.mdx b/frontend/docs/pages/sdks/python/feature-clients/runs.mdx index 363418ec9..86c5dc61f 100644 --- a/frontend/docs/pages/sdks/python/feature-clients/runs.mdx +++ b/frontend/docs/pages/sdks/python/feature-clients/runs.mdx @@ -66,18 +66,19 @@ List task runs according to a set of filters. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | --------------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for filtering task runs. | `None` | -| `only_tasks` | `bool` | Whether to only list task runs. | `False` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `limit` | `int \| None` | The maximum number of task runs to return. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` | -| `until` | `datetime \| None` | The end time for filtering task runs. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` | -| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` | -| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` | -| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | --------------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for filtering task runs. | `None` | +| `only_tasks` | `bool` | Whether to only list task runs. | `False` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `limit` | `int \| None` | The maximum number of task runs to return. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` | +| `until` | `datetime \| None` | The end time for filtering task runs. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` | +| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` | +| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` | +| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. 
| `None` | Returns: @@ -91,18 +92,19 @@ List task runs according to a set of filters. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | --------------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for filtering task runs. | `None` | -| `only_tasks` | `bool` | Whether to only list task runs. | `False` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `limit` | `int \| None` | The maximum number of task runs to return. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` | -| `until` | `datetime \| None` | The end time for filtering task runs. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` | -| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` | -| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` | -| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | --------------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for filtering task runs. | `None` | +| `only_tasks` | `bool` | Whether to only list task runs. | `False` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `limit` | `int \| None` | The maximum number of task runs to return. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses to filter task runs by. | `None` | +| `until` | `datetime \| None` | The end time for filtering task runs. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata to filter task runs by. | `None` | +| `workflow_ids` | `list[str] \| None` | The workflow IDs to filter task runs by. | `None` | +| `worker_id` | `str \| None` | The worker ID to filter task runs by. | `None` | +| `parent_task_external_id` | `str \| None` | The parent task external ID to filter task runs by. | `None` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. | `None` | Returns: diff --git a/frontend/docs/pages/sdks/python/runnables.mdx b/frontend/docs/pages/sdks/python/runnables.mdx index 396f7dd97..b1e3cf8ac 100644 --- a/frontend/docs/pages/sdks/python/runnables.mdx +++ b/frontend/docs/pages/sdks/python/runnables.mdx @@ -9,7 +9,7 @@ Bases: `BaseWorkflow[TWorkflowInput]` -A Hatchet workflow allows you to define tasks to be run and perform actions on the workflow. +A Hatchet workflow, which allows you to define tasks to be run and perform actions on the workflow. Workflows in Hatchet represent coordinated units of work that can be triggered, scheduled, or run on a cron schedule. Each workflow can contain multiple tasks that can be arranged in dependencies (DAGs), have customized retry behavior, timeouts, concurrency controls, and more. @@ -40,9 +40,9 @@ Workflows support various execution patterns including: - Cron-based recurring execution with `create_cron()` - Bulk operations with `run_many()` -Tasks within workflows can be defined with `@workflow.task()` or `@workflow.durable_task()` decorators and arranged into complex dependency patterns. 
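+
+As an illustrative sketch of the decorator-based task pattern described in the surrounding text (not part of the generated reference; it assumes a configured `Hatchet` client, and the `parents` wiring follows Hatchet's DAG examples):
+
+```python
+from hatchet_sdk import Context, EmptyModel, Hatchet
+
+hatchet = Hatchet()
+
+wf = hatchet.workflow(name="example-workflow")
+
+@wf.task()
+def fetch(input: EmptyModel, ctx: Context) -> dict:
+    # Tasks receive the validated workflow input and a Context object.
+    return {"fetched": True}
+
+@wf.task(parents=[fetch])
+def process(input: EmptyModel, ctx: Context) -> dict:
+    # `parents` arranges tasks into a dependency DAG: `process` runs after `fetch`.
+    return {"processed": True}
+```
+
+Triggering the workflow (for example with `wf.run()`) then executes `fetch` before `process`.
+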
+Tasks within workflows can be defined with `@workflow.task()` or `@workflow.durable_task()` decorators and can be arranged into complex dependency patterns. -### Methods +Methods: | Name | Description | | ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------ | @@ -91,9 +91,9 @@ Parameters: Returns: -| Type | Description | -| ----------------------------------------------------------------------------- | ------------------------------------------ | -| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | +| Type | Description | +| ------------------------------------------------------------------------------------------------- | ------------------------------------------ | +| `Callable[[Callable[[TWorkflowInput, Context], R \| CoroutineLike[R]]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | #### `durable_task` @@ -123,9 +123,9 @@ Parameters: Returns: -| Type | Description | -| ------------------------------------------------------------------------------------ | ------------------------------------------ | -| `Callable[[Callable[[TWorkflowInput, DurableContext], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | +| Type | Description | +| -------------------------------------------------------------------------------------------------------- | ------------------------------------------ | +| `Callable[[Callable[[TWorkflowInput, DurableContext], R \| CoroutineLike[R]]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | #### `on_failure_task` @@ -146,9 +146,9 @@ Parameters: Returns: -| Type | Description | -| ----------------------------------------------------------------------------- | ------------------------------------------ | -| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | +| Type | Description | +| ------------------------------------------------------------------------------------------------- | ------------------------------------------ | +| `Callable[[Callable[[TWorkflowInput, Context], R \| CoroutineLike[R]]], Task[TWorkflowInput, R]]` | A decorator which creates a `Task` object. | #### `on_success_task` @@ -169,9 +169,9 @@ Parameters: Returns: -| Type | Description | -| ----------------------------------------------------------------------------- | ---------------------------------------- | -| `Callable[[Callable[[TWorkflowInput, Context], R]], Task[TWorkflowInput, R]]` | A decorator which creates a Task object. | +| Type | Description | +| ------------------------------------------------------------------------------------------------- | ---------------------------------------- | +| `Callable[[Callable[[TWorkflowInput, Context], R \| CoroutineLike[R]]], Task[TWorkflowInput, R]]` | A decorator which creates a Task object. | #### `run` @@ -395,17 +395,18 @@ List runs of the workflow. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | ------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | -| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | -| `limit` | `int` | The maximum number of runs to be listed. 
| `100` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | -| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | -| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | -| `only_tasks` | `bool` | Whether to list only task runs. | `False` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | ------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | +| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | +| `limit` | `int` | The maximum number of runs to be listed. | `100` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | +| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | +| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| `only_tasks` | `bool` | Whether to list only task runs. | `False` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. | `None` | Returns: @@ -419,17 +420,18 @@ List runs of the workflow. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | ------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | -| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | -| `limit` | `int` | The maximum number of runs to be listed. | `100` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | -| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | -| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | -| `only_tasks` | `bool` | Whether to list only task runs. | `False` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | ------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | +| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | +| `limit` | `int` | The maximum number of runs to be listed. | `100` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | +| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | +| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| `only_tasks` | `bool` | Whether to list only task runs. 
| `False` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. | `None` | Returns: @@ -441,7 +443,7 @@ Returns: Bases: `BaseWorkflow[TWorkflowInput]`, `Generic[TWorkflowInput, R]` -### Methods +Methods: | Name | Description | | ---------------------- | ------------------------------------------------------------------------- | @@ -684,16 +686,17 @@ List runs of the workflow. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | ------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | -| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | -| `limit` | `int` | The maximum number of runs to be listed. | `100` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | -| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | -| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | ------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | +| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | +| `limit` | `int` | The maximum number of runs to be listed. | `100` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | +| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | +| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. | `None` | Returns: @@ -707,16 +710,17 @@ List runs of the workflow. Parameters: -| Name | Type | Description | Default | -| ------------------------- | ---------------------------- | ------------------------------------------- | ------- | -| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | -| `until` | `datetime \| None` | The end time for the runs to be listed. | `None` | -| `limit` | `int` | The maximum number of runs to be listed. | `100` | -| `offset` | `int \| None` | The offset for pagination. | `None` | -| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | -| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | -| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | -| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| Name | Type | Description | Default | +| ------------------------------ | ---------------------------- | ------------------------------------------- | ------- | +| `since` | `datetime \| None` | The start time for the runs to be listed. | `None` | +| `until` | `datetime \| None` | The end time for the runs to be listed. 
| `None` | +| `limit` | `int` | The maximum number of runs to be listed. | `100` | +| `offset` | `int \| None` | The offset for pagination. | `None` | +| `statuses` | `list[V1TaskStatus] \| None` | The statuses of the runs to be listed. | `None` | +| `additional_metadata` | `dict[str, str] \| None` | Additional metadata for filtering the runs. | `None` | +| `worker_id` | `str \| None` | The ID of the worker that ran the tasks. | `None` | +| `parent_task_external_id` | `str \| None` | The external ID of the parent task. | `None` | +| `triggering_event_external_id` | `str \| None` | The event id that triggered the task run. | `None` | Returns: diff --git a/internal/cel/cel.go b/internal/cel/cel.go index 84445d479..fce73e71a 100644 --- a/internal/cel/cel.go +++ b/internal/cel/cel.go @@ -18,6 +18,7 @@ import ( type CELParser struct { workflowStrEnv *cel.Env stepRunEnv *cel.Env + eventEnv *cel.Env } var checksumDecl = decls.NewFunction("checksum", @@ -67,9 +68,20 @@ func NewCELParser() *CELParser { checksum, ) + eventEnv, _ := cel.NewEnv( + cel.Declarations( + decls.NewVar("input", decls.NewMapType(decls.String, decls.Dyn)), + decls.NewVar("additional_metadata", decls.NewMapType(decls.String, decls.Dyn)), + decls.NewVar("payload", decls.NewMapType(decls.String, decls.Dyn)), + decls.NewVar("event_id", decls.String), + checksumDecl, + ), + ) + return &CELParser{ workflowStrEnv: workflowStrEnv, stepRunEnv: stepRunEnv, + eventEnv: eventEnv, } } @@ -101,6 +113,18 @@ func WithWorkflowRunID(workflowRunID string) InputOpts { } } +func WithPayload(payload map[string]interface{}) InputOpts { + return func(w Input) { + w["payload"] = payload + } +} + +func WithEventID(eventID string) InputOpts { + return func(w Input) { + w["event_id"] = eventID + } +} + func NewInput(opts ...InputOpts) Input { res := make(map[string]interface{}) @@ -294,3 +318,29 @@ func (p *CELParser) CheckStepRunOutAgainstKnownV1(out *StepRunOut, knownType sql return nil } + +func (p *CELParser) EvaluateEventExpression(expr string, input Input) (bool, error) { + ast, issues := p.eventEnv.Compile(expr) + + if issues != nil && issues.Err() != nil { + return false, fmt.Errorf("failed to compile expression: %w", issues.Err()) + } + + program, err := p.eventEnv.Program(ast) + if err != nil { + return false, fmt.Errorf("failed to create program: %w", err) + } + + var inMap map[string]interface{} = input + + out, _, err := program.Eval(inMap) + if err != nil { + return false, fmt.Errorf("failed to evaluate expression: %w", err) + } + + if out.Type() != types.BoolType { + return false, fmt.Errorf("expression did not evaluate to a boolean: got %s", out.Type().TypeName()) + } + + return out.Value().(bool), nil +} diff --git a/internal/cel/cel_test.go b/internal/cel/cel_test.go index 8b4401797..4ffa221f6 100644 --- a/internal/cel/cel_test.go +++ b/internal/cel/cel_test.go @@ -93,3 +93,84 @@ func TestCELParser(t *testing.T) { }) } } + +func TestCELParserEventExpression(t *testing.T) { + parser := cel.NewCELParser() + + tests := []struct { + expression string + input cel.Input + expected bool + expectError bool + }{ + { + expression: `has(input.custom.value)`, + input: cel.NewInput( + cel.WithInput(map[string]interface{}{ + "custom": map[string]interface{}{ + "value": "actual value", + }, + }), + ), + expected: true, + expectError: false, + }, + { + expression: `has(input.custom)`, + input: cel.NewInput( + cel.WithInput(map[string]interface{}{}), + ), + expected: false, + expectError: false, + }, + { + expression: `input.custom.value > 314`, + 
input: cel.NewInput( + cel.WithInput(map[string]interface{}{ + "custom": map[string]interface{}{ + "value": 400, + }, + }), + ), + expected: true, + expectError: false, + }, + { + expression: `input.custom.value < 314`, + input: cel.NewInput( + cel.WithInput(map[string]interface{}{ + "custom": map[string]interface{}{ + "value": 400, + }, + }), + ), + expected: false, + expectError: false, + }, + { + expression: `checksum(input.missing_key)`, // Should throw an error due to missing key + input: cel.NewInput(), + expected: false, + expectError: true, + }, + { + expression: `input.custom.value = 1234`, // Invalid expression (mismatched types), expecting error + input: cel.NewInput(), + expected: false, + expectError: true, + }, + } + + for _, tt := range tests { + t.Run(tt.expression, func(t *testing.T) { + result, err := parser.EvaluateEventExpression(tt.expression, tt.input) + + if tt.expectError { + assert.Error(t, err, "Expected error but got none") + } else { + assert.NoError(t, err, "Did not expect error but got one") + assert.Equal(t, tt.expected, result, "Unexpected result") + } + }) + } +} diff --git a/internal/services/controllers/v1/task/controller.go b/internal/services/controllers/v1/task/controller.go index 35a533e33..b9af30653 100644 --- a/internal/services/controllers/v1/task/controller.go +++ b/internal/services/controllers/v1/task/controller.go @@ -818,6 +818,7 @@ func (tc *TasksControllerImpl) handleProcessUserEventTrigger(ctx context.Context Data: msg.EventData, AdditionalMetadata: msg.EventAdditionalMetadata, Priority: msg.EventPriority, + Scope: msg.EventScope, } opts = append(opts, opt) diff --git a/internal/services/ingestor/contracts/events.pb.go b/internal/services/ingestor/contracts/events.pb.go index d1d65ab33..d061f7a74 100644 --- a/internal/services/ingestor/contracts/events.pb.go +++ b/internal/services/ingestor/contracts/events.pb.go @@ -38,6 +38,8 @@ type Event struct { EventTimestamp *timestamppb.Timestamp `protobuf:"bytes,5,opt,name=eventTimestamp,proto3" json:"eventTimestamp,omitempty"` // the payload for the event AdditionalMetadata *string `protobuf:"bytes,6,opt,name=additionalMetadata,proto3,oneof" json:"additionalMetadata,omitempty"` + // the scope associated with this filter. Used for subsetting candidate filters at evaluation time + Scope *string `protobuf:"bytes,7,opt,name=scope,proto3,oneof" json:"scope,omitempty"` } func (x *Event) Reset() { @@ -114,6 +116,13 @@ func (x *Event) GetAdditionalMetadata() string { return "" } +func (x *Event) GetScope() string { + if x != nil && x.Scope != nil { + return *x.Scope + } + return "" +} + type Events struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -466,6 +475,8 @@ type PushEventRequest struct { // metadata for the event AdditionalMetadata *string `protobuf:"bytes,4,opt,name=additionalMetadata,proto3,oneof" json:"additionalMetadata,omitempty"` Priority *int32 `protobuf:"varint,5,opt,name=priority,proto3,oneof" json:"priority,omitempty"` + // the scope associated with this filter. 
Used for subsetting candidate filters at evaluation time + Scope *string `protobuf:"bytes,6,opt,name=scope,proto3,oneof" json:"scope,omitempty"` } func (x *PushEventRequest) Reset() { @@ -535,6 +546,13 @@ func (x *PushEventRequest) GetPriority() int32 { return 0 } +func (x *PushEventRequest) GetScope() string { + if x != nil && x.Scope != nil { + return *x.Scope + } + return "" +} + type ReplayEventRequest struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache @@ -589,7 +607,7 @@ var file_events_proto_rawDesc = []byte{ 0x0a, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, - 0xf9, 0x01, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, + 0x9e, 0x02, 0x0a, 0x05, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x74, 0x65, 0x6e, 0x61, 0x6e, 0x74, 0x49, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x12, @@ -603,85 +621,90 @@ var file_events_proto_rawDesc = []byte{ 0x33, 0x0a, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, - 0x61, 0x88, 0x01, 0x01, 0x42, 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, - 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x28, 0x0a, 0x06, 0x45, - 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, - 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x82, 0x02, 0x0a, 0x0d, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, - 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, - 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, - 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, - 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, - 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x19, 0x0a, 0x05, 0x6c, 0x65, 0x76, - 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x6c, 0x65, 0x76, 0x65, - 0x6c, 0x88, 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, - 0x12, 0x2b, 0x0a, 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, - 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, 0x0e, 0x74, 0x61, 0x73, 0x6b, - 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, 0x01, 0x01, 0x42, 0x08, 0x0a, - 0x06, 0x5f, 0x6c, 0x65, 0x76, 
0x65, 0x6c, 0x42, 0x11, 0x0a, 0x0f, 0x5f, 0x74, 0x61, 0x73, 0x6b, - 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0x10, 0x0a, 0x0e, 0x50, 0x75, - 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xa5, 0x01, 0x0a, - 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, - 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, - 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, - 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, - 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, - 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x12, 0x18, - 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, - 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x74, 0x61, - 0x64, 0x61, 0x74, 0x61, 0x22, 0x18, 0x0a, 0x16, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, - 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x41, - 0x0a, 0x14, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, - 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, - 0x73, 0x22, 0xfc, 0x01, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, - 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, - 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, - 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, - 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, - 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x54, 0x69, 0x6d, - 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x33, 0x0a, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, - 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, - 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, - 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, 0x12, 0x1f, 0x0a, 0x08, 0x70, - 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, - 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, 0x01, 0x42, 0x15, 0x0a, 0x13, + 0x61, 0x88, 0x01, 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x07, 0x20, + 0x01, 0x28, 0x09, 0x48, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, + 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, + 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x08, 0x0a, 
0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, + 0x22, 0x28, 0x0a, 0x06, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1e, 0x0a, 0x06, 0x65, 0x76, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x06, 0x2e, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x82, 0x02, 0x0a, 0x0d, 0x50, + 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, + 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x09, 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, + 0x65, 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, + 0x65, 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x19, + 0x0a, 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, + 0x05, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x88, 0x01, 0x01, 0x12, 0x1a, 0x0a, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x6d, 0x65, 0x74, + 0x61, 0x64, 0x61, 0x74, 0x61, 0x12, 0x2b, 0x0a, 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, + 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x05, 0x48, 0x01, 0x52, + 0x0e, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x88, + 0x01, 0x01, 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x6c, 0x65, 0x76, 0x65, 0x6c, 0x42, 0x11, 0x0a, 0x0f, + 0x5f, 0x74, 0x61, 0x73, 0x6b, 0x52, 0x65, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x22, + 0x10, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, + 0x65, 0x22, 0xa5, 0x01, 0x0a, 0x15, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1c, 0x0a, 0x09, 0x73, + 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, + 0x73, 0x74, 0x65, 0x70, 0x52, 0x75, 0x6e, 0x49, 0x64, 0x12, 0x38, 0x0a, 0x09, 0x63, 0x72, 0x65, + 0x61, 0x74, 0x65, 0x64, 0x41, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, + 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, + 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x09, 0x63, 0x72, 0x65, 0x61, 0x74, 0x65, + 0x64, 0x41, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x1a, 0x0a, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, + 0x08, 0x6d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x22, 0x18, 0x0a, 0x16, 0x50, 0x75, 0x74, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x41, 0x0a, 0x14, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x29, 0x0a, 0x06, 0x65, + 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x50, 0x75, + 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 
0x74, 0x52, 0x06, + 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0xa1, 0x02, 0x0a, 0x10, 0x50, 0x75, 0x73, 0x68, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x6b, + 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x18, 0x0a, + 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, + 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x42, 0x0a, 0x0e, 0x65, 0x76, 0x65, 0x6e, 0x74, + 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, + 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0e, 0x65, 0x76, 0x65, + 0x6e, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x33, 0x0a, 0x12, 0x61, + 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, + 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x12, 0x61, 0x64, 0x64, 0x69, 0x74, + 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x88, 0x01, 0x01, + 0x12, 0x1f, 0x0a, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x18, 0x05, 0x20, 0x01, + 0x28, 0x05, 0x48, 0x01, 0x52, 0x08, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x88, 0x01, + 0x01, 0x12, 0x19, 0x0a, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, + 0x48, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x88, 0x01, 0x01, 0x42, 0x15, 0x0a, 0x13, 0x5f, 0x61, 0x64, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x61, 0x6c, 0x4d, 0x65, 0x74, 0x61, 0x64, 0x61, 0x74, 0x61, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x70, 0x72, 0x69, 0x6f, 0x72, 0x69, 0x74, 0x79, - 0x22, 0x2e, 0x0a, 0x12, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, - 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, - 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, - 0x32, 0x88, 0x02, 0x0a, 0x0d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, - 0x63, 0x65, 0x12, 0x23, 0x0a, 0x04, 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, - 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, - 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2c, 0x0a, 0x08, 0x42, 0x75, 0x6c, 0x6b, 0x50, - 0x75, 0x73, 0x68, 0x12, 0x15, 0x2e, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, - 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x07, 0x2e, 0x45, 0x76, 0x65, - 0x6e, 0x74, 0x73, 0x22, 0x00, 0x12, 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, - 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, - 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, - 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, 0x74, - 0x4c, 0x6f, 0x67, 0x12, 0x0e, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, - 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x12, 0x43, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, - 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x16, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, - 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, - 0x1a, 
0x17, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, - 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x45, 0x5a, 0x43, 0x67, - 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, - 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, - 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, - 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, 0x6f, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, - 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x42, 0x08, 0x0a, 0x06, 0x5f, 0x73, 0x63, 0x6f, 0x70, 0x65, 0x22, 0x2e, 0x0a, 0x12, 0x52, 0x65, + 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, + 0x12, 0x18, 0x0a, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x09, 0x52, 0x07, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x49, 0x64, 0x32, 0x88, 0x02, 0x0a, 0x0d, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x73, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x23, 0x0a, 0x04, + 0x50, 0x75, 0x73, 0x68, 0x12, 0x11, 0x2e, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, + 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x22, + 0x00, 0x12, 0x2c, 0x0a, 0x08, 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x12, 0x15, 0x2e, + 0x42, 0x75, 0x6c, 0x6b, 0x50, 0x75, 0x73, 0x68, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, + 0x75, 0x65, 0x73, 0x74, 0x1a, 0x07, 0x2e, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x00, 0x12, + 0x32, 0x0a, 0x11, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x53, 0x69, 0x6e, 0x67, 0x6c, 0x65, 0x45, + 0x76, 0x65, 0x6e, 0x74, 0x12, 0x13, 0x2e, 0x52, 0x65, 0x70, 0x6c, 0x61, 0x79, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x06, 0x2e, 0x45, 0x76, 0x65, 0x6e, + 0x74, 0x22, 0x00, 0x12, 0x2b, 0x0a, 0x06, 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x12, 0x0e, 0x2e, + 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x0f, 0x2e, + 0x50, 0x75, 0x74, 0x4c, 0x6f, 0x67, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, + 0x12, 0x43, 0x0a, 0x0e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, + 0x6e, 0x74, 0x12, 0x16, 0x2e, 0x50, 0x75, 0x74, 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, + 0x65, 0x6e, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e, 0x50, 0x75, 0x74, + 0x53, 0x74, 0x72, 0x65, 0x61, 0x6d, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, + 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x45, 0x5a, 0x43, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, + 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2d, 0x64, 0x65, 0x76, 0x2f, + 0x68, 0x61, 0x74, 0x63, 0x68, 0x65, 0x74, 0x2f, 0x69, 0x6e, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x6c, + 0x2f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x73, 0x2f, 0x69, 0x6e, 0x67, 0x65, 0x73, 0x74, + 0x6f, 0x72, 0x2f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x61, 0x63, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, + 0x6f, 0x74, 0x6f, 0x33, } var ( diff --git a/internal/services/ingestor/ingestor.go b/internal/services/ingestor/ingestor.go index c02f74898..3ee6bc0c0 100644 --- a/internal/services/ingestor/ingestor.go +++ b/internal/services/ingestor/ingestor.go @@ -22,7 +22,7 @@ import ( type Ingestor interface { contracts.EventsServiceServer - IngestEvent(ctx context.Context, tenant *dbsqlc.Tenant, eventName string, data []byte, metadata []byte, priority *int32) 
(*dbsqlc.Event, error) + IngestEvent(ctx context.Context, tenant *dbsqlc.Tenant, eventName string, data []byte, metadata []byte, priority *int32, scope *string) (*dbsqlc.Event, error) BulkIngestEvent(ctx context.Context, tenant *dbsqlc.Tenant, eventOpts []*repository.CreateEventOpts) ([]*dbsqlc.Event, error) IngestReplayedEvent(ctx context.Context, tenant *dbsqlc.Tenant, replayedEvent *dbsqlc.Event) (*dbsqlc.Event, error) } @@ -163,12 +163,12 @@ func NewIngestor(fs ...IngestorOptFunc) (Ingestor, error) { }, nil } -func (i *IngestorImpl) IngestEvent(ctx context.Context, tenant *dbsqlc.Tenant, key string, data []byte, metadata []byte, priority *int32) (*dbsqlc.Event, error) { +func (i *IngestorImpl) IngestEvent(ctx context.Context, tenant *dbsqlc.Tenant, key string, data []byte, metadata []byte, priority *int32, scope *string) (*dbsqlc.Event, error) { switch tenant.Version { case dbsqlc.TenantMajorEngineVersionV0: return i.ingestEventV0(ctx, tenant, key, data, metadata) case dbsqlc.TenantMajorEngineVersionV1: - return i.ingestEventV1(ctx, tenant, key, data, metadata, priority) + return i.ingestEventV1(ctx, tenant, key, data, metadata, priority, scope) default: return nil, fmt.Errorf("unsupported tenant version: %s", tenant.Version) } diff --git a/internal/services/ingestor/ingestor_v1.go b/internal/services/ingestor/ingestor_v1.go index 917643ade..e8fb02c88 100644 --- a/internal/services/ingestor/ingestor_v1.go +++ b/internal/services/ingestor/ingestor_v1.go @@ -25,7 +25,7 @@ type EventResult struct { AdditionalMetadata string } -func (i *IngestorImpl) ingestEventV1(ctx context.Context, tenant *dbsqlc.Tenant, key string, data []byte, metadata []byte, priority *int32) (*dbsqlc.Event, error) { +func (i *IngestorImpl) ingestEventV1(ctx context.Context, tenant *dbsqlc.Tenant, key string, data []byte, metadata []byte, priority *int32, scope *string) (*dbsqlc.Event, error) { ctx, span := telemetry.NewSpan(ctx, "ingest-event") defer span.End() @@ -49,10 +49,10 @@ func (i *IngestorImpl) ingestEventV1(ctx context.Context, tenant *dbsqlc.Tenant, ) } - return i.ingestSingleton(tenantId, key, data, metadata, priority) + return i.ingestSingleton(tenantId, key, data, metadata, priority, scope) } -func (i *IngestorImpl) ingestSingleton(tenantId, key string, data []byte, metadata []byte, priority *int32) (*dbsqlc.Event, error) { +func (i *IngestorImpl) ingestSingleton(tenantId, key string, data []byte, metadata []byte, priority *int32, scope *string) (*dbsqlc.Event, error) { eventId := uuid.New().String() msg, err := eventToTaskV1( @@ -62,6 +62,7 @@ func (i *IngestorImpl) ingestSingleton(tenantId, key string, data []byte, metada data, metadata, priority, + scope, ) if err != nil { @@ -116,7 +117,7 @@ func (i *IngestorImpl) bulkIngestEventV1(ctx context.Context, tenant *dbsqlc.Ten results := make([]*dbsqlc.Event, 0, len(eventOpts)) for _, event := range eventOpts { - res, err := i.ingestSingleton(tenantId, event.Key, event.Data, event.AdditionalMetadata, event.Priority) + res, err := i.ingestSingleton(tenantId, event.Key, event.Data, event.AdditionalMetadata, event.Priority, event.Scope) if err != nil { return nil, fmt.Errorf("could not ingest event: %w", err) @@ -134,16 +135,17 @@ func (i *IngestorImpl) ingestReplayedEventV1(ctx context.Context, tenant *dbsqlc tenantId := sqlchelpers.UUIDToStr(tenant.ID) - return i.ingestSingleton(tenantId, replayedEvent.Key, replayedEvent.Data, replayedEvent.AdditionalMetadata, nil) + return i.ingestSingleton(tenantId, replayedEvent.Key, replayedEvent.Data, 
replayedEvent.AdditionalMetadata, nil, nil) } -func eventToTaskV1(tenantId, eventExternalId, key string, data, additionalMeta []byte, priority *int32) (*msgqueue.Message, error) { +func eventToTaskV1(tenantId, eventExternalId, key string, data, additionalMeta []byte, priority *int32, scope *string) (*msgqueue.Message, error) { payloadTyped := tasktypes.UserEventTaskPayload{ EventExternalId: eventExternalId, EventKey: key, EventData: data, EventAdditionalMetadata: additionalMeta, EventPriority: priority, + EventScope: scope, } return msgqueue.NewTenantMessage( diff --git a/internal/services/ingestor/server.go b/internal/services/ingestor/server.go index 6c9bceb7b..f1868b4fb 100644 --- a/internal/services/ingestor/server.go +++ b/internal/services/ingestor/server.go @@ -27,7 +27,7 @@ func (i *IngestorImpl) Push(ctx context.Context, req *contracts.PushEventRequest if req.AdditionalMetadata != nil { additionalMeta = []byte(*req.AdditionalMetadata) } - event, err := i.IngestEvent(ctx, tenant, req.Key, []byte(req.Payload), additionalMeta, req.Priority) + event, err := i.IngestEvent(ctx, tenant, req.Key, []byte(req.Payload), additionalMeta, req.Priority, req.Scope) if err == metered.ErrResourceExhausted { return nil, status.Errorf(codes.ResourceExhausted, "resource exhausted: event limit exceeded for tenant") @@ -73,6 +73,7 @@ func (i *IngestorImpl) BulkPush(ctx context.Context, req *contracts.BulkPushEven Data: []byte(e.Payload), AdditionalMetadata: additionalMeta, Priority: e.Priority, + Scope: e.Scope, }) } @@ -276,12 +277,20 @@ func toEvent(e *dbsqlc.Event) (*contracts.Event, error) { tenantId := sqlchelpers.UUIDToStr(e.TenantId) eventId := sqlchelpers.UUIDToStr(e.ID) + var additionalMeta *string + + if e.AdditionalMetadata != nil { + additionalMetaStr := string(e.AdditionalMetadata) + additionalMeta = &additionalMetaStr + } + return &contracts.Event{ - TenantId: tenantId, - EventId: eventId, - Key: e.Key, - Payload: string(e.Data), - EventTimestamp: timestamppb.New(e.CreatedAt.Time), + TenantId: tenantId, + EventId: eventId, + Key: e.Key, + Payload: string(e.Data), + EventTimestamp: timestamppb.New(e.CreatedAt.Time), + AdditionalMetadata: additionalMeta, }, nil } diff --git a/internal/services/shared/tasktypes/v1/event.go b/internal/services/shared/tasktypes/v1/event.go index f9afb9e73..dfb74890a 100644 --- a/internal/services/shared/tasktypes/v1/event.go +++ b/internal/services/shared/tasktypes/v1/event.go @@ -8,11 +8,12 @@ import ( ) type UserEventTaskPayload struct { - EventExternalId string `json:"event_id" validate:"required,uuid"` - EventKey string `json:"event_key" validate:"required"` - EventData []byte `json:"event_data" validate:"required"` - EventAdditionalMetadata []byte `json:"event_additional_metadata"` - EventPriority *int32 `json:"event_priority,omitempty"` + EventExternalId string `json:"event_id" validate:"required,uuid"` + EventKey string `json:"event_key" validate:"required"` + EventData []byte `json:"event_data" validate:"required"` + EventAdditionalMetadata []byte `json:"event_additional_metadata"` + EventPriority *int32 `json:"event_priority,omitempty"` + EventScope *string `json:"event_scope,omitempty"` } func NewInternalEventMessage(tenantId string, timestamp time.Time, events ...v1.InternalTaskEvent) (*msgqueue.Message, error) { diff --git a/pkg/client/event.go b/pkg/client/event.go index 8cf63371a..79f33b97a 100644 --- a/pkg/client/event.go +++ b/pkg/client/event.go @@ -16,6 +16,8 @@ import ( type pushOpt struct { additionalMetadata map[string]string + priority 
*int32 + scope *string } type PushOpFunc func(*pushOpt) error @@ -36,6 +38,8 @@ type EventWithAdditionalMetadata struct { Event interface{} `json:"event"` AdditionalMetadata map[string]string `json:"metadata"` Key string `json:"key"` + Priority *int32 `json:"priority"` + Scope *string `json:"scope"` } type eventClientImpl struct { @@ -74,6 +78,20 @@ func WithEventMetadata(metadata map[string]string) PushOpFunc { } } +func WithEventPriority(priority *int32) PushOpFunc { + return func(r *pushOpt) error { + r.priority = priority + return nil + } +} + +func WithFilterScope(scope *string) PushOpFunc { + return func(r *pushOpt) error { + r.scope = scope + return nil + } +} + func (a *eventClientImpl) Push(ctx context.Context, eventKey string, payload interface{}, options ...PushOpFunc) error { key := client.ApplyNamespace(eventKey, &a.namespace) @@ -108,6 +126,8 @@ func (a *eventClientImpl) Push(ctx context.Context, eventKey string, payload int additionalMetaString := string(additionalMetaBytes) request.AdditionalMetadata = &additionalMetaString + request.Priority = opts.priority + request.Scope = opts.scope _, err = a.client.Push(a.ctx.newContext(ctx), &request) @@ -141,6 +161,8 @@ func (a *eventClientImpl) BulkPush(ctx context.Context, payload []EventWithAddit EventTimestamp: timestamppb.Now(), Payload: string(ePayload), AdditionalMetadata: &eMetadataString, + Priority: p.Priority, + Scope: p.Scope, }) } diff --git a/pkg/client/rest/gen.go b/pkg/client/rest/gen.go index e724a8f8e..0760f8c55 100644 --- a/pkg/client/rest/gen.go +++ b/pkg/client/rest/gen.go @@ -426,6 +426,9 @@ type CreateEventRequest struct { // Priority The priority of the event. Priority *int32 `json:"priority,omitempty"` + + // Scope The scope for event filtering. + Scope *string `json:"scope,omitempty"` } // CreateSNSIntegrationRequest defines model for CreateSNSIntegrationRequest. @@ -1191,6 +1194,21 @@ type V1CancelTaskRequest struct { Filter *V1TaskFilter `json:"filter,omitempty"` } +// V1CreateFilterRequest defines model for V1CreateFilterRequest. +type V1CreateFilterRequest struct { + // Expression The expression for the filter + Expression string `json:"expression"` + + // Payload The payload for the filter + Payload *map[string]interface{} `json:"payload,omitempty"` + + // Scope The scope associated with this filter. Used for subsetting candidate filters at evaluation time + Scope string `json:"scope"` + + // WorkflowId The workflow id + WorkflowId openapi_types.UUID `json:"workflowId"` +} + // V1DagChildren defines model for V1DagChildren. type V1DagChildren struct { Children *[]V1TaskSummary `json:"children,omitempty"` @@ -1236,6 +1254,31 @@ type V1EventWorkflowRunSummary struct { Succeeded int64 `json:"succeeded"` } +// V1Filter defines model for V1Filter. +type V1Filter struct { + // Expression The expression associated with this filter. + Expression string `json:"expression"` + Metadata APIResourceMeta `json:"metadata"` + + // Payload Additional payload data associated with the filter + Payload map[string]interface{} `json:"payload"` + + // Scope The scope associated with this filter. Used for subsetting candidate filters at evaluation time + Scope string `json:"scope"` + + // TenantId The ID of the tenant associated with this filter. + TenantId string `json:"tenantId"` + + // WorkflowId The workflow id associated with this filter. + WorkflowId openapi_types.UUID `json:"workflowId"` +} + +// V1FilterList defines model for V1FilterList. 
+type V1FilterList struct { + Pagination *PaginationResponse `json:"pagination,omitempty"` + Rows *[]V1Filter `json:"rows,omitempty"` +} + // V1LogLine defines model for V1LogLine. type V1LogLine struct { // Attempt The attempt number of the log line. @@ -1950,6 +1993,21 @@ type V1EventListParams struct { Keys *[]EventKey `form:"keys,omitempty" json:"keys,omitempty"` } +// V1FilterListParams defines parameters for V1FilterList. +type V1FilterListParams struct { + // Offset The number to skip + Offset *int64 `form:"offset,omitempty" json:"offset,omitempty"` + + // Limit The number to limit by + Limit *int64 `form:"limit,omitempty" json:"limit,omitempty"` + + // WorkflowIds The workflow ids to filter by + WorkflowIds *[]openapi_types.UUID `form:"workflowIds,omitempty" json:"workflowIds,omitempty"` + + // Scopes The scopes to subset candidate filters by + Scopes *[]string `form:"scopes,omitempty" json:"scopes,omitempty"` +} + // V1TaskListStatusMetricsParams defines parameters for V1TaskListStatusMetrics. type V1TaskListStatusMetricsParams struct { // Since The start time to get metrics for @@ -2304,6 +2362,9 @@ type WorkflowVersionGetParams struct { // AlertEmailGroupUpdateJSONRequestBody defines body for AlertEmailGroupUpdate for application/json ContentType. type AlertEmailGroupUpdateJSONRequestBody = UpdateTenantAlertEmailGroupRequest +// V1FilterCreateJSONRequestBody defines body for V1FilterCreate for application/json ContentType. +type V1FilterCreateJSONRequestBody = V1CreateFilterRequest + // V1TaskCancelJSONRequestBody defines body for V1TaskCancel for application/json ContentType. type V1TaskCancelJSONRequestBody = V1CancelTaskRequest @@ -2520,6 +2581,20 @@ type ClientInterface interface { // V1EventList request V1EventList(ctx context.Context, tenant openapi_types.UUID, params *V1EventListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1FilterList request + V1FilterList(ctx context.Context, tenant openapi_types.UUID, params *V1FilterListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1FilterCreateWithBody request with any body + V1FilterCreateWithBody(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + V1FilterCreate(ctx context.Context, tenant openapi_types.UUID, body V1FilterCreateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1FilterDelete request + V1FilterDelete(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) + + // V1FilterGet request + V1FilterGet(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) + // V1TaskListStatusMetrics request V1TaskListStatusMetrics(ctx context.Context, tenant openapi_types.UUID, params *V1TaskListStatusMetricsParams, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -3087,6 +3162,66 @@ func (c *Client) V1EventList(ctx context.Context, tenant openapi_types.UUID, par return c.Client.Do(req) } +func (c *Client) V1FilterList(ctx context.Context, tenant openapi_types.UUID, params *V1FilterListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1FilterListRequest(c.Server, tenant, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) 
V1FilterCreateWithBody(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1FilterCreateRequestWithBody(c.Server, tenant, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1FilterCreate(ctx context.Context, tenant openapi_types.UUID, body V1FilterCreateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1FilterCreateRequest(c.Server, tenant, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1FilterDelete(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1FilterDeleteRequest(c.Server, tenant, v1Filter) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) V1FilterGet(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewV1FilterGetRequest(c.Server, tenant, v1Filter) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) V1TaskListStatusMetrics(ctx context.Context, tenant openapi_types.UUID, params *V1TaskListStatusMetricsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewV1TaskListStatusMetricsRequest(c.Server, tenant, params) if err != nil { @@ -5271,6 +5406,239 @@ func NewV1EventListRequest(server string, tenant openapi_types.UUID, params *V1E return req, nil } +// NewV1FilterListRequest generates requests for V1FilterList +func NewV1FilterListRequest(server string, tenant openapi_types.UUID, params *V1FilterListParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "tenant", runtime.ParamLocationPath, tenant) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1/stable/tenants/%s/filters", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Offset != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "offset", runtime.ParamLocationQuery, *params.Offset); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Limit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "limit", runtime.ParamLocationQuery, *params.Limit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WorkflowIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "workflowIds", runtime.ParamLocationQuery, *params.WorkflowIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Scopes != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "scopes", runtime.ParamLocationQuery, *params.Scopes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewV1FilterCreateRequest calls the generic V1FilterCreate builder with application/json body +func NewV1FilterCreateRequest(server string, tenant openapi_types.UUID, body V1FilterCreateJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewV1FilterCreateRequestWithBody(server, tenant, "application/json", bodyReader) +} + +// NewV1FilterCreateRequestWithBody generates requests for V1FilterCreate with any type of body +func NewV1FilterCreateRequestWithBody(server string, tenant openapi_types.UUID, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "tenant", runtime.ParamLocationPath, tenant) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1/stable/tenants/%s/filters", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewV1FilterDeleteRequest generates requests for V1FilterDelete +func NewV1FilterDeleteRequest(server string, tenant openapi_types.UUID, v1Filter openapi_types.UUID) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "tenant", runtime.ParamLocationPath, tenant) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "v1-filter", runtime.ParamLocationPath, v1Filter) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1/stable/tenants/%s/filters/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewV1FilterGetRequest generates requests for V1FilterGet +func NewV1FilterGetRequest(server string, tenant openapi_types.UUID, v1Filter openapi_types.UUID) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "tenant", runtime.ParamLocationPath, tenant) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "v1-filter", runtime.ParamLocationPath, v1Filter) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/v1/stable/tenants/%s/filters/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewV1TaskListStatusMetricsRequest generates requests for V1TaskListStatusMetrics func NewV1TaskListStatusMetricsRequest(server string, tenant openapi_types.UUID, params *V1TaskListStatusMetricsParams) (*http.Request, error) { var err error @@ -10457,6 +10825,20 @@ type ClientWithResponsesInterface interface { // V1EventListWithResponse request V1EventListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1EventListParams, reqEditors ...RequestEditorFn) (*V1EventListResponse, error) + // V1FilterListWithResponse request + V1FilterListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1FilterListParams, reqEditors ...RequestEditorFn) (*V1FilterListResponse, error) + + // V1FilterCreateWithBodyWithResponse request with any body + V1FilterCreateWithBodyWithResponse(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1FilterCreateResponse, error) + + V1FilterCreateWithResponse(ctx context.Context, tenant openapi_types.UUID, body V1FilterCreateJSONRequestBody, reqEditors ...RequestEditorFn) (*V1FilterCreateResponse, error) + + // V1FilterDeleteWithResponse request + V1FilterDeleteWithResponse(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*V1FilterDeleteResponse, error) + + // V1FilterGetWithResponse request + V1FilterGetWithResponse(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*V1FilterGetResponse, error) + // V1TaskListStatusMetricsWithResponse request V1TaskListStatusMetricsWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1TaskListStatusMetricsParams, reqEditors ...RequestEditorFn) (*V1TaskListStatusMetricsResponse, error) @@ -11232,6 +11614,104 @@ func (r V1EventListResponse) StatusCode() int { return 0 } +type V1FilterListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1FilterList + JSON400 *APIErrors + JSON403 *APIErrors +} + +// Status returns HTTPResponse.Status +func (r V1FilterListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1FilterListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1FilterCreateResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1Filter + JSON400 *APIErrors + JSON403 *APIErrors + JSON404 *APIErrors +} + +// Status returns HTTPResponse.Status +func (r V1FilterCreateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1FilterCreateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1FilterDeleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1Filter + JSON400 *APIErrors + JSON403 *APIErrors + JSON404 *APIErrors +} + +// Status returns HTTPResponse.Status +func (r V1FilterDeleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode 
returns HTTPResponse.StatusCode +func (r V1FilterDeleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type V1FilterGetResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *V1Filter + JSON400 *APIErrors + JSON403 *APIErrors +} + +// Status returns HTTPResponse.Status +func (r V1FilterGetResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r V1FilterGetResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + type V1TaskListStatusMetricsResponse struct { Body []byte HTTPResponse *http.Response @@ -13596,6 +14076,50 @@ func (c *ClientWithResponses) V1EventListWithResponse(ctx context.Context, tenan return ParseV1EventListResponse(rsp) } +// V1FilterListWithResponse request returning *V1FilterListResponse +func (c *ClientWithResponses) V1FilterListWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1FilterListParams, reqEditors ...RequestEditorFn) (*V1FilterListResponse, error) { + rsp, err := c.V1FilterList(ctx, tenant, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1FilterListResponse(rsp) +} + +// V1FilterCreateWithBodyWithResponse request with arbitrary body returning *V1FilterCreateResponse +func (c *ClientWithResponses) V1FilterCreateWithBodyWithResponse(ctx context.Context, tenant openapi_types.UUID, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*V1FilterCreateResponse, error) { + rsp, err := c.V1FilterCreateWithBody(ctx, tenant, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1FilterCreateResponse(rsp) +} + +func (c *ClientWithResponses) V1FilterCreateWithResponse(ctx context.Context, tenant openapi_types.UUID, body V1FilterCreateJSONRequestBody, reqEditors ...RequestEditorFn) (*V1FilterCreateResponse, error) { + rsp, err := c.V1FilterCreate(ctx, tenant, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1FilterCreateResponse(rsp) +} + +// V1FilterDeleteWithResponse request returning *V1FilterDeleteResponse +func (c *ClientWithResponses) V1FilterDeleteWithResponse(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*V1FilterDeleteResponse, error) { + rsp, err := c.V1FilterDelete(ctx, tenant, v1Filter, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1FilterDeleteResponse(rsp) +} + +// V1FilterGetWithResponse request returning *V1FilterGetResponse +func (c *ClientWithResponses) V1FilterGetWithResponse(ctx context.Context, tenant openapi_types.UUID, v1Filter openapi_types.UUID, reqEditors ...RequestEditorFn) (*V1FilterGetResponse, error) { + rsp, err := c.V1FilterGet(ctx, tenant, v1Filter, reqEditors...) + if err != nil { + return nil, err + } + return ParseV1FilterGetResponse(rsp) +} + // V1TaskListStatusMetricsWithResponse request returning *V1TaskListStatusMetricsResponse func (c *ClientWithResponses) V1TaskListStatusMetricsWithResponse(ctx context.Context, tenant openapi_types.UUID, params *V1TaskListStatusMetricsParams, reqEditors ...RequestEditorFn) (*V1TaskListStatusMetricsResponse, error) { rsp, err := c.V1TaskListStatusMetrics(ctx, tenant, params, reqEditors...) 
@@ -15328,6 +15852,180 @@ func ParseV1EventListResponse(rsp *http.Response) (*V1EventListResponse, error) return response, nil } +// ParseV1FilterListResponse parses an HTTP response from a V1FilterListWithResponse call +func ParseV1FilterListResponse(rsp *http.Response) (*V1FilterListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1FilterListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1FilterList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + } + + return response, nil +} + +// ParseV1FilterCreateResponse parses an HTTP response from a V1FilterCreateWithResponse call +func ParseV1FilterCreateResponse(rsp *http.Response) (*V1FilterCreateResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1FilterCreateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1Filter + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return response, nil +} + +// ParseV1FilterDeleteResponse parses an HTTP response from a V1FilterDeleteWithResponse call +func ParseV1FilterDeleteResponse(rsp *http.Response) (*V1FilterDeleteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1FilterDeleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1Filter + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err 
!= nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return response, nil +} + +// ParseV1FilterGetResponse parses an HTTP response from a V1FilterGetWithResponse call +func ParseV1FilterGetResponse(rsp *http.Response) (*V1FilterGetResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &V1FilterGetResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest V1Filter + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APIErrors + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + } + + return response, nil +} + // ParseV1TaskListStatusMetricsResponse parses an HTTP response from a V1TaskListStatusMetricsWithResponse call func ParseV1TaskListStatusMetricsResponse(rsp *http.Response) (*V1TaskListStatusMetricsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) diff --git a/pkg/repository/event.go b/pkg/repository/event.go index ac07c532b..1774a649f 100644 --- a/pkg/repository/event.go +++ b/pkg/repository/event.go @@ -30,6 +30,9 @@ type CreateEventOpts struct { // (optional) the event priority Priority *int32 `validate:"omitempty,min=1,max=3"` + + // (optional) the event scope + Scope *string `validate:"omitempty"` } type ListEventOpts struct { diff --git a/pkg/repository/postgres/event.go b/pkg/repository/postgres/event.go index 9bebb11bf..d12725f8e 100644 --- a/pkg/repository/postgres/event.go +++ b/pkg/repository/postgres/event.go @@ -272,6 +272,8 @@ func (r *eventEngineRepository) CreateEvent(ctx context.Context, opts *repositor Data: opts.Data, AdditionalMetadata: opts.AdditionalMetadata, ReplayedEvent: opts.ReplayedEvent, + Priority: opts.Priority, + Scope: opts.Scope, } event, err := r.bulkUserEventBuffer.FireAndWait(ctx, opts.TenantId, &createOpts) diff --git a/pkg/repository/v1/filters.go b/pkg/repository/v1/filters.go new file mode 100644 index 000000000..0fddc7e92 --- /dev/null +++ b/pkg/repository/v1/filters.go @@ -0,0 +1,78 @@ +package v1 + +import ( + "context" + + "github.com/hatchet-dev/hatchet/pkg/repository/postgres/sqlchelpers" + "github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1" + "github.com/jackc/pgx/v5/pgtype" +) + +type FilterRepository interface { + CreateFilter(ctx context.Context, tenantId string, params CreateFilterOpts) (*sqlcv1.V1Filter, error) + ListFilters(ctx context.Context, tenantId string, params ListFiltersOpts) ([]*sqlcv1.V1Filter, error) + DeleteFilter(ctx context.Context, tenantId, filterId string) (*sqlcv1.V1Filter, error) + GetFilter(ctx context.Context, tenantId, filterId string) (*sqlcv1.V1Filter, error) +} + +type filterRepository struct { + *sharedRepository +} + +func newFilterRepository(shared *sharedRepository) FilterRepository { + return &filterRepository{ + 
sharedRepository: shared, + } +} + +type CreateFilterOpts struct { + Workflowid pgtype.UUID `json:"workflowid" validate:"required,uuid"` + Scope string `json:"scope" validate:"required"` + Expression string `json:"expression" validate:"required"` + Payload []byte `json:"payload"` +} + +func (r *filterRepository) CreateFilter(ctx context.Context, tenantId string, opts CreateFilterOpts) (*sqlcv1.V1Filter, error) { + return r.queries.CreateFilter(ctx, r.pool, sqlcv1.CreateFilterParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + Workflowid: opts.Workflowid, + Scope: opts.Scope, + Expression: opts.Expression, + Payload: opts.Payload, + }) +} + +type ListFiltersOpts struct { + WorkflowIds []pgtype.UUID `json:"workflow_ids" validate:"required,dive,uuid"` + Scopes []*string `json:"scopes"` + FilterLimit *int64 `json:"limit" validate:"omitnil,min=1"` + FilterOffset *int64 `json:"offset" validate:"omitnil,min=0"` +} + +func (r *filterRepository) ListFilters(ctx context.Context, tenantId string, opts ListFiltersOpts) ([]*sqlcv1.V1Filter, error) { + if err := r.v.Validate(opts); err != nil { + return nil, err + } + + return r.queries.ListFilters(ctx, r.pool, sqlcv1.ListFiltersParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + Workflowids: opts.WorkflowIds, + Scopes: opts.Scopes, + FilterLimit: opts.FilterLimit, + FilterOffset: opts.FilterOffset, + }) +} + +func (r *filterRepository) DeleteFilter(ctx context.Context, tenantId, filterId string) (*sqlcv1.V1Filter, error) { + return r.queries.DeleteFilter(ctx, r.pool, sqlcv1.DeleteFilterParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + ID: sqlchelpers.UUIDFromStr(filterId), + }) +} + +func (r *filterRepository) GetFilter(ctx context.Context, tenantId, filterId string) (*sqlcv1.V1Filter, error) { + return r.queries.GetFilter(ctx, r.pool, sqlcv1.GetFilterParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + ID: sqlchelpers.UUIDFromStr(filterId), + }) +} diff --git a/pkg/repository/v1/input.go b/pkg/repository/v1/input.go index 755cda0ef..c1ec72d37 100644 --- a/pkg/repository/v1/input.go +++ b/pkg/repository/v1/input.go @@ -8,6 +8,8 @@ type TaskInput struct { Input map[string]interface{} `json:"input"` TriggerData *MatchData `json:"trigger_datas"` + + FilterPayload map[string]interface{} `json:"filter_payload"` } func (s *sharedRepository) DesiredWorkerId(t *TaskInput) *string { @@ -34,7 +36,7 @@ func (s *sharedRepository) newTaskInputFromExistingBytes(inputBytes []byte) *Tas return i } -func (s *sharedRepository) newTaskInput(inputBytes []byte, triggerData *MatchData) *TaskInput { +func (s *sharedRepository) newTaskInput(inputBytes []byte, triggerData *MatchData, filterPayload []byte) *TaskInput { var input map[string]interface{} if len(inputBytes) > 0 { @@ -45,9 +47,18 @@ func (s *sharedRepository) newTaskInput(inputBytes []byte, triggerData *MatchDat } } + var filterPayloadMap map[string]interface{} + if len(filterPayload) > 0 { + err := json.Unmarshal(filterPayload, &filterPayloadMap) + if err != nil { + s.l.Error().Err(err).Msg("failed to unmarshal event filter payload bytes") + } + } + return &TaskInput{ - Input: input, - TriggerData: triggerData, + Input: input, + TriggerData: triggerData, + FilterPayload: filterPayloadMap, } } @@ -101,6 +112,8 @@ func (s *sharedRepository) ToV1StepRunData(t *TaskInput) *V1StepRunData { } } + triggers["filter_payload"] = t.FilterPayload + return &V1StepRunData{ Input: t.Input, TriggeredBy: "manual", diff --git a/pkg/repository/v1/match.go b/pkg/repository/v1/match.go index 53168a783..403434801 
100644 --- a/pkg/repository/v1/match.go +++ b/pkg/repository/v1/match.go @@ -485,7 +485,7 @@ func (m *sharedRepository) processEventMatches(ctx context.Context, tx sqlcv1.DB switch matchData.Action() { case sqlcv1.V1MatchConditionActionQUEUE: - opt.Input = m.newTaskInput(input, matchData) + opt.Input = m.newTaskInput(input, matchData, nil) opt.InitialState = sqlcv1.V1TaskInitialStateQUEUED case sqlcv1.V1MatchConditionActionCANCEL: opt.InitialState = sqlcv1.V1TaskInitialStateCANCELLED @@ -505,7 +505,7 @@ func (m *sharedRepository) processEventMatches(ctx context.Context, tx sqlcv1.DB switch matchData.Action() { case sqlcv1.V1MatchConditionActionQUEUE: - opt.Input = m.newTaskInput(input, matchData) + opt.Input = m.newTaskInput(input, matchData, nil) opt.DesiredWorkerId = m.DesiredWorkerId(opt.Input) opt.InitialState = sqlcv1.V1TaskInitialStateQUEUED case sqlcv1.V1MatchConditionActionCANCEL: diff --git a/pkg/repository/v1/repository.go b/pkg/repository/v1/repository.go index 7581267e8..08cb99dd1 100644 --- a/pkg/repository/v1/repository.go +++ b/pkg/repository/v1/repository.go @@ -22,6 +22,7 @@ type Repository interface { Workers() WorkerRepository Workflows() WorkflowRepository Ticker() TickerRepository + Filters() FilterRepository } type repositoryImpl struct { @@ -34,6 +35,7 @@ type repositoryImpl struct { workers WorkerRepository workflows WorkflowRepository ticker TickerRepository + filters FilterRepository } func NewRepository(pool *pgxpool.Pool, l *zerolog.Logger, taskRetentionPeriod, olapRetentionPeriod time.Duration, maxInternalRetryCount int32, entitlements repository.EntitlementsRepository) (Repository, func() error) { @@ -57,6 +59,7 @@ func NewRepository(pool *pgxpool.Pool, l *zerolog.Logger, taskRetentionPeriod, o workers: newWorkerRepository(shared), workflows: newWorkflowRepository(shared), ticker: newTickerRepository(shared), + filters: newFilterRepository(shared), } return impl, func() error { @@ -107,3 +110,7 @@ func (r *repositoryImpl) Workflows() WorkflowRepository { func (r *repositoryImpl) Ticker() TickerRepository { return r.ticker } + +func (r *repositoryImpl) Filters() FilterRepository { + return r.filters +} diff --git a/pkg/repository/v1/sqlcv1/filters-overwrite.sql b/pkg/repository/v1/sqlcv1/filters-overwrite.sql new file mode 100644 index 000000000..503dc35b5 --- /dev/null +++ b/pkg/repository/v1/sqlcv1/filters-overwrite.sql @@ -0,0 +1,14 @@ +-- name: ListFilters :many +WITH inputs AS ( + SELECT + UNNEST(@workflowIds::UUID[]) AS workflow_id, + -- NOTE: this is nullable, so sqlc doesn't support casting to a type + UNNEST(@scopes::TEXT[]) AS scope +) + +SELECT f.* +FROM v1_filter f +JOIN inputs i ON (f.tenant_id, f.workflow_id, f.scope) = (@tenantId::UUID, i.workflow_id, i.scope) +LIMIT COALESCE(sqlc.narg('filterLimit')::BIGINT, 20000) +OFFSET COALESCE(sqlc.narg('filterOffset')::BIGINT, 0) +; diff --git a/pkg/repository/v1/sqlcv1/filters-overwrite.sql.go b/pkg/repository/v1/sqlcv1/filters-overwrite.sql.go new file mode 100644 index 000000000..a64a1bebd --- /dev/null +++ b/pkg/repository/v1/sqlcv1/filters-overwrite.sql.go @@ -0,0 +1,63 @@ +package sqlcv1 + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const listFilters = `-- name: ListFilters :many +WITH inputs AS ( + SELECT + UNNEST($2::UUID[]) AS workflow_id, + UNNEST($3::TEXT[]) AS scope +) + +SELECT f.id, f.tenant_id, f.workflow_id, f.scope, f.expression, f.payload, f.inserted_at, f.updated_at +FROM v1_filter f +JOIN inputs i ON (f.tenant_id, f.workflow_id, f.scope) = ($1::UUID, i.workflow_id, 
i.scope) +LIMIT COALESCE($4::BIGINT, 20000) +OFFSET COALESCE($5::BIGINT, 0)` + +type ListFiltersParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + Workflowids []pgtype.UUID `json:"workflowids"` + Scopes []*string `json:"scopes"` + FilterLimit *int64 `json:"limit"` + FilterOffset *int64 `json:"offset"` +} + +func (q *Queries) ListFilters(ctx context.Context, db DBTX, arg ListFiltersParams) ([]*V1Filter, error) { + rows, err := db.Query(ctx, listFilters, + arg.Tenantid, + arg.Workflowids, + arg.Scopes, + arg.FilterLimit, + arg.FilterOffset, + ) + if err != nil { + return nil, err + } + defer rows.Close() + var items []*V1Filter + for rows.Next() { + var i V1Filter + if err := rows.Scan( + &i.ID, + &i.TenantID, + &i.WorkflowID, + &i.Scope, + &i.Expression, + &i.Payload, + &i.InsertedAt, + &i.UpdatedAt, + ); err != nil { + return nil, err + } + items = append(items, &i) + } + if err := rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/pkg/repository/v1/sqlcv1/filters.sql b/pkg/repository/v1/sqlcv1/filters.sql new file mode 100644 index 000000000..40d476fb2 --- /dev/null +++ b/pkg/repository/v1/sqlcv1/filters.sql @@ -0,0 +1,37 @@ +-- name: CreateFilter :one +INSERT INTO v1_filter ( + tenant_id, + workflow_id, + scope, + expression, + payload +) VALUES ( + @tenantId::UUID, + @workflowId::UUID, + @scope::TEXT, + @expression::TEXT, + @payload::JSONB +) +ON CONFLICT (tenant_id, workflow_id, scope, expression) DO UPDATE +SET + payload = EXCLUDED.payload, + updated_at = NOW() +WHERE v1_filter.tenant_id = @tenantId::UUID + AND v1_filter.workflow_id = @workflowId::UUID + AND v1_filter.scope = @scope::TEXT + AND v1_filter.expression = @expression::TEXT +RETURNING *; + +-- name: DeleteFilter :one +DELETE FROM v1_filter +WHERE + tenant_id = @tenantId::UUID + AND id = @id::UUID +RETURNING *; + +-- name: GetFilter :one +SELECT * +FROM v1_filter +WHERE + tenant_id = @tenantId::UUID + AND id = @id::UUID; diff --git a/pkg/repository/v1/sqlcv1/filters.sql.go b/pkg/repository/v1/sqlcv1/filters.sql.go new file mode 100644 index 000000000..1a6c5cdfa --- /dev/null +++ b/pkg/repository/v1/sqlcv1/filters.sql.go @@ -0,0 +1,125 @@ +// Code generated by sqlc. DO NOT EDIT. 
+// versions: +// sqlc v1.24.0 +// source: filters.sql + +package sqlcv1 + +import ( + "context" + + "github.com/jackc/pgx/v5/pgtype" +) + +const createFilter = `-- name: CreateFilter :one +INSERT INTO v1_filter ( + tenant_id, + workflow_id, + scope, + expression, + payload +) VALUES ( + $1::UUID, + $2::UUID, + $3::TEXT, + $4::TEXT, + $5::JSONB +) +ON CONFLICT (tenant_id, workflow_id, scope, expression) DO UPDATE +SET + payload = EXCLUDED.payload, + updated_at = NOW() +WHERE v1_filter.tenant_id = $1::UUID + AND v1_filter.workflow_id = $2::UUID + AND v1_filter.scope = $3::TEXT + AND v1_filter.expression = $4::TEXT +RETURNING id, tenant_id, workflow_id, scope, expression, payload, inserted_at, updated_at +` + +type CreateFilterParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + Workflowid pgtype.UUID `json:"workflowid"` + Scope string `json:"scope"` + Expression string `json:"expression"` + Payload []byte `json:"payload"` +} + +func (q *Queries) CreateFilter(ctx context.Context, db DBTX, arg CreateFilterParams) (*V1Filter, error) { + row := db.QueryRow(ctx, createFilter, + arg.Tenantid, + arg.Workflowid, + arg.Scope, + arg.Expression, + arg.Payload, + ) + var i V1Filter + err := row.Scan( + &i.ID, + &i.TenantID, + &i.WorkflowID, + &i.Scope, + &i.Expression, + &i.Payload, + &i.InsertedAt, + &i.UpdatedAt, + ) + return &i, err +} + +const deleteFilter = `-- name: DeleteFilter :one +DELETE FROM v1_filter +WHERE + tenant_id = $1::UUID + AND id = $2::UUID +RETURNING id, tenant_id, workflow_id, scope, expression, payload, inserted_at, updated_at +` + +type DeleteFilterParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + ID pgtype.UUID `json:"id"` +} + +func (q *Queries) DeleteFilter(ctx context.Context, db DBTX, arg DeleteFilterParams) (*V1Filter, error) { + row := db.QueryRow(ctx, deleteFilter, arg.Tenantid, arg.ID) + var i V1Filter + err := row.Scan( + &i.ID, + &i.TenantID, + &i.WorkflowID, + &i.Scope, + &i.Expression, + &i.Payload, + &i.InsertedAt, + &i.UpdatedAt, + ) + return &i, err +} + +const getFilter = `-- name: GetFilter :one +SELECT id, tenant_id, workflow_id, scope, expression, payload, inserted_at, updated_at +FROM v1_filter +WHERE + tenant_id = $1::UUID + AND id = $2::UUID +` + +type GetFilterParams struct { + Tenantid pgtype.UUID `json:"tenantid"` + ID pgtype.UUID `json:"id"` +} + +func (q *Queries) GetFilter(ctx context.Context, db DBTX, arg GetFilterParams) (*V1Filter, error) { + row := db.QueryRow(ctx, getFilter, arg.Tenantid, arg.ID) + var i V1Filter + err := row.Scan( + &i.ID, + &i.TenantID, + &i.WorkflowID, + &i.Scope, + &i.Expression, + &i.Payload, + &i.InsertedAt, + &i.UpdatedAt, + ) + return &i, err +} diff --git a/pkg/repository/v1/sqlcv1/models.go b/pkg/repository/v1/sqlcv1/models.go index cf3d96bb4..f3915ba20 100644 --- a/pkg/repository/v1/sqlcv1/models.go +++ b/pkg/repository/v1/sqlcv1/models.go @@ -2503,6 +2503,17 @@ type V1EventsOlap struct { AdditionalMetadata []byte `json:"additional_metadata"` } +type V1Filter struct { + ID pgtype.UUID `json:"id"` + TenantID pgtype.UUID `json:"tenant_id"` + WorkflowID pgtype.UUID `json:"workflow_id"` + Scope string `json:"scope"` + Expression string `json:"expression"` + Payload []byte `json:"payload"` + InsertedAt pgtype.Timestamptz `json:"inserted_at"` + UpdatedAt pgtype.Timestamptz `json:"updated_at"` +} + type V1LogLine struct { ID int64 `json:"id"` CreatedAt pgtype.Timestamptz `json:"created_at"` diff --git a/pkg/repository/v1/sqlcv1/sqlc.yaml b/pkg/repository/v1/sqlcv1/sqlc.yaml index fa34796c7..a0d313f2b 100644 
--- a/pkg/repository/v1/sqlcv1/sqlc.yaml +++ b/pkg/repository/v1/sqlcv1/sqlc.yaml @@ -18,6 +18,7 @@ sql: - log_line.sql - sleep.sql - ticker.sql + - filters.sql schema: - ../../../../sql/schema/v0.sql - ../../../../sql/schema/v1-core.sql diff --git a/pkg/repository/v1/task.go b/pkg/repository/v1/task.go index 1e3984cc5..a7ca3815d 100644 --- a/pkg/repository/v1/task.go +++ b/pkg/repository/v1/task.go @@ -31,6 +31,8 @@ type CreateTaskOpts struct { // (required) the input bytes to the task Input *TaskInput + FilterPayload []byte + // (required) the step index for the task StepIndex int diff --git a/pkg/repository/v1/trigger.go b/pkg/repository/v1/trigger.go index d419b5545..d001cab11 100644 --- a/pkg/repository/v1/trigger.go +++ b/pkg/repository/v1/trigger.go @@ -12,6 +12,7 @@ import ( "github.com/jackc/pgx/v5/pgtype" "github.com/rs/zerolog" + "github.com/hatchet-dev/hatchet/internal/cel" "github.com/hatchet-dev/hatchet/pkg/repository/postgres/dbsqlc" "github.com/hatchet-dev/hatchet/pkg/repository/postgres/sqlchelpers" "github.com/hatchet-dev/hatchet/pkg/repository/v1/sqlcv1" @@ -27,6 +28,8 @@ type EventTriggerOpts struct { AdditionalMetadata []byte Priority *int32 + + Scope *string } type TriggerTaskData struct { @@ -116,6 +119,61 @@ type TriggerFromEventsResult struct { EventExternalIdToRuns map[string][]*Run } +type TriggerDecision struct { + ShouldTrigger bool + FilterPayload []byte +} + +func (r *TriggerRepositoryImpl) makeTriggerDecision(ctx context.Context, filters []*sqlcv1.V1Filter, opt EventTriggerOpts) TriggerDecision { + if len(filters) == 0 { + // If no filters were found, we should trigger the workflow + return TriggerDecision{ + ShouldTrigger: true, + FilterPayload: nil, + } + } + + for _, filterPtr := range filters { + if filterPtr == nil { + continue + } + + filter := *filterPtr + + if filter.Expression != "" { + shouldTrigger, err := r.processWorkflowExpression(ctx, filter.Expression, opt, filter.Payload) + + if err != nil { + r.l.Error(). + Err(err). + Str("expression", filter.Expression). + Msg("Failed to evaluate workflow expression") + + // If we fail to parse the expression, we should not run the workflow. + // See: https://github.com/hatchet-dev/hatchet/pull/1676#discussion_r2073790939 + return TriggerDecision{ + ShouldTrigger: false, + FilterPayload: filter.Payload, + } + } + + if shouldTrigger { + return TriggerDecision{ + ShouldTrigger: true, + FilterPayload: filter.Payload, + } + } + } + } + + // If we reach here, we haven't returned yet meaning we haven't found + // an expression that evaluates to `true`. 
+ return TriggerDecision{ + ShouldTrigger: false, + FilterPayload: nil, + } +} + func (r *TriggerRepositoryImpl) TriggerFromEvents(ctx context.Context, tenantId string, opts []EventTriggerOpts) (*TriggerFromEventsResult, error) { pre, post := r.m.Meter(ctx, dbsqlc.LimitResourceEVENT, tenantId, int32(len(opts))) // nolint: gosec @@ -152,8 +210,8 @@ func (r *TriggerRepositoryImpl) TriggerFromEvents(ctx context.Context, tenantId return nil, fmt.Errorf("failed to list workflows for events: %w", err) } - // each (workflowVersionId, eventKey, opt) is a separate workflow that we need to create - triggerOpts := make([]triggerTuple, 0) + workflowIds := make([]pgtype.UUID, 0) + scopes := make([]*string, 0) externalIdToEventId := make(map[string]string) @@ -165,6 +223,46 @@ func (r *TriggerRepositoryImpl) TriggerFromEvents(ctx context.Context, tenantId } for _, opt := range opts { + workflowIds = append(workflowIds, workflow.WorkflowId) + scopes = append(scopes, opt.Scope) + } + } + + filters, err := r.queries.ListFilters(ctx, r.pool, sqlcv1.ListFiltersParams{ + Tenantid: sqlchelpers.UUIDFromStr(tenantId), + Workflowids: workflowIds, + Scopes: scopes, + }) + + if err != nil { + return nil, fmt.Errorf("failed to list filters: %w", err) + } + + workflowIdToFilters := make(map[string][]*sqlcv1.V1Filter) + + for _, filter := range filters { + workflowIdToFilters[filter.WorkflowID.String()] = append(workflowIdToFilters[filter.WorkflowID.String()], filter) + } + + // each (workflowVersionId, eventKey, opt) is a separate workflow that we need to create + triggerOpts := make([]triggerTuple, 0) + + for _, workflow := range workflowVersionIdsAndEventKeys { + opts, ok := eventKeysToOpts[workflow.EventKey] + + if !ok { + continue + } + + filters := workflowIdToFilters[sqlchelpers.UUIDToStr(workflow.WorkflowId)] + + for _, opt := range opts { + triggerDecision := r.makeTriggerDecision(ctx, filters, opt) + + if !triggerDecision.ShouldTrigger { + continue + } + triggerConverter := &TriggeredByEvent{ l: r.l, eventID: opt.ExternalId, @@ -182,6 +280,7 @@ func (r *TriggerRepositoryImpl) TriggerFromEvents(ctx context.Context, tenantId input: opt.Data, additionalMetadata: additionalMetadata, priority: opt.Priority, + filterPayload: triggerDecision.FilterPayload, }) externalIdToEventId[externalId] = opt.ExternalId @@ -419,6 +518,8 @@ type triggerTuple struct { input []byte + filterPayload []byte + additionalMetadata []byte desiredWorkerId *string @@ -801,7 +902,7 @@ func (r *TriggerRepositoryImpl) triggerWorkflows(ctx context.Context, tenantId s ExternalId: taskExternalId, WorkflowRunId: tuple.externalId, StepId: sqlchelpers.UUIDToStr(step.ID), - Input: r.newTaskInput(tuple.input, nil), + Input: r.newTaskInput(tuple.input, nil, tuple.filterPayload), AdditionalMetadata: tuple.additionalMetadata, InitialState: sqlcv1.V1TaskInitialStateQUEUED, DesiredWorkerId: tuple.desiredWorkerId, @@ -1471,3 +1572,55 @@ func orderSteps(steps []*sqlcv1.ListStepsByWorkflowVersionIdsRow) []*sqlcv1.List return steps } + +func (r *sharedRepository) processWorkflowExpression(ctx context.Context, expression string, opt EventTriggerOpts, filterPayload []byte) (bool, error) { + var inputData map[string]interface{} + if opt.Data != nil { + err := json.Unmarshal(opt.Data, &inputData) + if err != nil { + return false, fmt.Errorf("failed to unmarshal input data: %w", err) + } + } else { + inputData = make(map[string]interface{}) + } + + var additionalMetadata map[string]interface{} + if opt.AdditionalMetadata != nil { + err := 
json.Unmarshal(opt.AdditionalMetadata, &additionalMetadata) + if err != nil { + return false, fmt.Errorf("failed to unmarshal additional metadata: %w", err) + } + } else { + additionalMetadata = make(map[string]interface{}) + } + + payload := make(map[string]interface{}) + if filterPayload != nil { + err := json.Unmarshal(filterPayload, &payload) + + if err != nil { + return false, fmt.Errorf("failed to unmarshal filter payload: %w", err) + } + } + + match, err := r.celParser.EvaluateEventExpression( + expression, + cel.NewInput( + cel.WithInput(inputData), + cel.WithAdditionalMetadata(additionalMetadata), + cel.WithPayload(payload), + cel.WithEventID(opt.ExternalId), + ), + ) + + if err != nil { + r.l.Warn(). + Err(err). + Str("expression", expression). + Msg("Failed to evaluate event expression") + + return false, err + } + + return match, nil +} diff --git a/sdks/python/CHANGELOG.md b/sdks/python/CHANGELOG.md new file mode 100644 index 000000000..abe3aa585 --- /dev/null +++ b/sdks/python/CHANGELOG.md @@ -0,0 +1,21 @@ +# Changelog + +All notable changes to Hatchet's Python SDK will be documented in this changelog. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [1.10.0] - 2025-05-16 + +### Added + +- The main `Hatchet` client now has a `filters` attribute (a `FiltersClient`) that wraps basic CRUD operations for managing filters. +- Events can now be pushed with a `priority` attribute, which sets the priority of the runs triggered by the event. +- There are new `list` and `aio_list` methods on the `Events` client for listing a tenant's events. +- Workflow runs can now be filtered by `triggering_event_external_id`, making it possible to see the runs triggered by a specific event. +- There is now an `id` property on all `Workflow` objects (`Workflow` created by `hatchet.workflow` and `Standalone` created by `hatchet.task`) that returns the ID (UUID) of the workflow. +- Events can now be pushed with a `scope` parameter. A scope is required in order for filters to apply: it narrows down which filters are considered when triggering workflows from the event. + +### Changed + +- The `name` parameter to `hatchet.task` and `hatchet.durable_task` is now optional. If not provided, the task name will be the same as the function name.
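For reviewers, a minimal end-to-end sketch of how the new pieces fit together, assembled from the test suite further below. The scope and payload values are illustrative; the client methods, the `PushEventOptions` fields, and the CEL variables `input` and `payload` are the ones exercised in `examples/events/test_event.py`:

    import asyncio

    from examples.events.worker import event_workflow
    from hatchet_sdk import Hatchet
    from hatchet_sdk.clients.events import PushEventOptions

    hatchet = Hatchet()

    async def main() -> None:
        # Register a filter: scoped "user:create" events trigger EventWorkflow
        # only when the event payload has should_skip == false.
        f = await hatchet.filters.aio_create(
            workflow_id=event_workflow.id,
            expression="input.should_skip == false",
            scope="my-scope",  # illustrative; any string producers and filters agree on
            payload={"customer": "acme"},  # exposed to the expression as `payload`
        )

        # Push an event into that scope; the filter decides whether a run is created.
        await hatchet.event.aio_push(
            "user:create",
            {"should_skip": False},
            options=PushEventOptions(scope="my-scope", priority=1),
        )

        await hatchet.filters.aio_delete(f.metadata.id)

    asyncio.run(main())

Note the semantics implemented in `trigger.go` above: if no filter is registered for a (workflow, scope) pair, the event triggers the workflow unconditionally; once filters exist, at least one filter expression must evaluate to true.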
diff --git a/sdks/python/docs/feature-clients/filters.md b/sdks/python/docs/feature-clients/filters.md new file mode 100644 index 000000000..f21daa6ae --- /dev/null +++ b/sdks/python/docs/feature-clients/filters.md @@ -0,0 +1,3 @@ +# Filters Client + +::: features.filters.FiltersClient diff --git a/sdks/python/examples/events/test_event.py b/sdks/python/examples/events/test_event.py index 2a76bb748..d06313f25 100644 --- a/sdks/python/examples/events/test_event.py +++ b/sdks/python/examples/events/test_event.py @@ -1,40 +1,198 @@ -import pytest +import asyncio +import json +from contextlib import asynccontextmanager +from typing import AsyncGenerator, cast +from uuid import uuid4 -from hatchet_sdk.clients.events import BulkPushEventOptions, BulkPushEventWithMetadata +import pytest +from pydantic import BaseModel + +from examples.events.worker import EventWorkflowInput, event_workflow +from hatchet_sdk.clients.events import ( + BulkPushEventOptions, + BulkPushEventWithMetadata, + PushEventOptions, +) +from hatchet_sdk.clients.rest.models.v1_task_status import V1TaskStatus +from hatchet_sdk.clients.rest.models.v1_task_summary import V1TaskSummary +from hatchet_sdk.contracts.events_pb2 import Event from hatchet_sdk.hatchet import Hatchet +class ProcessedEvent(BaseModel): + id: str + payload: dict[str, str | bool] + meta: dict[str, str | bool | int] + should_have_runs: bool + test_run_id: str + + def __hash__(self) -> int: + return hash(self.model_dump_json()) + + +@asynccontextmanager +async def event_filter( + hatchet: Hatchet, + test_run_id: str, + expression: str | None = None, + payload: dict[str, str] = {}, +) -> AsyncGenerator[None, None]: + expression = ( + expression + or f"input.should_skip == false && payload.testRunId == '{test_run_id}'" + ) + + f = await hatchet.filters.aio_create( + workflow_id=event_workflow.id, + expression=expression, + scope=test_run_id, + payload={"testRunId": test_run_id, **payload}, + ) + + yield + + await hatchet.filters.aio_delete(f.metadata.id) + + +async def fetch_runs_for_event( + hatchet: Hatchet, event: Event +) -> tuple[ProcessedEvent, list[V1TaskSummary]]: + runs = await hatchet.runs.aio_list(triggering_event_external_id=event.eventId) + + meta = ( + cast(dict[str, str | int | bool], json.loads(event.additionalMetadata)) + if event.additionalMetadata + else {} + ) + payload = ( + cast(dict[str, str | bool], json.loads(event.payload)) if event.payload else {} + ) + + return ( + ProcessedEvent( + id=event.eventId, + payload=payload, + meta=meta, + should_have_runs=meta.get("should_have_runs", False) is True, + test_run_id=cast(str, meta["test_run_id"]), + ), + runs.rows or [], + ) + + +async def wait_for_result( + hatchet: Hatchet, events: list[Event] +) -> dict[ProcessedEvent, list[V1TaskSummary]]: + await asyncio.sleep(3) + + persisted = (await hatchet.event.aio_list(limit=100)).rows or [] + + assert {e.eventId for e in events}.issubset({e.metadata.id for e in persisted}) + + iters = 0 + while True: + print("Waiting for event runs to complete...") + if iters > 15: + print("Timed out waiting for event runs to complete.") + return {} + + iters += 1 + + event_runs = await asyncio.gather( + *[fetch_runs_for_event(hatchet, event) for event in events] + ) + + all_empty = all(not event_run for _, event_run in event_runs) + + if all_empty: + await asyncio.sleep(1) + continue + + event_id_to_runs = {event_id: runs for (event_id, runs) in event_runs} + + any_queued_or_running = any( + run.status in [V1TaskStatus.QUEUED, V1TaskStatus.RUNNING] + for runs in 
event_id_to_runs.values() + for run in runs + ) + + if any_queued_or_running: + await asyncio.sleep(1) + continue + + break + + return event_id_to_runs + + +async def assert_event_runs_processed( + event: ProcessedEvent, + runs: list[V1TaskSummary], +) -> None: + if event.should_have_runs: + assert len(runs) > 0 + else: + assert len(runs) == 0 + + +def bpi( + index: int = 1, + test_run_id: str = "", + should_skip: bool = False, + should_have_runs: bool = True, + key: str = "user:create", + payload: dict[str, str] = {}, + scope: str | None = None, +) -> BulkPushEventWithMetadata: + return BulkPushEventWithMetadata( + key=key, + payload={ + "should_skip": should_skip, + **payload, + }, + additional_metadata={ + "should_have_runs": should_have_runs, + "test_run_id": test_run_id, + "key": index, + }, + scope=scope, + ) + + +def cp(should_skip: bool) -> dict[str, bool]: + return EventWorkflowInput(should_skip=should_skip).model_dump() + + @pytest.mark.asyncio(loop_scope="session") async def test_event_push(hatchet: Hatchet) -> None: - e = hatchet.event.push("user:create", {"test": "test"}) + e = hatchet.event.push("user:create", cp(False)) assert e.eventId is not None @pytest.mark.asyncio(loop_scope="session") async def test_async_event_push(hatchet: Hatchet) -> None: - e = await hatchet.event.aio_push("user:create", {"test": "test"}) + e = await hatchet.event.aio_push("user:create", cp(False)) assert e.eventId is not None @pytest.mark.asyncio(loop_scope="session") async def test_async_event_bulk_push(hatchet: Hatchet) -> None: - events = [ BulkPushEventWithMetadata( key="event1", - payload={"message": "This is event 1"}, + payload={"message": "This is event 1", "should_skip": False}, additional_metadata={"source": "test", "user_id": "user123"}, ), BulkPushEventWithMetadata( key="event2", - payload={"message": "This is event 2"}, + payload={"message": "This is event 2", "should_skip": False}, additional_metadata={"source": "test", "user_id": "user456"}, ), BulkPushEventWithMetadata( key="event3", - payload={"message": "This is event 3"}, + payload={"message": "This is event 3", "should_skip": False}, additional_metadata={"source": "test", "user_id": "user789"}, ), ] @@ -52,3 +210,171 @@ async def test_async_event_bulk_push(hatchet: Hatchet) -> None: # Check that the returned events match the original events for original_event, returned_event in zip(sorted_events, sorted_returned_events): assert returned_event.key == namespace + original_event.key + + +@pytest.fixture(scope="function") +def test_run_id() -> str: + return str(uuid4()) + + +@pytest.mark.asyncio(loop_scope="session") +async def test_event_engine_behavior(hatchet: Hatchet) -> None: + test_run_id = str(uuid4()) + events = [ + bpi( + test_run_id=test_run_id, + ), + bpi( + test_run_id=test_run_id, + key="thisisafakeeventfoobarbaz", + should_have_runs=False, + ), + ] + + print("Events:", events) + + result = await hatchet.event.aio_bulk_push(events) + + print("Result:", result) + + runs = await wait_for_result(hatchet, result) + + for event, r in runs.items(): + await assert_event_runs_processed(event, r) + + +def gen_bulk_events(test_run_id: str) -> list[BulkPushEventWithMetadata]: + return [ + bpi( + index=1, + test_run_id=test_run_id, + should_skip=False, + should_have_runs=True, + ), + bpi( + index=2, + test_run_id=test_run_id, + should_skip=True, + should_have_runs=True, + ), + bpi( + index=3, + test_run_id=test_run_id, + should_skip=False, + should_have_runs=True, + scope=test_run_id, + ), + bpi( + index=4, + 
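+ # event 4: scoped, but should_skip=True makes the test filter's expression false, so no runs are expected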
test_run_id=test_run_id, + should_skip=True, + should_have_runs=False, + scope=test_run_id, + ), + bpi( + index=5, + test_run_id=test_run_id, + should_skip=True, + should_have_runs=False, + scope=test_run_id, + key="thisisafakeeventfoobarbaz", + ), + bpi( + index=6, + test_run_id=test_run_id, + should_skip=False, + should_have_runs=False, + scope=test_run_id, + key="thisisafakeeventfoobarbaz", + ), + ] + + +@pytest.mark.asyncio(loop_scope="session") +async def test_event_skipping_filtering(hatchet: Hatchet, test_run_id: str) -> None: + async with event_filter(hatchet, test_run_id): + events = gen_bulk_events(test_run_id) + + result = await hatchet.event.aio_bulk_push(events) + + runs = await wait_for_result(hatchet, result) + for e, r in runs.items(): + await assert_event_runs_processed(e, r) + + +async def bulk_to_single(hatchet: Hatchet, event: BulkPushEventWithMetadata) -> Event: + return await hatchet.event.aio_push( + event_key=event.key, + payload=event.payload, + options=PushEventOptions( + scope=event.scope, + additional_metadata=event.additional_metadata, + priority=event.priority, + ), + ) + + +@pytest.mark.asyncio(loop_scope="session") +async def test_event_skipping_filtering_no_bulk( + hatchet: Hatchet, test_run_id: str +) -> None: + async with event_filter(hatchet, test_run_id): + raw_events = gen_bulk_events(test_run_id) + events = await asyncio.gather( + *[bulk_to_single(hatchet, event) for event in raw_events] + ) + + result = await wait_for_result(hatchet, events) + for event, runs in result.items(): + await assert_event_runs_processed(event, runs) + + +@pytest.mark.asyncio(loop_scope="session") +async def test_event_payload_filtering(hatchet: Hatchet, test_run_id: str) -> None: + async with event_filter( + hatchet, + test_run_id, + "input.should_skip == false && payload.foobar == 'baz'", + {"foobar": "qux"}, + ): + event = await hatchet.event.aio_push( + event_key="user:create", + payload={"message": "This is event 1", "should_skip": False}, + options=PushEventOptions( + scope=test_run_id, + additional_metadata={ + "should_have_runs": False, + "test_run_id": test_run_id, + "key": 1, + }, + ), + ) + + runs = await wait_for_result(hatchet, [event]) + assert len(runs) == 0 + + +@pytest.mark.asyncio(loop_scope="session") +async def test_event_payload_filtering_with_payload_match( + hatchet: Hatchet, test_run_id: str +) -> None: + async with event_filter( + hatchet, + test_run_id, + "input.should_skip == false && payload.foobar == 'baz'", + {"foobar": "baz"}, + ): + event = await hatchet.event.aio_push( + event_key="user:create", + payload={"message": "This is event 1", "should_skip": False}, + options=PushEventOptions( + scope=test_run_id, + additional_metadata={ + "should_have_runs": True, + "test_run_id": test_run_id, + "key": 1, + }, + ), + ) + runs = await wait_for_result(hatchet, [event]) + assert len(runs) == 1 diff --git a/sdks/python/examples/events/worker.py b/sdks/python/examples/events/worker.py index 70f86c38f..ff46a0fcc 100644 --- a/sdks/python/examples/events/worker.py +++ b/sdks/python/examples/events/worker.py @@ -1,14 +1,26 @@ -from hatchet_sdk import Context, EmptyModel, Hatchet +from pydantic import BaseModel + +from hatchet_sdk import Context, Hatchet hatchet = Hatchet() +EVENT_KEY = "user:create" + + +class EventWorkflowInput(BaseModel): + should_skip: bool + # > Event trigger -event_workflow = hatchet.workflow(name="EventWorkflow", on_events=["user:create"]) +event_workflow = hatchet.workflow( + name="EventWorkflow", + on_events=[EVENT_KEY], + 
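+ # the event payload is validated into EventWorkflowInput before the task runs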
input_validator=EventWorkflowInput, +) # !! @event_workflow.task() -def task(input: EmptyModel, ctx: Context) -> None: +def task(input: EventWorkflowInput, ctx: Context) -> None: print("event received") diff --git a/sdks/python/examples/simple/test_simple_workflow.py b/sdks/python/examples/simple/test_simple_workflow.py index 1a0eeba37..7192791c7 100644 --- a/sdks/python/examples/simple/test_simple_workflow.py +++ b/sdks/python/examples/simple/test_simple_workflow.py @@ -1,33 +1,38 @@ import pytest -from examples.simple.worker import step1 +from examples.simple.worker import simple, simple_durable +from hatchet_sdk import EmptyModel +from hatchet_sdk.runnables.standalone import Standalone +@pytest.mark.parametrize("task", [simple, simple_durable]) @pytest.mark.asyncio(loop_scope="session") -async def test_simple_workflow_running_options() -> None: - x1 = step1.run() - x2 = await step1.aio_run() +async def test_simple_workflow_running_options( + task: Standalone[EmptyModel, dict[str, str]] +) -> None: + x1 = task.run() + x2 = await task.aio_run() - x3 = step1.run_many([step1.create_bulk_run_item()])[0] - x4 = (await step1.aio_run_many([step1.create_bulk_run_item()]))[0] + x3 = task.run_many([task.create_bulk_run_item()])[0] + x4 = (await task.aio_run_many([task.create_bulk_run_item()]))[0] - x5 = step1.run_no_wait().result() - x6 = (await step1.aio_run_no_wait()).result() - x7 = [x.result() for x in step1.run_many_no_wait([step1.create_bulk_run_item()])][0] + x5 = task.run_no_wait().result() + x6 = (await task.aio_run_no_wait()).result() + x7 = [x.result() for x in task.run_many_no_wait([task.create_bulk_run_item()])][0] x8 = [ x.result() - for x in await step1.aio_run_many_no_wait([step1.create_bulk_run_item()]) + for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()]) ][0] - x9 = await step1.run_no_wait().aio_result() - x10 = await (await step1.aio_run_no_wait()).aio_result() + x9 = await task.run_no_wait().aio_result() + x10 = await (await task.aio_run_no_wait()).aio_result() x11 = [ await x.aio_result() - for x in step1.run_many_no_wait([step1.create_bulk_run_item()]) + for x in task.run_many_no_wait([task.create_bulk_run_item()]) ][0] x12 = [ await x.aio_result() - for x in await step1.aio_run_many_no_wait([step1.create_bulk_run_item()]) + for x in await task.aio_run_many_no_wait([task.create_bulk_run_item()]) ][0] assert all( diff --git a/sdks/python/examples/simple/trigger.py b/sdks/python/examples/simple/trigger.py index 9e638b5ba..cf1917f7a 100644 --- a/sdks/python/examples/simple/trigger.py +++ b/sdks/python/examples/simple/trigger.py @@ -1,3 +1,3 @@ -from examples.simple.worker import step1 +from examples.simple.worker import simple -step1.run() +simple.run() diff --git a/sdks/python/examples/simple/worker.py b/sdks/python/examples/simple/worker.py index 1e70c16f2..7986b21fd 100644 --- a/sdks/python/examples/simple/worker.py +++ b/sdks/python/examples/simple/worker.py @@ -5,13 +5,18 @@ from hatchet_sdk import Context, EmptyModel, Hatchet hatchet = Hatchet(debug=True) -@hatchet.task(name="SimpleWorkflow") -def step1(input: EmptyModel, ctx: Context) -> dict[str, str]: +@hatchet.task() +def simple(input: EmptyModel, ctx: Context) -> dict[str, str]: + return {"result": "Hello, world!"} + + +@hatchet.durable_task() +def simple_durable(input: EmptyModel, ctx: Context) -> dict[str, str]: return {"result": "Hello, world!"} def main() -> None: - worker = hatchet.worker("test-worker", workflows=[step1]) + worker = hatchet.worker("test-worker", workflows=[simple, 
simple_durable]) worker.start() diff --git a/sdks/python/examples/worker.py b/sdks/python/examples/worker.py index dbfee2d23..75b5047b4 100644 --- a/sdks/python/examples/worker.py +++ b/sdks/python/examples/worker.py @@ -10,13 +10,14 @@ from examples.concurrency_workflow_level.worker import ( from examples.dag.worker import dag_workflow from examples.dedupe.worker import dedupe_child_wf, dedupe_parent_wf from examples.durable.worker import durable_workflow +from examples.events.worker import event_workflow from examples.fanout.worker import child_wf, parent_wf from examples.fanout_sync.worker import sync_fanout_child, sync_fanout_parent from examples.lifespans.simple import lifespan, lifespan_task from examples.logger.workflow import logging_workflow from examples.non_retryable.worker import non_retryable_workflow from examples.on_failure.worker import on_failure_wf, on_failure_wf_with_details -from examples.simple.worker import step1 +from examples.simple.worker import simple, simple_durable from examples.timeout.worker import refresh_timeout_wf, timeout_wf from examples.waits.worker import task_condition_workflow from hatchet_sdk import Hatchet @@ -40,6 +41,7 @@ def main() -> None: dedupe_parent_wf, durable_workflow, child_wf, + event_workflow, parent_wf, on_failure_wf, on_failure_wf_with_details, @@ -53,7 +55,8 @@ def main() -> None: non_retryable_workflow, concurrency_workflow_level_workflow, lifespan_task, - step1, + simple, + simple_durable, ], lifespan=lifespan, ) diff --git a/sdks/python/hatchet_sdk/client.py b/sdks/python/hatchet_sdk/client.py index 39d5895a5..df3ca1152 100644 --- a/sdks/python/hatchet_sdk/client.py +++ b/sdks/python/hatchet_sdk/client.py @@ -5,6 +5,7 @@ from hatchet_sdk.clients.listeners.run_event_listener import RunEventListenerCli from hatchet_sdk.clients.listeners.workflow_listener import PooledWorkflowRunListener from hatchet_sdk.config import ClientConfig from hatchet_sdk.features.cron import CronClient +from hatchet_sdk.features.filters import FiltersClient from hatchet_sdk.features.logs import LogsClient from hatchet_sdk.features.metrics import MetricsClient from hatchet_sdk.features.rate_limits import RateLimitsClient @@ -34,6 +35,7 @@ class Client: self.debug = debug self.cron = CronClient(self.config) + self.filters = FiltersClient(self.config) self.logs = LogsClient(self.config) self.metrics = MetricsClient(self.config) self.rate_limits = RateLimitsClient(self.config) diff --git a/sdks/python/hatchet_sdk/clients/admin.py b/sdks/python/hatchet_sdk/clients/admin.py index 3a7acd041..6e23e90fd 100644 --- a/sdks/python/hatchet_sdk/clients/admin.py +++ b/sdks/python/hatchet_sdk/clients/admin.py @@ -46,9 +46,7 @@ class ScheduleTriggerWorkflowOptions(BaseModel): class TriggerWorkflowOptions(ScheduleTriggerWorkflowOptions): - additional_metadata: JSONSerializableMapping = Field(default_factory=dict) desired_worker_id: str | None = None - namespace: str | None = None sticky: bool = False key: str | None = None @@ -253,8 +251,7 @@ class AdminClient: try: namespace = options.namespace or self.namespace - if namespace != "" and not name.startswith(self.namespace): - name = f"{namespace}{name}" + name = self.config.apply_namespace(name, namespace) request = self._prepare_schedule_workflow_request( name, schedules, input, options @@ -312,8 +309,7 @@ class AdminClient: namespace = options.namespace or self.namespace - if namespace != "" and not workflow_name.startswith(self.namespace): - workflow_name = f"{namespace}{workflow_name}" + workflow_name = 
self.config.apply_namespace(workflow_name, namespace) return self._prepare_workflow_request(workflow_name, input, trigger_options) diff --git a/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py b/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py index fb8dbb70e..7354f28a9 100644 --- a/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py +++ b/sdks/python/hatchet_sdk/clients/dispatcher/action_listener.py @@ -73,14 +73,27 @@ class ActionPayload(BaseModel): step_run_errors: dict[str, str] = Field(default_factory=dict) triggered_by: str | None = None triggers: JSONSerializableMapping = Field(default_factory=dict) + filter_payload: JSONSerializableMapping = Field(default_factory=dict) @field_validator( - "input", "parents", "overrides", "user_data", "step_run_errors", mode="before" + "input", + "parents", + "overrides", + "user_data", + "step_run_errors", + "filter_payload", + mode="before", ) @classmethod def validate_fields(cls, v: Any) -> Any: return v or {} + @model_validator(mode="after") + def validate_filter_payload(self) -> "ActionPayload": + self.filter_payload = self.triggers.get("filter_payload", {}) + + return self + class ActionType(str, Enum): START_STEP_RUN = "START_STEP_RUN" diff --git a/sdks/python/hatchet_sdk/clients/events.py b/sdks/python/hatchet_sdk/clients/events.py index 56ca28390..532f56a05 100644 --- a/sdks/python/hatchet_sdk/clients/events.py +++ b/sdks/python/hatchet_sdk/clients/events.py @@ -6,7 +6,12 @@ from typing import List, cast from google.protobuf import timestamp_pb2 from pydantic import BaseModel, Field +from hatchet_sdk.clients.rest.api.event_api import EventApi +from hatchet_sdk.clients.rest.api.workflow_runs_api import WorkflowRunsApi +from hatchet_sdk.clients.rest.api_client import ApiClient +from hatchet_sdk.clients.rest.models.v1_event_list import V1EventList from hatchet_sdk.clients.rest.tenacity_utils import tenacity_retry +from hatchet_sdk.clients.v1.api_client import BaseRestClient from hatchet_sdk.config import ClientConfig from hatchet_sdk.connection import new_conn from hatchet_sdk.contracts.events_pb2 import ( @@ -33,6 +38,8 @@ def proto_timestamp_now() -> timestamp_pb2.Timestamp: class PushEventOptions(BaseModel): additional_metadata: JSONSerializableMapping = Field(default_factory=dict) namespace: str | None = None + priority: int | None = None + scope: str | None = None class BulkPushEventOptions(BaseModel): @@ -43,16 +50,26 @@ class BulkPushEventWithMetadata(BaseModel): key: str payload: JSONSerializableMapping = Field(default_factory=dict) additional_metadata: JSONSerializableMapping = Field(default_factory=dict) + priority: int | None = None + scope: str | None = None -class EventClient: +class EventClient(BaseRestClient): def __init__(self, config: ClientConfig): + super().__init__(config) + conn = new_conn(config, False) - self.client = EventsServiceStub(conn) + self.events_service_client = EventsServiceStub(conn) self.token = config.token self.namespace = config.namespace + def _wra(self, client: ApiClient) -> WorkflowRunsApi: + return WorkflowRunsApi(client) + + def _ea(self, client: ApiClient) -> EventApi: + return EventApi(client) + async def aio_push( self, event_key: str, @@ -79,7 +96,7 @@ class EventClient: options: PushEventOptions = PushEventOptions(), ) -> Event: namespace = options.namespace or self.namespace - namespaced_event_key = namespace + event_key + namespaced_event_key = self.client_config.apply_namespace(event_key, namespace) try: meta_bytes = json.dumps(options.additional_metadata) 
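Both call sites above (in `admin.py` and `events.py`) replace hand-rolled prefixing, previously `namespace + event_key` and a `startswith` check against `self.namespace`, with a single `apply_namespace` helper on the client config. The helper's implementation is not part of this diff; inferred from the inline logic it replaces, it presumably behaves along these lines (a hypothetical sketch, not the actual source):

    # Hypothetical reconstruction of ClientConfig.apply_namespace, inferred
    # from the removed inline checks; the real code lives in hatchet_sdk.config.
    def apply_namespace(self, name: str, namespace: str | None = None) -> str:
        ns = namespace if namespace is not None else self.namespace
        # Skip empty namespaces and already-prefixed names so the helper is idempotent.
        if not ns or name.startswith(ns):
            return name
        return f"{ns}{name}"

Centralizing this presumably also smooths over a subtle inconsistency in the old `admin.py` code, which checked `name.startswith(self.namespace)` but then prefixed with the overriding `namespace`.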
@@ -96,16 +113,21 @@ class EventClient: payload=payload_str, eventTimestamp=proto_timestamp_now(), additionalMetadata=meta_bytes, + priority=options.priority, + scope=options.scope, ) - return cast(Event, self.client.Push(request, metadata=get_metadata(self.token))) + return cast( + Event, + self.events_service_client.Push(request, metadata=get_metadata(self.token)), + ) def _create_push_event_request( self, event: BulkPushEventWithMetadata, namespace: str, ) -> PushEventRequest: - event_key = namespace + event.key + event_key = self.client_config.apply_namespace(event.key, namespace) payload = event.payload meta = event.additional_metadata @@ -125,6 +147,8 @@ class EventClient: payload=serialized_payload, eventTimestamp=proto_timestamp_now(), additionalMetadata=meta_str, + priority=event.priority, + scope=event.scope, ) ## IMPORTANT: Keep this method's signature in sync with the wrapper in the OTel instrumentor @@ -145,7 +169,9 @@ class EventClient: return list( cast( Events, - self.client.BulkPush(bulk_request, metadata=get_metadata(self.token)), + self.events_service_client.BulkPush( + bulk_request, metadata=get_metadata(self.token) + ), ).events ) @@ -157,7 +183,7 @@ class EventClient: message=message, ) - self.client.PutLog(request, metadata=get_metadata(self.token)) + self.events_service_client.PutLog(request, metadata=get_metadata(self.token)) @tenacity_retry def stream(self, data: str | bytes, step_run_id: str) -> None: @@ -174,4 +200,28 @@ class EventClient: message=data_bytes, ) - self.client.PutStreamEvent(request, metadata=get_metadata(self.token)) + self.events_service_client.PutStreamEvent( + request, metadata=get_metadata(self.token) + ) + + async def aio_list( + self, + offset: int | None = None, + limit: int | None = None, + keys: list[str] | None = None, + ) -> V1EventList: + return await asyncio.to_thread(self.list, offset=offset, limit=limit, keys=keys) + + def list( + self, + offset: int | None = None, + limit: int | None = None, + keys: list[str] | None = None, + ) -> V1EventList: + with self.client() as client: + return self._ea(client).v1_event_list( + tenant=self.client_config.tenant_id, + offset=offset, + limit=limit, + keys=keys, + ) diff --git a/sdks/python/hatchet_sdk/clients/rest/__init__.py b/sdks/python/hatchet_sdk/clients/rest/__init__.py index 3b01f54f7..b29dd08f3 100644 --- a/sdks/python/hatchet_sdk/clients/rest/__init__.py +++ b/sdks/python/hatchet_sdk/clients/rest/__init__.py @@ -20,6 +20,7 @@ __version__ = "1.0.0" from hatchet_sdk.clients.rest.api.api_token_api import APITokenApi from hatchet_sdk.clients.rest.api.default_api import DefaultApi from hatchet_sdk.clients.rest.api.event_api import EventApi +from hatchet_sdk.clients.rest.api.filter_api import FilterApi from hatchet_sdk.clients.rest.api.github_api import GithubApi from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi from hatchet_sdk.clients.rest.api.log_api import LogApi @@ -229,7 +230,17 @@ from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import ( ) from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic from hatchet_sdk.clients.rest.models.v1_cancel_task_request import V1CancelTaskRequest +from hatchet_sdk.clients.rest.models.v1_create_filter_request import ( + V1CreateFilterRequest, +) from hatchet_sdk.clients.rest.models.v1_dag_children import V1DagChildren +from hatchet_sdk.clients.rest.models.v1_event import V1Event +from hatchet_sdk.clients.rest.models.v1_event_list import V1EventList +from 
hatchet_sdk.clients.rest.models.v1_event_workflow_run_summary import ( + V1EventWorkflowRunSummary, +) +from hatchet_sdk.clients.rest.models.v1_filter import V1Filter +from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList from hatchet_sdk.clients.rest.models.v1_log_line import V1LogLine from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList diff --git a/sdks/python/hatchet_sdk/clients/rest/api/__init__.py b/sdks/python/hatchet_sdk/clients/rest/api/__init__.py index 96940b5bf..6c7000e9a 100644 --- a/sdks/python/hatchet_sdk/clients/rest/api/__init__.py +++ b/sdks/python/hatchet_sdk/clients/rest/api/__init__.py @@ -4,6 +4,7 @@ from hatchet_sdk.clients.rest.api.api_token_api import APITokenApi from hatchet_sdk.clients.rest.api.default_api import DefaultApi from hatchet_sdk.clients.rest.api.event_api import EventApi +from hatchet_sdk.clients.rest.api.filter_api import FilterApi from hatchet_sdk.clients.rest.api.github_api import GithubApi from hatchet_sdk.clients.rest.api.healthcheck_api import HealthcheckApi from hatchet_sdk.clients.rest.api.log_api import LogApi diff --git a/sdks/python/hatchet_sdk/clients/rest/api/event_api.py b/sdks/python/hatchet_sdk/clients/rest/api/event_api.py index 28dd745aa..98d7b8ade 100644 --- a/sdks/python/hatchet_sdk/clients/rest/api/event_api.py +++ b/sdks/python/hatchet_sdk/clients/rest/api/event_api.py @@ -37,6 +37,7 @@ from hatchet_sdk.clients.rest.models.event_update_cancel200_response import ( ) from hatchet_sdk.clients.rest.models.events import Events from hatchet_sdk.clients.rest.models.replay_event_request import ReplayEventRequest +from hatchet_sdk.clients.rest.models.v1_event_list import V1EventList from hatchet_sdk.clients.rest.models.workflow_run_status import WorkflowRunStatus from hatchet_sdk.clients.rest.rest import RESTResponseType @@ -2546,3 +2547,337 @@ class EventApi: _host=_host, _request_auth=_request_auth, ) + + @validate_call + def v1_event_list( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + keys: Annotated[ + Optional[List[StrictStr]], Field(description="A list of keys to filter by") + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1EventList: + """List events + + Lists all events for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param keys: A list of keys to filter by + :type keys: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._v1_event_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + keys=keys, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1EventList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def v1_event_list_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + keys: Annotated[ + Optional[List[StrictStr]], Field(description="A list of keys to filter by") + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1EventList]: + """List events + + Lists all events for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param keys: A list of keys to filter by + :type keys: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_event_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + keys=keys, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1EventList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def v1_event_list_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + keys: Annotated[ + Optional[List[StrictStr]], Field(description="A list of keys to filter by") + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List events + + Lists all events for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param keys: A list of keys to filter by + :type keys: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_event_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + keys=keys, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1EventList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _v1_event_list_serialize( + self, + tenant, + offset, + limit, + keys, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + "keys": "multi", + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + if offset is not None: + + _query_params.append(("offset", offset)) + + if limit is not None: + + _query_params.append(("limit", limit)) + + if keys is not None: + + _query_params.append(("keys", keys)) + + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/stable/tenants/{tenant}/events", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) diff --git a/sdks/python/hatchet_sdk/clients/rest/api/filter_api.py b/sdks/python/hatchet_sdk/clients/rest/api/filter_api.py new file mode 100644 index 000000000..d139f29c7 --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/api/filter_api.py @@ -0,0 +1,1305 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + +import warnings +from typing import Any, Dict, List, Optional, Tuple, Union + +from pydantic import Field, StrictFloat, StrictInt, StrictStr, validate_call +from typing_extensions import Annotated + +from hatchet_sdk.clients.rest.api_client import ApiClient, RequestSerialized +from hatchet_sdk.clients.rest.api_response import ApiResponse +from hatchet_sdk.clients.rest.models.v1_create_filter_request import ( + V1CreateFilterRequest, +) +from hatchet_sdk.clients.rest.models.v1_filter import V1Filter +from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList +from hatchet_sdk.clients.rest.rest import RESTResponseType + + +class FilterApi: + """NOTE: This class is auto generated by OpenAPI Generator + Ref: https://openapi-generator.tech + + Do not edit the class manually. 
+ """ + + def __init__(self, api_client=None) -> None: + if api_client is None: + api_client = ApiClient.get_default() + self.api_client = api_client + + @validate_call + def v1_filter_create( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_create_filter_request: Annotated[ + V1CreateFilterRequest, Field(description="The input to the filter creation") + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1Filter: + """Create a filter + + Create a new filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_create_filter_request: The input to the filter creation (required) + :type v1_create_filter_request: V1CreateFilterRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_create_serialize( + tenant=tenant, + v1_create_filter_request=v1_create_filter_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def v1_filter_create_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_create_filter_request: Annotated[ + V1CreateFilterRequest, Field(description="The input to the filter creation") + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1Filter]: + """Create a filter + + Create a new filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_create_filter_request: The input to the filter creation (required) + :type v1_create_filter_request: V1CreateFilterRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_create_serialize( + tenant=tenant, + v1_create_filter_request=v1_create_filter_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def v1_filter_create_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_create_filter_request: Annotated[ + V1CreateFilterRequest, Field(description="The input to the filter creation") + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Create a filter + + Create a new filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_create_filter_request: The input to the filter creation (required) + :type v1_create_filter_request: V1CreateFilterRequest + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_create_serialize( + tenant=tenant, + v1_create_filter_request=v1_create_filter_request, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _v1_filter_create_serialize( + self, + tenant, + v1_create_filter_request, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + if v1_create_filter_request is not None: + _body_params = v1_create_filter_request + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # set the HTTP header `Content-Type` + if _content_type: + _header_params["Content-Type"] = _content_type + else: + _default_content_type = self.api_client.select_header_content_type( + ["application/json"] + ) + if _default_content_type is not None: + _header_params["Content-Type"] = _default_content_type + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="POST", + resource_path="/api/v1/stable/tenants/{tenant}/filters", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def v1_filter_delete( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The filter id to delete", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1Filter: + """v1_filter_delete + + Delete a filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id to delete (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._v1_filter_delete_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def v1_filter_delete_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The filter id to delete", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1Filter]: + """v1_filter_delete + + Delete a filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id to delete (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_delete_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def v1_filter_delete_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, + strict=True, + max_length=36, + description="The filter id to delete", + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """v1_filter_delete + + Delete a filter + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id to delete (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_delete_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + "404": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _v1_filter_delete_serialize( + self, + tenant, + v1_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + if v1_filter is not None: + _path_params["v1-filter"] = v1_filter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="DELETE", + resource_path="/api/v1/stable/tenants/{tenant}/filters/{v1-filter}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def v1_filter_get( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The filter id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1Filter: + """Get a filter + + Get a filter by its id + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._v1_filter_get_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def v1_filter_get_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The filter id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1Filter]: + """Get a filter + + Get a filter by its id + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._v1_filter_get_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def v1_filter_get_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + v1_filter: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The filter id" + ), + ], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a filter + + Get a filter by its id + + :param tenant: The tenant id (required) + :type tenant: str + :param v1_filter: The filter id (required) + :type v1_filter: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._v1_filter_get_serialize( + tenant=tenant, + v1_filter=v1_filter, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1Filter", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _v1_filter_get_serialize( + self, + tenant, + v1_filter, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = {} + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + if v1_filter is not None: + _path_params["v1-filter"] = v1_filter + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/stable/tenants/{tenant}/filters/{v1-filter}", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) + + @validate_call + def v1_filter_list( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_ids: Annotated[ + Optional[ + List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]] + ], + Field(description="The workflow ids to filter by"), + ] = None, + scopes: Annotated[ + Optional[List[StrictStr]], + Field(description="The scopes to subset candidate filters by"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> V1FilterList: + """List filters + + Lists all filters for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_ids: The workflow ids to filter by + :type workflow_ids: List[str] + :param scopes: The scopes to subset candidate filters by + :type scopes: List[str] + :param _request_timeout: timeout setting for this request. 
If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._v1_filter_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + workflow_ids=workflow_ids, + scopes=scopes, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1FilterList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + @validate_call + def v1_filter_list_with_http_info( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_ids: Annotated[ + Optional[ + List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]] + ], + Field(description="The workflow ids to filter by"), + ] = None, + scopes: Annotated[ + Optional[List[StrictStr]], + Field(description="The scopes to subset candidate filters by"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[V1FilterList]: + """List filters + + Lists all filters for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_ids: The workflow ids to filter by + :type workflow_ids: List[str] + :param scopes: The scopes to subset candidate filters by + :type scopes: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request.
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._v1_filter_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + workflow_ids=workflow_ids, + scopes=scopes, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1FilterList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + @validate_call + def v1_filter_list_without_preload_content( + self, + tenant: Annotated[ + str, + Field( + min_length=36, strict=True, max_length=36, description="The tenant id" + ), + ], + offset: Annotated[ + Optional[StrictInt], Field(description="The number to skip") + ] = None, + limit: Annotated[ + Optional[StrictInt], Field(description="The number to limit by") + ] = None, + workflow_ids: Annotated[ + Optional[ + List[Annotated[str, Field(min_length=36, strict=True, max_length=36)]] + ], + Field(description="The workflow ids to filter by"), + ] = None, + scopes: Annotated[ + Optional[List[StrictStr]], + Field(description="The scopes to subset candidate filters by"), + ] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], Annotated[StrictFloat, Field(gt=0)] + ], + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List filters + + Lists all filters for a tenant. + + :param tenant: The tenant id (required) + :type tenant: str + :param offset: The number to skip + :type offset: int + :param limit: The number to limit by + :type limit: int + :param workflow_ids: The workflow ids to filter by + :type workflow_ids: List[str] + :param scopes: The scopes to subset candidate filters by + :type scopes: List[str] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object.
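# --- Editor's note: a sketch of listing filters with the optional query
# parameters above. Per `_v1_filter_list_serialize` below, `workflowIds` and
# `scopes` use the "multi" collection format, i.e. they serialize as repeated
# query parameters (?scopes=a&scopes=b) rather than a comma-joined value.
# Values are placeholders and `filter_api` is hypothetical.
page = filter_api.v1_filter_list(
    tenant=TENANT_ID,
    offset=0,
    limit=50,
    workflow_ids=["11111111-1111-1111-1111-111111111111"],  # 36-char uuids only
    scopes=["customer-1", "customer-2"],
)
for flt in page.rows or []:  # rows is Optional on V1FilterList
    print(flt.scope, flt.expression)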
+ """ # noqa: E501 + + _param = self._v1_filter_list_serialize( + tenant=tenant, + offset=offset, + limit=limit, + workflow_ids=workflow_ids, + scopes=scopes, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index, + ) + + _response_types_map: Dict[str, Optional[str]] = { + "200": "V1FilterList", + "400": "APIErrors", + "403": "APIErrors", + } + response_data = self.api_client.call_api( + *_param, _request_timeout=_request_timeout + ) + return response_data.response + + def _v1_filter_list_serialize( + self, + tenant, + offset, + limit, + workflow_ids, + scopes, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + "workflowIds": "multi", + "scopes": "multi", + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if tenant is not None: + _path_params["tenant"] = tenant + # process the query parameters + if offset is not None: + + _query_params.append(("offset", offset)) + + if limit is not None: + + _query_params.append(("limit", limit)) + + if workflow_ids is not None: + + _query_params.append(("workflowIds", workflow_ids)) + + if scopes is not None: + + _query_params.append(("scopes", scopes)) + + # process the header parameters + # process the form parameters + # process the body parameter + + # set the HTTP header `Accept` + if "Accept" not in _header_params: + _header_params["Accept"] = self.api_client.select_header_accept( + ["application/json"] + ) + + # authentication setting + _auth_settings: List[str] = ["cookieAuth", "bearerAuth"] + + return self.api_client.param_serialize( + method="GET", + resource_path="/api/v1/stable/tenants/{tenant}/filters", + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth, + ) diff --git a/sdks/python/hatchet_sdk/clients/rest/api/task_api.py b/sdks/python/hatchet_sdk/clients/rest/api/task_api.py index 9ca57126c..1af794412 100644 --- a/sdks/python/hatchet_sdk/clients/rest/api/task_api.py +++ b/sdks/python/hatchet_sdk/clients/rest/api/task_api.py @@ -954,6 +954,9 @@ class TaskApi: str, Field(min_length=36, strict=True, max_length=36, description="The task id"), ], + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -972,6 +975,8 @@ class TaskApi: :param task: The task id (required) :type task: str + :param attempt: The attempt number + :type attempt: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -996,6 +1001,7 @@ class TaskApi: _param = self._v1_task_get_serialize( task=task, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1025,6 +1031,9 @@ class TaskApi: str, Field(min_length=36, strict=True, max_length=36, description="The task id"), ], + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1043,6 +1052,8 @@ class TaskApi: :param task: The task id (required) :type task: str + :param attempt: The attempt number + :type attempt: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1067,6 +1078,7 @@ class TaskApi: _param = self._v1_task_get_serialize( task=task, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1096,6 +1108,9 @@ class TaskApi: str, Field(min_length=36, strict=True, max_length=36, description="The task id"), ], + attempt: Annotated[ + Optional[StrictInt], Field(description="The attempt number") + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1114,6 +1129,8 @@ class TaskApi: :param task: The task id (required) :type task: str + :param attempt: The attempt number + :type attempt: int :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1138,6 +1155,7 @@ class TaskApi: _param = self._v1_task_get_serialize( task=task, + attempt=attempt, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1159,6 +1177,7 @@ class TaskApi: def _v1_task_get_serialize( self, task, + attempt, _request_auth, _content_type, _headers, @@ -1182,6 +1201,10 @@ class TaskApi: if task is not None: _path_params["task"] = task # process the query parameters + if attempt is not None: + + _query_params.append(("attempt", attempt)) + # process the header parameters # process the form parameters # process the body parameter @@ -1568,6 +1591,10 @@ class TaskApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task's external id"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The id of the event that triggered the task"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1594,6 +1621,8 @@ class TaskApi: :type workflow_ids: List[str] :param parent_task_external_id: The parent task's external id :type parent_task_external_id: str + :param triggering_event_external_id: The id of the event that triggered the task + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1622,6 +1651,7 @@ class TaskApi: until=until, workflow_ids=workflow_ids, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1668,6 +1698,10 @@ class TaskApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task's external id"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The id of the event that triggered the task"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1694,6 +1728,8 @@ class TaskApi: :type workflow_ids: List[str] :param parent_task_external_id: The parent task's external id :type parent_task_external_id: str + :param triggering_event_external_id: The id of the event that triggered the task + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1722,6 +1758,7 @@ class TaskApi: until=until, workflow_ids=workflow_ids, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1768,6 +1805,10 @@ class TaskApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task's external id"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field(description="The id of the event that triggered the task"), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1794,6 +1835,8 @@ class TaskApi: :type workflow_ids: List[str] :param parent_task_external_id: The parent task's external id :type parent_task_external_id: str + :param triggering_event_external_id: The id of the event that triggered the task + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1822,6 +1865,7 @@ class TaskApi: until=until, workflow_ids=workflow_ids, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1846,6 +1890,7 @@ class TaskApi: until, workflow_ids, parent_task_external_id, + triggering_event_external_id, _request_auth, _content_type, _headers, @@ -1901,6 +1946,12 @@ class TaskApi: _query_params.append(("parent_task_external_id", parent_task_external_id)) + if triggering_event_external_id is not None: + + _query_params.append( + ("triggering_event_external_id", triggering_event_external_id) + ) + # process the header parameters # process the form parameters # process the body parameter diff --git a/sdks/python/hatchet_sdk/clients/rest/api/workflow_runs_api.py b/sdks/python/hatchet_sdk/clients/rest/api/workflow_runs_api.py index 656761f3f..d0f41f0d4 100644 --- a/sdks/python/hatchet_sdk/clients/rest/api/workflow_runs_api.py +++ b/sdks/python/hatchet_sdk/clients/rest/api/workflow_runs_api.py @@ -1258,6 +1258,12 @@ class WorkflowRunsApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task external id to filter by"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field( + description="The external id of the event that triggered the workflow run" + ), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1296,6 +1302,8 @@ class WorkflowRunsApi: :type worker_id: str :param parent_task_external_id: The parent task external id to filter by :type parent_task_external_id: str + :param triggering_event_external_id: The external id of the event that triggered the workflow run + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1330,6 +1338,7 @@ class WorkflowRunsApi: workflow_ids=workflow_ids, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1396,6 +1405,12 @@ class WorkflowRunsApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task external id to filter by"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field( + description="The external id of the event that triggered the workflow run" + ), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1434,6 +1449,8 @@ class WorkflowRunsApi: :type worker_id: str :param parent_task_external_id: The parent task external id to filter by :type parent_task_external_id: str + :param triggering_event_external_id: The external id of the event that triggered the workflow run + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. 
It can also be a pair (tuple) of @@ -1468,6 +1485,7 @@ class WorkflowRunsApi: workflow_ids=workflow_ids, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1534,6 +1552,12 @@ class WorkflowRunsApi: Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], Field(description="The parent task external id to filter by"), ] = None, + triggering_event_external_id: Annotated[ + Optional[Annotated[str, Field(min_length=36, strict=True, max_length=36)]], + Field( + description="The external id of the event that triggered the workflow run" + ), + ] = None, _request_timeout: Union[ None, Annotated[StrictFloat, Field(gt=0)], @@ -1572,6 +1596,8 @@ class WorkflowRunsApi: :type worker_id: str :param parent_task_external_id: The parent task external id to filter by :type parent_task_external_id: str + :param triggering_event_external_id: The external id of the event that triggered the workflow run + :type triggering_event_external_id: str :param _request_timeout: timeout setting for this request. If one number provided, it will be total request timeout. It can also be a pair (tuple) of @@ -1606,6 +1632,7 @@ class WorkflowRunsApi: workflow_ids=workflow_ids, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, _request_auth=_request_auth, _content_type=_content_type, _headers=_headers, @@ -1636,6 +1663,7 @@ class WorkflowRunsApi: workflow_ids, worker_id, parent_task_external_id, + triggering_event_external_id, _request_auth, _content_type, _headers, @@ -1717,6 +1745,12 @@ class WorkflowRunsApi: _query_params.append(("parent_task_external_id", parent_task_external_id)) + if triggering_event_external_id is not None: + + _query_params.append( + ("triggering_event_external_id", triggering_event_external_id) + ) + # process the header parameters # process the form parameters # process the body parameter diff --git a/sdks/python/hatchet_sdk/clients/rest/models/__init__.py b/sdks/python/hatchet_sdk/clients/rest/models/__init__.py index 0691183df..cbe9e8cb0 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/__init__.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/__init__.py @@ -194,7 +194,17 @@ from hatchet_sdk.clients.rest.models.user_tenant_memberships_list import ( ) from hatchet_sdk.clients.rest.models.user_tenant_public import UserTenantPublic from hatchet_sdk.clients.rest.models.v1_cancel_task_request import V1CancelTaskRequest +from hatchet_sdk.clients.rest.models.v1_create_filter_request import ( + V1CreateFilterRequest, +) from hatchet_sdk.clients.rest.models.v1_dag_children import V1DagChildren +from hatchet_sdk.clients.rest.models.v1_event import V1Event +from hatchet_sdk.clients.rest.models.v1_event_list import V1EventList +from hatchet_sdk.clients.rest.models.v1_event_workflow_run_summary import ( + V1EventWorkflowRunSummary, +) +from hatchet_sdk.clients.rest.models.v1_filter import V1Filter +from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList from hatchet_sdk.clients.rest.models.v1_log_line import V1LogLine from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel from hatchet_sdk.clients.rest.models.v1_log_line_list import V1LogLineList diff --git a/sdks/python/hatchet_sdk/clients/rest/models/create_event_request.py b/sdks/python/hatchet_sdk/clients/rest/models/create_event_request.py index 
adc37ce62..3d4240789 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/create_event_request.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/create_event_request.py @@ -19,7 +19,7 @@ import pprint import re # noqa: F401 from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr from typing_extensions import Self @@ -35,7 +35,19 @@ class CreateEventRequest(BaseModel): description="Additional metadata for the event.", alias="additionalMetadata", ) - __properties: ClassVar[List[str]] = ["key", "data", "additionalMetadata"] + priority: Optional[StrictInt] = Field( + default=None, description="The priority of the event." + ) + scope: Optional[StrictStr] = Field( + default=None, description="The scope for event filtering." + ) + __properties: ClassVar[List[str]] = [ + "key", + "data", + "additionalMetadata", + "priority", + "scope", + ] model_config = ConfigDict( populate_by_name=True, @@ -90,6 +102,8 @@ class CreateEventRequest(BaseModel): "key": obj.get("key"), "data": obj.get("data"), "additionalMetadata": obj.get("additionalMetadata"), + "priority": obj.get("priority"), + "scope": obj.get("scope"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_create_filter_request.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_create_filter_request.py new file mode 100644 index 000000000..ed4575ccb --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_create_filter_request.py @@ -0,0 +1,99 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Annotated, Self + + +class V1CreateFilterRequest(BaseModel): + """ + V1CreateFilterRequest + """ # noqa: E501 + + workflow_id: Annotated[str, Field(min_length=36, strict=True, max_length=36)] = ( + Field(description="The workflow id", alias="workflowId") + ) + expression: StrictStr = Field(description="The expression for the filter") + scope: StrictStr = Field( + description="The scope associated with this filter. Used for subsetting candidate filters at evaluation time" + ) + payload: Optional[Dict[str, Any]] = Field( + default=None, description="The payload for the filter" + ) + __properties: ClassVar[List[str]] = ["workflowId", "expression", "scope", "payload"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1CreateFilterRequest from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
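# --- Editor's note: with the `priority` and `scope` fields added to
# CreateEventRequest above, a pushed event can now carry both. A hedged sketch
# of building the request body; how it is transported is outside this hunk, and
# `data` is assumed to accept the event payload object per the model's existing
# field (adjust if the field type differs).
from hatchet_sdk.clients.rest.models.create_event_request import CreateEventRequest

event = CreateEventRequest(
    key="user:created",
    data={"user_id": "1234"},
    additionalMetadata={"source": "signup"},
    priority=3,          # new optional field: the priority of the event
    scope="customer-1",  # new optional field: used for event filtering
)
print(event.to_json())   # priority and scope now appear in the serialized body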
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1CreateFilterRequest from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "workflowId": obj.get("workflowId"), + "expression": obj.get("expression"), + "scope": obj.get("scope"), + "payload": obj.get("payload"), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_event.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_event.py new file mode 100644 index 000000000..71a8d5b5f --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_event.py @@ -0,0 +1,142 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Self + +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta +from hatchet_sdk.clients.rest.models.tenant import Tenant +from hatchet_sdk.clients.rest.models.v1_event_workflow_run_summary import ( + V1EventWorkflowRunSummary, +) + + +class V1Event(BaseModel): + """ + V1Event + """ # noqa: E501 + + metadata: APIResourceMeta + key: StrictStr = Field(description="The key for the event.") + tenant: Optional[Tenant] = Field( + default=None, description="The tenant associated with this event." + ) + tenant_id: StrictStr = Field( + description="The ID of the tenant associated with this event.", alias="tenantId" + ) + workflow_run_summary: V1EventWorkflowRunSummary = Field( + description="The workflow run summary for this event.", + alias="workflowRunSummary", + ) + additional_metadata: Optional[Dict[str, Any]] = Field( + default=None, + description="Additional metadata for the event.", + alias="additionalMetadata", + ) + __properties: ClassVar[List[str]] = [ + "metadata", + "key", + "tenant", + "tenantId", + "workflowRunSummary", + "additionalMetadata", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1Event from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
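# --- Editor's note: the generated models in this patch all share the same
# from_dict / to_json surface; a quick round-trip through the
# V1CreateFilterRequest defined above (values are placeholders):
from hatchet_sdk.clients.rest.models.v1_create_filter_request import (
    V1CreateFilterRequest,
)

req = V1CreateFilterRequest.from_dict(
    {
        "workflowId": "11111111-1111-1111-1111-111111111111",
        "expression": "input.region == 'us-east'",
        "scope": "region:us-east",
    }
)
assert req is not None
print(req.to_json())  # the unset optional `payload` is dropped via exclude_none=True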
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict["metadata"] = self.metadata.to_dict() + # override the default output from pydantic by calling `to_dict()` of tenant + if self.tenant: + _dict["tenant"] = self.tenant.to_dict() + # override the default output from pydantic by calling `to_dict()` of workflow_run_summary + if self.workflow_run_summary: + _dict["workflowRunSummary"] = self.workflow_run_summary.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1Event from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "metadata": ( + APIResourceMeta.from_dict(obj["metadata"]) + if obj.get("metadata") is not None + else None + ), + "key": obj.get("key"), + "tenant": ( + Tenant.from_dict(obj["tenant"]) + if obj.get("tenant") is not None + else None + ), + "tenantId": obj.get("tenantId"), + "workflowRunSummary": ( + V1EventWorkflowRunSummary.from_dict(obj["workflowRunSummary"]) + if obj.get("workflowRunSummary") is not None + else None + ), + "additionalMetadata": obj.get("additionalMetadata"), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_event_list.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_event_list.py new file mode 100644 index 000000000..00cd8275f --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_event_list.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
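# --- Editor's note: V1Event above overrides to_dict so that nested models
# (metadata, tenant, workflowRunSummary) are emitted via their own to_dict()
# rather than pydantic's default dump, keeping aliasing and exclude_none
# behavior consistent across the whole tree, while from_dict re-hydrates them.
# A sketch, with `evt` a hypothetical V1Event obtained from the API:
from hatchet_sdk.clients.rest.models.v1_event import V1Event

d = evt.to_dict()                      # plain dicts all the way down
print(d["workflowRunSummary"]["succeeded"])
evt_again = V1Event.from_dict(d)       # nested models rebuilt from the dicts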
+""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict +from typing_extensions import Self + +from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse +from hatchet_sdk.clients.rest.models.v1_event import V1Event + + +class V1EventList(BaseModel): + """ + V1EventList + """ # noqa: E501 + + pagination: Optional[PaginationResponse] = None + rows: Optional[List[V1Event]] = None + __properties: ClassVar[List[str]] = ["pagination", "rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1EventList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict["pagination"] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in rows (list) + _items = [] + if self.rows: + for _item_rows in self.rows: + if _item_rows: + _items.append(_item_rows.to_dict()) + _dict["rows"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1EventList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "pagination": ( + PaginationResponse.from_dict(obj["pagination"]) + if obj.get("pagination") is not None + else None + ), + "rows": ( + [V1Event.from_dict(_item) for _item in obj["rows"]] + if obj.get("rows") is not None + else None + ), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py new file mode 100644 index 000000000..a451ebd70 --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_event_workflow_run_summary.py @@ -0,0 +1,101 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing_extensions import Self + + +class V1EventWorkflowRunSummary(BaseModel): + """ + V1EventWorkflowRunSummary + """ # noqa: E501 + + running: StrictInt = Field(description="The number of running runs.") + queued: StrictInt = Field(description="The number of queued runs.") + succeeded: StrictInt = Field(description="The number of succeeded runs.") + failed: StrictInt = Field(description="The number of failed runs.") + cancelled: StrictInt = Field(description="The number of cancelled runs.") + __properties: ClassVar[List[str]] = [ + "running", + "queued", + "succeeded", + "failed", + "cancelled", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1EventWorkflowRunSummary from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1EventWorkflowRunSummary from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "running": obj.get("running"), + "queued": obj.get("queued"), + "succeeded": obj.get("succeeded"), + "failed": obj.get("failed"), + "cancelled": obj.get("cancelled"), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_filter.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_filter.py new file mode 100644 index 000000000..8ea1d433f --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_filter.py @@ -0,0 +1,127 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing_extensions import Annotated, Self + +from hatchet_sdk.clients.rest.models.api_resource_meta import APIResourceMeta + + +class V1Filter(BaseModel): + """ + V1Filter + """ # noqa: E501 + + metadata: APIResourceMeta + tenant_id: StrictStr = Field( + description="The ID of the tenant associated with this filter.", + alias="tenantId", + ) + workflow_id: Annotated[str, Field(min_length=36, strict=True, max_length=36)] = ( + Field( + description="The workflow id associated with this filter.", + alias="workflowId", + ) + ) + scope: StrictStr = Field( + description="The scope associated with this filter. Used for subsetting candidate filters at evaluation time" + ) + expression: StrictStr = Field( + description="The expression associated with this filter." + ) + payload: Dict[str, Any] = Field( + description="Additional payload data associated with the filter" + ) + __properties: ClassVar[List[str]] = [ + "metadata", + "tenantId", + "workflowId", + "scope", + "expression", + "payload", + ] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1Filter from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict["metadata"] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1Filter from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "metadata": ( + APIResourceMeta.from_dict(obj["metadata"]) + if obj.get("metadata") is not None + else None + ), + "tenantId": obj.get("tenantId"), + "workflowId": obj.get("workflowId"), + "scope": obj.get("scope"), + "expression": obj.get("expression"), + "payload": obj.get("payload"), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_filter_list.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_filter_list.py new file mode 100644 index 000000000..2ce56b680 --- /dev/null +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_filter_list.py @@ -0,0 +1,110 @@ +# coding: utf-8 + +""" + Hatchet API + + The Hatchet API + + The version of the OpenAPI document: 1.0.0 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations + +import json +import pprint +import re # noqa: F401 +from typing import Any, ClassVar, Dict, List, Optional, Set + +from pydantic import BaseModel, ConfigDict +from typing_extensions import Self + +from hatchet_sdk.clients.rest.models.pagination_response import PaginationResponse +from hatchet_sdk.clients.rest.models.v1_filter import V1Filter + + +class V1FilterList(BaseModel): + """ + V1FilterList + """ # noqa: E501 + + pagination: Optional[PaginationResponse] = None + rows: Optional[List[V1Filter]] = None + __properties: ClassVar[List[str]] = ["pagination", "rows"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead + return json.dumps(self.to_dict()) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of V1FilterList from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of pagination + if self.pagination: + _dict["pagination"] = self.pagination.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in rows (list) + _items = [] + if self.rows: + for _item_rows in self.rows: + if _item_rows: + _items.append(_item_rows.to_dict()) + _dict["rows"] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of V1FilterList from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate( + { + "pagination": ( + PaginationResponse.from_dict(obj["pagination"]) + if obj.get("pagination") is not None + else None + ), + "rows": ( + [V1Filter.from_dict(_item) for _item in obj["rows"]] + if obj.get("rows") is not None + else None + ), + } + ) + return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line.py index f2352e75e..fc0316b06 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_log_line.py @@ -20,9 +20,11 @@ import re # noqa: F401 from datetime import datetime from typing import Any, ClassVar, Dict, List, Optional, Set -from pydantic import BaseModel, ConfigDict, Field, StrictStr +from pydantic import BaseModel, ConfigDict, Field, StrictInt, StrictStr from typing_extensions import Self +from hatchet_sdk.clients.rest.models.v1_log_line_level import V1LogLineLevel + class V1LogLine(BaseModel): """ @@ -34,7 +36,21 @@ class V1LogLine(BaseModel): ) message: StrictStr = Field(description="The log message.") metadata: Dict[str, Any] = Field(description="The log metadata.") - __properties: ClassVar[List[str]] = ["createdAt", "message", "metadata"] + retry_count: Optional[StrictInt] = Field( + default=None, description="The retry count of the log line.", alias="retryCount" + ) + attempt: Optional[StrictInt] = Field( + default=None, description="The attempt number of the log line." + ) + level: Optional[V1LogLineLevel] = Field(default=None, description="The log level.") + __properties: ClassVar[List[str]] = [ + "createdAt", + "message", + "metadata", + "retryCount", + "attempt", + "level", + ] model_config = ConfigDict( populate_by_name=True, @@ -89,6 +105,9 @@ class V1LogLine(BaseModel): "createdAt": obj.get("createdAt"), "message": obj.get("message"), "metadata": obj.get("metadata"), + "retryCount": obj.get("retryCount"), + "attempt": obj.get("attempt"), + "level": obj.get("level"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_event.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_event.py index 0ef045db7..0add50507 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_event.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_event.py @@ -42,6 +42,14 @@ class V1TaskEvent(BaseModel): task_display_name: Optional[StrictStr] = Field( default=None, alias="taskDisplayName" ) + retry_count: Optional[StrictInt] = Field( + default=None, + description="The number of retries of the task.", + alias="retryCount", + ) + attempt: Optional[StrictInt] = Field( + default=None, description="The attempt number of the task." 
+ ) __properties: ClassVar[List[str]] = [ "id", "taskId", @@ -52,6 +60,8 @@ class V1TaskEvent(BaseModel): "output", "workerId", "taskDisplayName", + "retryCount", + "attempt", ] model_config = ConfigDict( @@ -113,6 +123,8 @@ class V1TaskEvent(BaseModel): "output": obj.get("output"), "workerId": obj.get("workerId"), "taskDisplayName": obj.get("taskDisplayName"), + "retryCount": obj.get("retryCount"), + "attempt": obj.get("attempt"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_summary.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_summary.py index 01939ebc8..df48de4d1 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_summary.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_summary.py @@ -37,6 +37,14 @@ class V1TaskSummary(BaseModel): action_id: Optional[StrictStr] = Field( default=None, description="The action ID of the task.", alias="actionId" ) + retry_count: Optional[StrictInt] = Field( + default=None, + description="The number of retries of the task.", + alias="retryCount", + ) + attempt: Optional[StrictInt] = Field( + default=None, description="The attempt number of the task." + ) additional_metadata: Optional[Dict[str, Any]] = Field( default=None, description="Additional metadata for the task run.", @@ -106,6 +114,8 @@ class V1TaskSummary(BaseModel): __properties: ClassVar[List[str]] = [ "metadata", "actionId", + "retryCount", + "attempt", "additionalMetadata", "children", "createdAt", @@ -196,6 +206,8 @@ class V1TaskSummary(BaseModel): else None ), "actionId": obj.get("actionId"), + "retryCount": obj.get("retryCount"), + "attempt": obj.get("attempt"), "additionalMetadata": obj.get("additionalMetadata"), "children": ( [V1TaskSummary.from_dict(_item) for _item in obj["children"]] diff --git a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_timing.py b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_timing.py index 4b3700a22..4f1372fc1 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/v1_task_timing.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/v1_task_timing.py @@ -70,6 +70,19 @@ class V1TaskTiming(BaseModel): description="The timestamp the task run finished.", alias="finishedAt", ) + workflow_run_id: Optional[StrictStr] = Field( + default=None, + description="The external ID of the workflow run.", + alias="workflowRunId", + ) + retry_count: Optional[StrictInt] = Field( + default=None, + description="The number of retries of the task.", + alias="retryCount", + ) + attempt: Optional[StrictInt] = Field( + default=None, description="The attempt number of the task." + ) __properties: ClassVar[List[str]] = [ "metadata", "depth", @@ -83,6 +96,9 @@ class V1TaskTiming(BaseModel): "queuedAt", "startedAt", "finishedAt", + "workflowRunId", + "retryCount", + "attempt", ] model_config = ConfigDict( @@ -154,6 +170,9 @@ class V1TaskTiming(BaseModel): "queuedAt": obj.get("queuedAt"), "startedAt": obj.get("startedAt"), "finishedAt": obj.get("finishedAt"), + "workflowRunId": obj.get("workflowRunId"), + "retryCount": obj.get("retryCount"), + "attempt": obj.get("attempt"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/clients/rest/models/workflow.py b/sdks/python/hatchet_sdk/clients/rest/models/workflow.py index f3107144c..44294e30d 100644 --- a/sdks/python/hatchet_sdk/clients/rest/models/workflow.py +++ b/sdks/python/hatchet_sdk/clients/rest/models/workflow.py @@ -47,6 +47,9 @@ class Workflow(BaseModel): jobs: Optional[List[Job]] = Field( default=None, description="The jobs of the workflow." 
) + tenant_id: StrictStr = Field( + description="The tenant id of the workflow.", alias="tenantId" + ) __properties: ClassVar[List[str]] = [ "metadata", "name", @@ -55,6 +58,7 @@ class Workflow(BaseModel): "versions", "tags", "jobs", + "tenantId", ] model_config = ConfigDict( @@ -154,6 +158,7 @@ class Workflow(BaseModel): if obj.get("jobs") is not None else None ), + "tenantId": obj.get("tenantId"), } ) return _obj diff --git a/sdks/python/hatchet_sdk/config.py b/sdks/python/hatchet_sdk/config.py index 4e502db7f..356480cf3 100644 --- a/sdks/python/hatchet_sdk/config.py +++ b/sdks/python/hatchet_sdk/config.py @@ -1,5 +1,6 @@ import json from logging import Logger, getLogger +from typing import overload from pydantic import Field, field_validator, model_validator from pydantic_settings import BaseSettings, SettingsConfigDict @@ -121,9 +122,37 @@ class ClientConfig(BaseSettings): def validate_namespace(cls, namespace: str) -> str: if not namespace: return "" + if not namespace.endswith("_"): namespace = f"{namespace}_" + return namespace.lower() def __hash__(self) -> int: return hash(json.dumps(self.model_dump(), default=str)) + + @overload + def apply_namespace( + self, resource_name: str, namespace_override: str | None = None + ) -> str: ... + + @overload + def apply_namespace( + self, resource_name: None, namespace_override: str | None = None + ) -> None: ... + + def apply_namespace( + self, resource_name: str | None, namespace_override: str | None = None + ) -> str | None: + if resource_name is None: + return None + + namespace = namespace_override or self.namespace + + if not namespace: + return resource_name + + if resource_name.startswith(namespace): + return resource_name + + return namespace + resource_name diff --git a/sdks/python/hatchet_sdk/context/context.py b/sdks/python/hatchet_sdk/context/context.py index 1aa1857ff..7a2ba8636 100644 --- a/sdks/python/hatchet_sdk/context/context.py +++ b/sdks/python/hatchet_sdk/context/context.py @@ -59,6 +59,7 @@ class Context: self.stream_event_thread_pool = ThreadPoolExecutor(max_workers=1) self.input = self.data.input + self.filter_payload = self.data.filter_payload self._lifespan_context = lifespan_context diff --git a/sdks/python/hatchet_sdk/contracts/events_pb2.py b/sdks/python/hatchet_sdk/contracts/events_pb2.py index c3d844324..e4d318f4e 100644 --- a/sdks/python/hatchet_sdk/contracts/events_pb2.py +++ b/sdks/python/hatchet_sdk/contracts/events_pb2.py @@ -15,7 +15,7 @@ _sym_db = _symbol_database.Default() from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xb4\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\x92\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\tB\x08\n\x06_level\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\x9c\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x42\x15\n\x13_additionalMetadata\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3') +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x65vents.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xd2\x01\n\x05\x45vent\x12\x10\n\x08tenantId\x18\x01 \x01(\t\x12\x0f\n\x07\x65ventId\x18\x02 \x01(\t\x12\x0b\n\x03key\x18\x03 \x01(\t\x12\x0f\n\x07payload\x18\x04 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x06 \x01(\tH\x00\x88\x01\x01\x12\x12\n\x05scope\x18\x07 \x01(\tH\x01\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x08\n\x06_scope\" \n\x06\x45vents\x12\x16\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x06.Event\"\xc2\x01\n\rPutLogRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x12\n\x05level\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x10\n\x08metadata\x18\x05 \x01(\t\x12\x1b\n\x0etaskRetryCount\x18\x06 \x01(\x05H\x01\x88\x01\x01\x42\x08\n\x06_levelB\x11\n\x0f_taskRetryCount\"\x10\n\x0ePutLogResponse\"|\n\x15PutStreamEventRequest\x12\x11\n\tstepRunId\x18\x01 \x01(\t\x12-\n\tcreatedAt\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07message\x18\x03 \x01(\x0c\x12\x10\n\x08metadata\x18\x05 \x01(\t\"\x18\n\x16PutStreamEventResponse\"9\n\x14\x42ulkPushEventRequest\x12!\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x11.PushEventRequest\"\xde\x01\n\x10PushEventRequest\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x0f\n\x07payload\x18\x02 \x01(\t\x12\x32\n\x0e\x65ventTimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x1f\n\x12\x61\x64\x64itionalMetadata\x18\x04 \x01(\tH\x00\x88\x01\x01\x12\x15\n\x08priority\x18\x05 \x01(\x05H\x01\x88\x01\x01\x12\x12\n\x05scope\x18\x06 \x01(\tH\x02\x88\x01\x01\x42\x15\n\x13_additionalMetadataB\x0b\n\t_priorityB\x08\n\x06_scope\"%\n\x12ReplayEventRequest\x12\x0f\n\x07\x65ventId\x18\x01 \x01(\t2\x88\x02\n\rEventsService\x12#\n\x04Push\x12\x11.PushEventRequest\x1a\x06.Event\"\x00\x12,\n\x08\x42ulkPush\x12\x15.BulkPushEventRequest\x1a\x07.Events\"\x00\x12\x32\n\x11ReplaySingleEvent\x12\x13.ReplayEventRequest\x1a\x06.Event\"\x00\x12+\n\x06PutLog\x12\x0e.PutLogRequest\x1a\x0f.PutLogResponse\"\x00\x12\x43\n\x0ePutStreamEvent\x12\x16.PutStreamEventRequest\x1a\x17.PutStreamEventResponse\"\x00\x42\x45ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contractsb\x06proto3') _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) @@ -24,23 +24,23 
@@ if not _descriptor._USE_C_DESCRIPTORS: _globals['DESCRIPTOR']._loaded_options = None _globals['DESCRIPTOR']._serialized_options = b'ZCgithub.com/hatchet-dev/hatchet/internal/services/ingestor/contracts' _globals['_EVENT']._serialized_start=50 - _globals['_EVENT']._serialized_end=230 - _globals['_EVENTS']._serialized_start=232 - _globals['_EVENTS']._serialized_end=264 - _globals['_PUTLOGREQUEST']._serialized_start=267 - _globals['_PUTLOGREQUEST']._serialized_end=413 - _globals['_PUTLOGRESPONSE']._serialized_start=415 - _globals['_PUTLOGRESPONSE']._serialized_end=431 - _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=433 - _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=557 - _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=559 - _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=583 - _globals['_BULKPUSHEVENTREQUEST']._serialized_start=585 - _globals['_BULKPUSHEVENTREQUEST']._serialized_end=642 - _globals['_PUSHEVENTREQUEST']._serialized_start=645 - _globals['_PUSHEVENTREQUEST']._serialized_end=801 - _globals['_REPLAYEVENTREQUEST']._serialized_start=803 - _globals['_REPLAYEVENTREQUEST']._serialized_end=840 - _globals['_EVENTSSERVICE']._serialized_start=843 - _globals['_EVENTSSERVICE']._serialized_end=1107 + _globals['_EVENT']._serialized_end=260 + _globals['_EVENTS']._serialized_start=262 + _globals['_EVENTS']._serialized_end=294 + _globals['_PUTLOGREQUEST']._serialized_start=297 + _globals['_PUTLOGREQUEST']._serialized_end=491 + _globals['_PUTLOGRESPONSE']._serialized_start=493 + _globals['_PUTLOGRESPONSE']._serialized_end=509 + _globals['_PUTSTREAMEVENTREQUEST']._serialized_start=511 + _globals['_PUTSTREAMEVENTREQUEST']._serialized_end=635 + _globals['_PUTSTREAMEVENTRESPONSE']._serialized_start=637 + _globals['_PUTSTREAMEVENTRESPONSE']._serialized_end=661 + _globals['_BULKPUSHEVENTREQUEST']._serialized_start=663 + _globals['_BULKPUSHEVENTREQUEST']._serialized_end=720 + _globals['_PUSHEVENTREQUEST']._serialized_start=723 + _globals['_PUSHEVENTREQUEST']._serialized_end=945 + _globals['_REPLAYEVENTREQUEST']._serialized_start=947 + _globals['_REPLAYEVENTREQUEST']._serialized_end=984 + _globals['_EVENTSSERVICE']._serialized_start=987 + _globals['_EVENTSSERVICE']._serialized_end=1251 # @@protoc_insertion_point(module_scope) diff --git a/sdks/python/hatchet_sdk/contracts/events_pb2.pyi b/sdks/python/hatchet_sdk/contracts/events_pb2.pyi index e9132fb26..9661e64d9 100644 --- a/sdks/python/hatchet_sdk/contracts/events_pb2.pyi +++ b/sdks/python/hatchet_sdk/contracts/events_pb2.pyi @@ -7,20 +7,22 @@ from typing import ClassVar as _ClassVar, Iterable as _Iterable, Mapping as _Map DESCRIPTOR: _descriptor.FileDescriptor class Event(_message.Message): - __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata") + __slots__ = ("tenantId", "eventId", "key", "payload", "eventTimestamp", "additionalMetadata", "scope") TENANTID_FIELD_NUMBER: _ClassVar[int] EVENTID_FIELD_NUMBER: _ClassVar[int] KEY_FIELD_NUMBER: _ClassVar[int] PAYLOAD_FIELD_NUMBER: _ClassVar[int] EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int] + SCOPE_FIELD_NUMBER: _ClassVar[int] tenantId: str eventId: str key: str payload: str eventTimestamp: _timestamp_pb2.Timestamp additionalMetadata: str - def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) 
-> None: ... + scope: str + def __init__(self, tenantId: _Optional[str] = ..., eventId: _Optional[str] = ..., key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., scope: _Optional[str] = ...) -> None: ... class Events(_message.Message): __slots__ = ("events",) @@ -29,18 +31,20 @@ class Events(_message.Message): def __init__(self, events: _Optional[_Iterable[_Union[Event, _Mapping]]] = ...) -> None: ... class PutLogRequest(_message.Message): - __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata") + __slots__ = ("stepRunId", "createdAt", "message", "level", "metadata", "taskRetryCount") STEPRUNID_FIELD_NUMBER: _ClassVar[int] CREATEDAT_FIELD_NUMBER: _ClassVar[int] MESSAGE_FIELD_NUMBER: _ClassVar[int] LEVEL_FIELD_NUMBER: _ClassVar[int] METADATA_FIELD_NUMBER: _ClassVar[int] + TASKRETRYCOUNT_FIELD_NUMBER: _ClassVar[int] stepRunId: str createdAt: _timestamp_pb2.Timestamp message: str level: str metadata: str - def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ...) -> None: ... + taskRetryCount: int + def __init__(self, stepRunId: _Optional[str] = ..., createdAt: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., message: _Optional[str] = ..., level: _Optional[str] = ..., metadata: _Optional[str] = ..., taskRetryCount: _Optional[int] = ...) -> None: ... class PutLogResponse(_message.Message): __slots__ = () @@ -69,16 +73,20 @@ class BulkPushEventRequest(_message.Message): def __init__(self, events: _Optional[_Iterable[_Union[PushEventRequest, _Mapping]]] = ...) -> None: ... class PushEventRequest(_message.Message): - __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata") + __slots__ = ("key", "payload", "eventTimestamp", "additionalMetadata", "priority", "scope") KEY_FIELD_NUMBER: _ClassVar[int] PAYLOAD_FIELD_NUMBER: _ClassVar[int] EVENTTIMESTAMP_FIELD_NUMBER: _ClassVar[int] ADDITIONALMETADATA_FIELD_NUMBER: _ClassVar[int] + PRIORITY_FIELD_NUMBER: _ClassVar[int] + SCOPE_FIELD_NUMBER: _ClassVar[int] key: str payload: str eventTimestamp: _timestamp_pb2.Timestamp additionalMetadata: str - def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ...) -> None: ... + priority: int + scope: str + def __init__(self, key: _Optional[str] = ..., payload: _Optional[str] = ..., eventTimestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., additionalMetadata: _Optional[str] = ..., priority: _Optional[int] = ..., scope: _Optional[str] = ...) -> None: ... 
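The regenerated stubs above only show signatures, so a minimal sketch of the new optional fields in use may help (the key and payload values are illustrative; the import path follows the generated module in this patch):

    from google.protobuf.timestamp_pb2 import Timestamp

    from hatchet_sdk.contracts.events_pb2 import PushEventRequest

    # Build a timestamp for the event.
    ts = Timestamp()
    ts.GetCurrentTime()

    req = PushEventRequest(
        key="user:created",           # illustrative event key
        payload='{"user_id": 1234}',  # payload is a plain string (typically JSON)
        eventTimestamp=ts,
        priority=1,                   # new optional int32 field
        scope="customer-a",           # new optional field; subsets candidate filters
    )

    # Both new fields are proto3 `optional`, so presence is tracked explicitly.
    assert req.HasField("priority") and req.HasField("scope")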
class ReplayEventRequest(_message.Message): __slots__ = ("eventId",) diff --git a/sdks/python/hatchet_sdk/features/cron.py b/sdks/python/hatchet_sdk/features/cron.py index aeeceab71..b2a63845b 100644 --- a/sdks/python/hatchet_sdk/features/cron.py +++ b/sdks/python/hatchet_sdk/features/cron.py @@ -102,7 +102,7 @@ class CronClient(BaseRestClient): with self.client() as client: return self._wra(client).cron_workflow_trigger_create( tenant=self.client_config.tenant_id, - workflow=workflow_name, + workflow=self.client_config.apply_namespace(workflow_name), create_cron_workflow_trigger_request=CreateCronWorkflowTriggerRequest( cronName=cron_name, cronExpression=validated_input.expression, diff --git a/sdks/python/hatchet_sdk/features/filters.py b/sdks/python/hatchet_sdk/features/filters.py new file mode 100644 index 000000000..559fa60a5 --- /dev/null +++ b/sdks/python/hatchet_sdk/features/filters.py @@ -0,0 +1,181 @@ +import asyncio + +from hatchet_sdk.clients.rest.api.filter_api import FilterApi +from hatchet_sdk.clients.rest.api_client import ApiClient +from hatchet_sdk.clients.rest.models.v1_create_filter_request import ( + V1CreateFilterRequest, +) +from hatchet_sdk.clients.rest.models.v1_filter import V1Filter +from hatchet_sdk.clients.rest.models.v1_filter_list import V1FilterList +from hatchet_sdk.clients.v1.api_client import BaseRestClient +from hatchet_sdk.utils.typing import JSONSerializableMapping + + +class FiltersClient(BaseRestClient): + """ + The filters client is a client for interacting with Hatchet's filters API. + """ + + def _fa(self, client: ApiClient) -> FilterApi: + return FilterApi(client) + + async def aio_list( + self, + limit: int | None = None, + offset: int | None = None, + workflow_id_scope_pairs: list[tuple[str, str]] | None = None, + ) -> V1FilterList: + """ + List filters for a given tenant. + + :param limit: The maximum number of filters to return. + :param offset: The number of filters to skip before starting to collect the result set. + :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second. + + :return: A list of filters matching the specified criteria. + """ + return await asyncio.to_thread( + self.list, limit, offset, workflow_id_scope_pairs + ) + + def list( + self, + limit: int | None = None, + offset: int | None = None, + workflow_id_scope_pairs: list[tuple[str, str]] | None = None, + ) -> V1FilterList: + """ + List filters for a given tenant. + + :param limit: The maximum number of filters to return. + :param offset: The number of filters to skip before starting to collect the result set. + :param workflow_id_scope_pairs: A list of tuples containing workflow IDs and scopes to filter by. The workflow id is first, then the scope is second. + + :return: A list of filters matching the specified criteria. + """ + workflow_ids = ( + [pair[0] for pair in workflow_id_scope_pairs] + if workflow_id_scope_pairs + else None + ) + scopes = ( + [pair[1] for pair in workflow_id_scope_pairs] + if workflow_id_scope_pairs + else None + ) + + with self.client() as client: + return self._fa(client).v1_filter_list( + tenant=self.tenant_id, + limit=limit, + offset=offset, + workflow_ids=workflow_ids, + scopes=scopes, + ) + + def get( + self, + filter_id: str, + ) -> V1Filter: + """ + Get a filter by its ID. + + :param filter_id: The ID of the filter to retrieve. + + :return: The filter with the specified ID. 
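+
+        Example (illustrative; assumes ``hatchet`` is a configured ``Hatchet``
+        client and the ID comes from an existing filter's ``metadata.id``)::
+
+            f = hatchet.filters.get(filter_id="<filter-id>")
+            print(f.expression, f.scope)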
+ """ + with self.client() as client: + return self._fa(client).v1_filter_get( + tenant=self.tenant_id, + v1_filter=filter_id, + ) + + async def aio_get( + self, + filter_id: str, + ) -> V1Filter: + """ + Get a filter by its ID. + + :param filter_id: The ID of the filter to retrieve. + + :return: The filter with the specified ID. + """ + return await asyncio.to_thread(self.get, filter_id) + + def create( + self, + workflow_id: str, + expression: str, + scope: str, + payload: JSONSerializableMapping = {}, + ) -> V1Filter: + """ + Create a new filter. + + :param workflow_id: The ID of the workflow to associate with the filter. + :param expression: The expression to evaluate for the filter. + :param scope: The scope for the filter. + :param payload: The payload to send with the filter. + + :return: The created filter. + """ + with self.client() as client: + return self._fa(client).v1_filter_create( + tenant=self.tenant_id, + v1_create_filter_request=V1CreateFilterRequest( + workflowId=workflow_id, + expression=expression, + scope=scope, + payload=dict(payload), + ), + ) + + async def aio_create( + self, + workflow_id: str, + expression: str, + scope: str, + payload: JSONSerializableMapping = {}, + ) -> V1Filter: + """ + Create a new filter. + + :param workflow_id: The ID of the workflow to associate with the filter. + :param expression: The expression to evaluate for the filter. + :param scope: The scope for the filter. + :param payload: The payload to send with the filter. + + :return: The created filter. + """ + return await asyncio.to_thread( + self.create, workflow_id, expression, scope, payload + ) + + def delete( + self, + filter_id: str, + ) -> V1Filter: + """ + Delete a filter by its ID. + + :param filter_id: The ID of the filter to delete. + :return: The deleted filter. + """ + with self.client() as client: + return self._fa(client).v1_filter_delete( + tenant=self.tenant_id, + v1_filter=filter_id, + ) + + async def aio_delete( + self, + filter_id: str, + ) -> V1Filter: + """ + Delete a filter by its ID. + + :param filter_id: The ID of the filter to delete. + :return: The deleted filter. + """ + return await asyncio.to_thread(self.delete, filter_id) diff --git a/sdks/python/hatchet_sdk/features/runs.py b/sdks/python/hatchet_sdk/features/runs.py index ce97253af..b309c9252 100644 --- a/sdks/python/hatchet_sdk/features/runs.py +++ b/sdks/python/hatchet_sdk/features/runs.py @@ -141,6 +141,7 @@ class RunsClient(BaseRestClient): workflow_ids: list[str] | None = None, worker_id: str | None = None, parent_task_external_id: str | None = None, + triggering_event_external_id: str | None = None, ) -> V1TaskSummaryList: """ List task runs according to a set of filters. @@ -155,6 +156,7 @@ class RunsClient(BaseRestClient): :param workflow_ids: The workflow IDs to filter task runs by. :param worker_id: The worker ID to filter task runs by. :param parent_task_external_id: The parent task external ID to filter task runs by. + :param triggering_event_external_id: The event id that triggered the task run. :return: A list of task runs matching the specified filters. 
""" @@ -170,6 +172,7 @@ class RunsClient(BaseRestClient): workflow_ids=workflow_ids, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) def list( @@ -184,6 +187,7 @@ class RunsClient(BaseRestClient): workflow_ids: list[str] | None = None, worker_id: str | None = None, parent_task_external_id: str | None = None, + triggering_event_external_id: str | None = None, ) -> V1TaskSummaryList: """ List task runs according to a set of filters. @@ -198,6 +202,7 @@ class RunsClient(BaseRestClient): :param workflow_ids: The workflow IDs to filter task runs by. :param worker_id: The worker ID to filter task runs by. :param parent_task_external_id: The parent task external ID to filter task runs by. + :param triggering_event_external_id: The event id that triggered the task run. :return: A list of task runs matching the specified filters. """ @@ -216,6 +221,7 @@ class RunsClient(BaseRestClient): workflow_ids=workflow_ids, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) def create( @@ -241,7 +247,7 @@ class RunsClient(BaseRestClient): return self._wra(client).v1_workflow_run_create( tenant=self.client_config.tenant_id, v1_trigger_workflow_run_request=V1TriggerWorkflowRunRequest( - workflowName=workflow_name, + workflowName=self.client_config.apply_namespace(workflow_name), input=dict(input), additionalMetadata=dict(additional_metadata), priority=priority, diff --git a/sdks/python/hatchet_sdk/features/scheduled.py b/sdks/python/hatchet_sdk/features/scheduled.py index afcda8325..39b41b258 100644 --- a/sdks/python/hatchet_sdk/features/scheduled.py +++ b/sdks/python/hatchet_sdk/features/scheduled.py @@ -59,7 +59,7 @@ class ScheduledClient(BaseRestClient): with self.client() as client: return self._wra(client).scheduled_workflow_run_create( tenant=self.client_config.tenant_id, - workflow=workflow_name, + workflow=self.client_config.apply_namespace(workflow_name), schedule_workflow_run_request=ScheduleWorkflowRunRequest( triggerAt=trigger_at, input=dict(input), diff --git a/sdks/python/hatchet_sdk/features/workflows.py b/sdks/python/hatchet_sdk/features/workflows.py index 79ed4d518..077540ca0 100644 --- a/sdks/python/hatchet_sdk/features/workflows.py +++ b/sdks/python/hatchet_sdk/features/workflows.py @@ -61,7 +61,7 @@ class WorkflowsClient(BaseRestClient): tenant=self.client_config.tenant_id, limit=limit, offset=offset, - name=workflow_name, + name=self.client_config.apply_namespace(workflow_name), ) async def aio_list( diff --git a/sdks/python/hatchet_sdk/hatchet.py b/sdks/python/hatchet_sdk/hatchet.py index 70768ebdb..ec63eb653 100644 --- a/sdks/python/hatchet_sdk/hatchet.py +++ b/sdks/python/hatchet_sdk/hatchet.py @@ -10,6 +10,7 @@ from hatchet_sdk.clients.events import EventClient from hatchet_sdk.clients.listeners.run_event_listener import RunEventListenerClient from hatchet_sdk.config import ClientConfig from hatchet_sdk.features.cron import CronClient +from hatchet_sdk.features.filters import FiltersClient from hatchet_sdk.features.logs import LogsClient from hatchet_sdk.features.metrics import MetricsClient from hatchet_sdk.features.rate_limits import RateLimitsClient @@ -64,6 +65,13 @@ class Hatchet: """ return self._client.cron + @property + def filters(self) -> FiltersClient: + """ + The filters client is a client for interacting with Hatchet's filters API. 
+ """ + return self._client.filters + @property def logs(self) -> LogsClient: """ @@ -285,7 +293,7 @@ class Hatchet: def task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: None = None, on_events: list[str] = [], @@ -310,7 +318,7 @@ class Hatchet: def task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: Type[TWorkflowInput], on_events: list[str] = [], @@ -334,7 +342,7 @@ class Hatchet: def task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: Type[TWorkflowInput] | None = None, on_events: list[str] = [], @@ -398,45 +406,47 @@ class Hatchet: :returns: A decorator which creates a `Standalone` task object. """ - workflow = Workflow[TWorkflowInput]( - WorkflowConfig( - name=name, - version=version, - description=description, - on_events=on_events, - on_crons=on_crons, - sticky=sticky, - concurrency=concurrency, - default_priority=default_priority, - input_validator=input_validator - or cast(Type[TWorkflowInput], EmptyModel), - ), - self, - ) - - if isinstance(concurrency, list): - _concurrency = concurrency - elif isinstance(concurrency, ConcurrencyExpression): - _concurrency = [concurrency] - else: - _concurrency = [] - - task_wrapper = workflow.task( - name=name, - schedule_timeout=schedule_timeout, - execution_timeout=execution_timeout, - parents=[], - retries=retries, - rate_limits=rate_limits, - desired_worker_labels=desired_worker_labels, - backoff_factor=backoff_factor, - backoff_max_seconds=backoff_max_seconds, - concurrency=_concurrency, - ) - def inner( func: Callable[[TWorkflowInput, Context], R | CoroutineLike[R]], ) -> Standalone[TWorkflowInput, R]: + inferred_name = name or func.__name__ + + workflow = Workflow[TWorkflowInput]( + WorkflowConfig( + name=inferred_name, + version=version, + description=description, + on_events=on_events, + on_crons=on_crons, + sticky=sticky, + concurrency=concurrency, + default_priority=default_priority, + input_validator=input_validator + or cast(Type[TWorkflowInput], EmptyModel), + ), + self, + ) + + if isinstance(concurrency, list): + _concurrency = concurrency + elif isinstance(concurrency, ConcurrencyExpression): + _concurrency = [concurrency] + else: + _concurrency = [] + + task_wrapper = workflow.task( + name=inferred_name, + schedule_timeout=schedule_timeout, + execution_timeout=execution_timeout, + parents=[], + retries=retries, + rate_limits=rate_limits, + desired_worker_labels=desired_worker_labels, + backoff_factor=backoff_factor, + backoff_max_seconds=backoff_max_seconds, + concurrency=_concurrency, + ) + created_task = task_wrapper(func) return Standalone[TWorkflowInput, R]( @@ -450,7 +460,7 @@ class Hatchet: def durable_task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: None = None, on_events: list[str] = [], @@ -475,7 +485,7 @@ class Hatchet: def durable_task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: Type[TWorkflowInput], on_events: list[str] = [], @@ -499,7 +509,7 @@ class Hatchet: def durable_task( self, *, - name: str, + name: str | None = None, description: str | None = None, input_validator: Type[TWorkflowInput] | None = None, on_events: list[str] = [], @@ -563,38 +573,39 @@ class Hatchet: :returns: A decorator which creates a `Standalone` task object. 
""" - workflow = Workflow[TWorkflowInput]( - WorkflowConfig( - name=name, - version=version, - description=description, - on_events=on_events, - on_crons=on_crons, - sticky=sticky, - concurrency=concurrency, - input_validator=input_validator - or cast(Type[TWorkflowInput], EmptyModel), - default_priority=default_priority, - ), - self, - ) - - task_wrapper = workflow.durable_task( - name=name, - schedule_timeout=schedule_timeout, - execution_timeout=execution_timeout, - parents=[], - retries=retries, - rate_limits=rate_limits, - desired_worker_labels=desired_worker_labels, - backoff_factor=backoff_factor, - backoff_max_seconds=backoff_max_seconds, - concurrency=[concurrency] if concurrency else [], - ) - def inner( func: Callable[[TWorkflowInput, DurableContext], R | CoroutineLike[R]], ) -> Standalone[TWorkflowInput, R]: + inferred_name = name or func.__name__ + workflow = Workflow[TWorkflowInput]( + WorkflowConfig( + name=inferred_name, + version=version, + description=description, + on_events=on_events, + on_crons=on_crons, + sticky=sticky, + concurrency=concurrency, + input_validator=input_validator + or cast(Type[TWorkflowInput], EmptyModel), + default_priority=default_priority, + ), + self, + ) + + task_wrapper = workflow.durable_task( + name=inferred_name, + schedule_timeout=schedule_timeout, + execution_timeout=execution_timeout, + parents=[], + retries=retries, + rate_limits=rate_limits, + desired_worker_labels=desired_worker_labels, + backoff_factor=backoff_factor, + backoff_max_seconds=backoff_max_seconds, + concurrency=[concurrency] if concurrency else [], + ) + created_task = task_wrapper(func) return Standalone[TWorkflowInput, R]( diff --git a/sdks/python/hatchet_sdk/runnables/standalone.py b/sdks/python/hatchet_sdk/runnables/standalone.py index bdd7437e4..77004e27b 100644 --- a/sdks/python/hatchet_sdk/runnables/standalone.py +++ b/sdks/python/hatchet_sdk/runnables/standalone.py @@ -309,6 +309,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): additional_metadata: dict[str, str] | None = None, worker_id: str | None = None, parent_task_external_id: str | None = None, + triggering_event_external_id: str | None = None, ) -> list[V1TaskSummary]: """ List runs of the workflow. @@ -321,6 +322,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): :param additional_metadata: Additional metadata for filtering the runs. :param worker_id: The ID of the worker that ran the tasks. :param parent_task_external_id: The external ID of the parent task. + :param triggering_event_external_id: The event id that triggered the task run. :returns: A list of `V1TaskSummary` objects representing the runs of the workflow. """ @@ -343,6 +345,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): additional_metadata=additional_metadata, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) return response.rows @@ -357,6 +360,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): additional_metadata: dict[str, str] | None = None, worker_id: str | None = None, parent_task_external_id: str | None = None, + triggering_event_external_id: str | None = None, ) -> list[V1TaskSummary]: """ List runs of the workflow. @@ -369,6 +373,7 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): :param additional_metadata: Additional metadata for filtering the runs. 
:param worker_id: The ID of the worker that ran the tasks. :param parent_task_external_id: The external ID of the parent task. + :param triggering_event_external_id: The event id that triggered the task run. :returns: A list of `V1TaskSummary` objects representing the runs of the workflow. """ @@ -382,4 +387,5 @@ class Standalone(BaseWorkflow[TWorkflowInput], Generic[TWorkflowInput, R]): additional_metadata=additional_metadata, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) diff --git a/sdks/python/hatchet_sdk/runnables/workflow.py b/sdks/python/hatchet_sdk/runnables/workflow.py index e7467f696..c4885d2cc 100644 --- a/sdks/python/hatchet_sdk/runnables/workflow.py +++ b/sdks/python/hatchet_sdk/runnables/workflow.py @@ -1,5 +1,6 @@ import asyncio from datetime import datetime, timedelta +from functools import cached_property from typing import TYPE_CHECKING, Any, Callable, Generic, TypeVar, cast from google.protobuf import timestamp_pb2 @@ -127,7 +128,7 @@ class BaseWorkflow(Generic[TWorkflowInput]): @property def service_name(self) -> str: - return f"{self.client.config.namespace}{self.config.name.lower()}" + return self.client.config.apply_namespace(self.config.name.lower()) def _create_action_name(self, step: Task[TWorkflowInput, Any]) -> str: return self.service_name + ":" + step.name @@ -140,7 +141,10 @@ class BaseWorkflow(Generic[TWorkflowInput]): service_name = self.service_name name = self.name - event_triggers = [namespace + event for event in self.config.on_events] + event_triggers = [ + self.client.config.apply_namespace(event, namespace) + for event in self.config.on_events + ] if self._on_success_task: self._on_success_task.parents = [ @@ -251,6 +255,23 @@ class BaseWorkflow(Generic[TWorkflowInput]): f"Input must be a BaseModel or `None`, got {type(input)} instead." ) + @cached_property + def id(self) -> str: + """ + Get the ID of the workflow. + + :raises ValueError: If no workflow ID is found for the workflow name. + :returns: The ID of the workflow. + """ + workflows = self.client.workflows.list(workflow_name=self.name) + + if not workflows.rows: + raise ValueError(f"No id found for {self.name}") + + workflow = workflows.rows[0] + + return workflow.metadata.id + class Workflow(BaseWorkflow[TWorkflowInput]): """ @@ -912,6 +933,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]): worker_id: str | None = None, parent_task_external_id: str | None = None, only_tasks: bool = False, + triggering_event_external_id: str | None = None, ) -> list[V1TaskSummary]: """ List runs of the workflow. @@ -925,6 +947,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]): :param worker_id: The ID of the worker that ran the tasks. :param parent_task_external_id: The external ID of the parent task. :param only_tasks: Whether to list only task runs. + :param triggering_event_external_id: The event id that triggered the task run. :returns: A list of `V1TaskSummary` objects representing the runs of the workflow. 
""" @@ -947,6 +970,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]): additional_metadata=additional_metadata, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) return response.rows @@ -962,6 +986,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]): worker_id: str | None = None, parent_task_external_id: str | None = None, only_tasks: bool = False, + triggering_event_external_id: str | None = None, ) -> list[V1TaskSummary]: """ List runs of the workflow. @@ -975,6 +1000,7 @@ class Workflow(BaseWorkflow[TWorkflowInput]): :param worker_id: The ID of the worker that ran the tasks. :param parent_task_external_id: The external ID of the parent task. :param only_tasks: Whether to list only task runs. + :param triggering_event_external_id: The event id that triggered the task run. :returns: A list of `V1TaskSummary` objects representing the runs of the workflow. """ @@ -989,4 +1015,5 @@ class Workflow(BaseWorkflow[TWorkflowInput]): additional_metadata=additional_metadata, worker_id=worker_id, parent_task_external_id=parent_task_external_id, + triggering_event_external_id=triggering_event_external_id, ) diff --git a/sdks/python/hatchet_sdk/worker/worker.py b/sdks/python/hatchet_sdk/worker/worker.py index bf23605cd..ee226e3d7 100644 --- a/sdks/python/hatchet_sdk/worker/worker.py +++ b/sdks/python/hatchet_sdk/worker/worker.py @@ -96,7 +96,7 @@ class Worker: lifespan: LifespanFn | None = None, ) -> None: self.config = config - self.name = self.config.namespace + name + self.name = self.config.apply_namespace(name) self.slots = slots self.durable_slots = durable_slots self.debug = debug diff --git a/sdks/python/pyproject.toml b/sdks/python/pyproject.toml index 1e813fbb8..0bcbc5032 100644 --- a/sdks/python/pyproject.toml +++ b/sdks/python/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "hatchet-sdk" -version = "1.9.1" +version = "1.10.0" description = "" authors = ["Alexander Belanger "] readme = "README.md" diff --git a/sdks/typescript/package.json b/sdks/typescript/package.json index d50ee6697..4ab7850e4 100644 --- a/sdks/typescript/package.json +++ b/sdks/typescript/package.json @@ -1,6 +1,6 @@ { "name": "@hatchet-dev/typescript-sdk", - "version": "1.5.4", + "version": "1.6.0", "description": "Background task orchestration & visibility for developers", "types": "dist/index.d.ts", "files": [ diff --git a/sdks/typescript/src/clients/event/event-client.test.ts b/sdks/typescript/src/clients/event/event-client.test.ts index 844b418bd..12d21e3c7 100644 --- a/sdks/typescript/src/clients/event/event-client.test.ts +++ b/sdks/typescript/src/clients/event/event-client.test.ts @@ -2,6 +2,7 @@ import HatchetError from '@util/errors/hatchet-error'; import { DEFAULT_LOGGER } from '@clients/hatchet-client/hatchet-logger'; import { EventClient } from './event-client'; import { mockChannel, mockFactory } from '../hatchet-client/hatchet-client.test'; +import { LegacyHatchetClient } from '../hatchet-client'; let client: EventClient; @@ -23,7 +24,8 @@ describe('EventClient', () => { logger: DEFAULT_LOGGER, }, mockChannel, - mockFactory + mockFactory, + new LegacyHatchetClient() ); expect(x).toBeDefined(); @@ -46,7 +48,8 @@ describe('EventClient', () => { logger: DEFAULT_LOGGER, }, mockChannel, - mockFactory + mockFactory, + new LegacyHatchetClient() ); }); diff --git a/sdks/typescript/src/clients/event/event-client.ts b/sdks/typescript/src/clients/event/event-client.ts index a0474f373..ef192d237 100644 --- 
a/sdks/typescript/src/clients/event/event-client.ts +++ b/sdks/typescript/src/clients/event/event-client.ts @@ -9,6 +9,8 @@ import HatchetError from '@util/errors/hatchet-error'; import { ClientConfig } from '@clients/hatchet-client/client-config'; import { Logger } from '@hatchet/util/logger'; import { retrier } from '@hatchet/util/retrier'; +import { HatchetClient } from '@hatchet-dev/typescript-sdk/v1'; +import { LegacyHatchetClient } from '../hatchet-client'; // eslint-disable-next-line no-shadow export enum LogLevel { @@ -20,25 +22,38 @@ export enum LogLevel { export interface PushEventOptions { additionalMetadata?: Record<string, string>; + priority?: number; + scope?: string; } export interface EventWithMetadata<T> { payload: T; additionalMetadata?: Record<string, string>; + priority?: number; + scope?: string; } export class EventClient { config: ClientConfig; client: EventsServiceClient; retrier: typeof retrier; + api: HatchetClient['api']; + tenantId: string; logger: Logger; - constructor(config: ClientConfig, channel: Channel, factory: ClientFactory) { + constructor( + config: ClientConfig, + channel: Channel, + factory: ClientFactory, + hatchetClient: LegacyHatchetClient + ) { this.config = config; this.client = factory.create(EventsServiceDefinition, channel); this.logger = config.logger(`Dispatcher`, config.log_level); this.retrier = retrier; + this.api = hatchetClient.api; + this.tenantId = config.tenant_id; } push<T>(type: string, input: T, options: PushEventOptions = {}) { @@ -51,6 +66,8 @@ export class EventClient { additionalMetadata: options.additionalMetadata ? JSON.stringify(options.additionalMetadata) : undefined, + priority: options.priority, + scope: options.scope, }; try { @@ -79,6 +96,8 @@ export class EventClient { } return undefined; })(), + priority: input.priority, + scope: input.scope, }; }); @@ -150,4 +169,9 @@ export class EventClient { this.logger.warn(`Could not put log: ${e.message}`); }); } + + async list(opts?: Parameters<typeof this.api.v1EventList>[1]) { + const { data } = await this.api.v1EventList(this.tenantId, opts); + return data; + } } diff --git a/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts b/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts index 167748341..fa3eadd4f 100644 --- a/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts +++ b/sdks/typescript/src/clients/hatchet-client/hatchet-client.ts @@ -87,7 +87,8 @@ export class LegacyHatchetClient { this.event = new EventClient( this.config, channelFactory(this.config, this.credentials), - clientFactory + clientFactory, + this ); this.dispatcher = new DispatcherClient( this.config, diff --git a/sdks/typescript/src/clients/rest/generated/Api.ts b/sdks/typescript/src/clients/rest/generated/Api.ts index e8eba8fbe..77b8b26c5 100644 --- a/sdks/typescript/src/clients/rest/generated/Api.ts +++ b/sdks/typescript/src/clients/rest/generated/Api.ts @@ -85,7 +85,11 @@ import { UserRegisterRequest, UserTenantMembershipsList, V1CancelTaskRequest, + V1CreateFilterRequest, V1DagChildren, + V1EventList, + V1Filter, + V1FilterList, V1LogLineList, V1ReplayTaskRequest, V1TaskEventList, @@ -94,6 +98,7 @@ import { V1TaskStatus, V1TaskSummary, V1TaskSummaryList, + V1TaskTimingList, V1TriggerWorkflowRunRequest, V1WorkflowRunDetails, V1WorkflowRunDisplayNameList, @@ -133,10 +138,18 @@ export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> + v1TaskGet = ( + task: string, + query?: { + /** The attempt number */ + attempt?: number; + }, + params: RequestParams = {} + ) => this.request<V1TaskSummary, APIErrors>({ path: `/api/v1/stable/tasks/${task}`, method: 'GET', + query: query, secure: true,
format: 'json', ...params, @@ -312,6 +325,13 @@ export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> @@ -421,6 +441,34 @@ export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> + this.request<V1TaskTimingList, APIErrors>({ + path: `/api/v1/stable/workflow-runs/${v1WorkflowRun}/task-timings`, + method: 'GET', + query: query, + secure: true, + format: 'json', + ...params, + }); /** * @description Get a summary of task run metrics for a tenant * @@ -438,6 +486,11 @@ export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> @@ -493,6 +553,130 @@ export class Api<SecurityDataType extends unknown> extends HttpClient<SecurityDataType> + this.request<V1EventList, APIErrors>({ + path: `/api/v1/stable/tenants/${tenant}/events`, + method: 'GET', + query: query, + secure: true, + format: 'json', + ...params, + }); + /** + * @description Lists all filters for a tenant. + * + * @tags Filter + * @name V1FilterList + * @summary List filters + * @request GET:/api/v1/stable/tenants/{tenant}/filters + * @secure + */ + v1FilterList = ( + tenant: string, + query?: { + /** + * The number to skip + * @format int64 + */ + offset?: number; + /** + * The number to limit by + * @format int64 + */ + limit?: number; + /** The workflow ids to filter by */ + workflowIds?: string[]; + /** The scopes to subset candidate filters by */ + scopes?: string[]; + }, + params: RequestParams = {} + ) => + this.request<V1FilterList, APIErrors>({ + path: `/api/v1/stable/tenants/${tenant}/filters`, + method: 'GET', + query: query, + secure: true, + format: 'json', + ...params, + }); + /** + * @description Create a new filter + * + * @tags Filter + * @name V1FilterCreate + * @summary Create a filter + * @request POST:/api/v1/stable/tenants/{tenant}/filters + * @secure + */ + v1FilterCreate = (tenant: string, data: V1CreateFilterRequest, params: RequestParams = {}) => + this.request<V1Filter, APIErrors>({ + path: `/api/v1/stable/tenants/${tenant}/filters`, + method: 'POST', + body: data, + secure: true, + type: ContentType.Json, + format: 'json', + ...params, + }); + /** + * @description Get a filter by its id + * + * @tags Filter + * @name V1FilterGet + * @summary Get a filter + * @request GET:/api/v1/stable/tenants/{tenant}/filters/{v1-filter} + * @secure + */ + v1FilterGet = (tenant: string, v1Filter: string, params: RequestParams = {}) => + this.request<V1Filter, APIErrors>({ + path: `/api/v1/stable/tenants/${tenant}/filters/${v1Filter}`, + method: 'GET', + secure: true, + format: 'json', + ...params, + }); + /** + * @description Delete a filter + * + * @tags Filter + * @name V1FilterDelete + * @request DELETE:/api/v1/stable/tenants/{tenant}/filters/{v1-filter} + * @secure + */ + v1FilterDelete = (tenant: string, v1Filter: string, params: RequestParams = {}) => + this.request<V1Filter, APIErrors>({ + path: `/api/v1/stable/tenants/${tenant}/filters/${v1Filter}`, + method: 'DELETE', + secure: true, + format: 'json', + ...params, + }); /** * @description Gets the readiness status * diff --git a/sdks/typescript/src/clients/rest/generated/data-contracts.ts b/sdks/typescript/src/clients/rest/generated/data-contracts.ts index 8c3e565de..dc79d4c15 100644 --- a/sdks/typescript/src/clients/rest/generated/data-contracts.ts +++ b/sdks/typescript/src/clients/rest/generated/data-contracts.ts @@ -615,6 +615,13 @@ export interface CreateEventRequest { data: object; /** Additional metadata for the event. */ additionalMetadata?: object; + /** + * The priority of the event. + * @format int32 + */ + priority?: number; + /** The scope for event filtering.
*/ + scope?: string; } export interface BulkCreateEventRequest { @@ -678,6 +685,73 @@ export interface EventList { rows?: Event[]; } +export interface V1EventList { + pagination?: PaginationResponse; + rows?: { + metadata: APIResourceMeta; + /** The key for the event. */ + key: string; + /** The tenant associated with this event. */ + tenant?: Tenant; + /** The ID of the tenant associated with this event. */ + tenantId: string; + /** The workflow run summary for this event. */ + workflowRunSummary: { + /** + * The number of running runs. + * @format int64 + */ + running: number; + /** + * The number of queued runs. + * @format int64 + */ + queued: number; + /** + * The number of succeeded runs. + * @format int64 + */ + succeeded: number; + /** + * The number of failed runs. + * @format int64 + */ + failed: number; + /** + * The number of cancelled runs. + * @format int64 + */ + cancelled: number; + }; + /** Additional metadata for the event. */ + additionalMetadata?: object; + }[]; +} + +export interface V1FilterList { + pagination?: PaginationResponse; + rows?: V1Filter[]; +} + +export interface V1Filter { + metadata: APIResourceMeta; + /** The ID of the tenant associated with this filter. */ + tenantId: string; + /** + * The workflow id associated with this filter. + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + workflowId: string; + /** The scope associated with this filter. Used for subsetting candidate filters at evaluation time */ + scope: string; + /** The expression associated with this filter. */ + expression: string; + /** Additional payload data associated with the filter */ + payload: object; +} + export interface RateLimit { /** The key for the rate limit. */ key: string; @@ -723,6 +797,8 @@ export interface Workflow { tags?: WorkflowTag[]; /** The jobs of the workflow. */ jobs?: Job[]; + /** The tenant id of the workflow. */ + tenantId: string; } export interface WorkflowUpdateRequest { @@ -1481,6 +1557,10 @@ export interface V1TaskSummary { metadata: APIResourceMeta; /** The action ID of the task. */ actionId?: string; + /** The number of retries of the task. */ + retryCount?: number; + /** The attempt number of the task. */ + attempt?: number; /** Additional metadata for the task run. */ additionalMetadata?: object; /** The list of children tasks */ @@ -1551,7 +1631,7 @@ export interface V1TaskSummary { * The external ID of the workflow run * @format uuid */ - workflowRunExternalId?: string; + workflowRunExternalId: string; /** * The version ID of the workflow * @format uuid @@ -1600,6 +1680,10 @@ export interface V1TaskEventList { /** @format uuid */ workerId?: string; taskDisplayName?: string; + /** The number of retries of the task. */ + retryCount?: number; + /** The attempt number of the task. */ + attempt?: number; }[]; } @@ -1736,9 +1820,99 @@ export interface V1LogLine { message: string; /** The log metadata. */ metadata: object; + /** The retry count of the log line. */ + retryCount?: number; + /** The attempt number of the log line. */ + attempt?: number; + /** The log level. */ + level?: V1LogLineLevel; } export interface V1LogLineList { pagination?: PaginationResponse; rows?: V1LogLine[]; } + +export interface V1TaskTiming { + metadata: APIResourceMeta; + /** The depth of the task in the waterfall. */ + depth: number; + status: V1TaskStatus; + /** The display name of the task run. */ + taskDisplayName: string; + /** + * The external ID of the task. 
+ * @format uuid + * @minLength 36 + * @maxLength 36 + */ + taskExternalId: string; + /** The ID of the task. */ + taskId: number; + /** + * The timestamp the task was inserted. + * @format date-time + */ + taskInsertedAt: string; + /** + * The ID of the tenant. + * @format uuid + * @minLength 36 + * @maxLength 36 + * @example "bb214807-246e-43a5-a25d-41761d1cff9e" + */ + tenantId: string; + /** + * The external ID of the parent task. + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + parentTaskExternalId?: string; + /** + * The timestamp the task run was queued. + * @format date-time + */ + queuedAt?: string; + /** + * The timestamp the task run started. + * @format date-time + */ + startedAt?: string; + /** + * The timestamp the task run finished. + * @format date-time + */ + finishedAt?: string; + /** + * The external ID of the workflow run. + * @format uuid + */ + workflowRunId?: string; + /** The number of retries of the task. */ + retryCount?: number; + /** The attempt number of the task. */ + attempt?: number; +} + +export interface V1TaskTimingList { + pagination: PaginationResponse; + /** The list of task timings */ + rows: V1TaskTiming[]; +} + +export interface V1CreateFilterRequest { + /** + * The workflow id + * @format uuid + * @minLength 36 + * @maxLength 36 + */ + workflowId: string; + /** The expression for the filter */ + expression: string; + /** The scope associated with this filter. Used for subsetting candidate filters at evaluation time */ + scope: string; + /** The payload for the filter */ + payload?: object; +} diff --git a/sdks/typescript/src/protoc/events/events.ts b/sdks/typescript/src/protoc/events/events.ts index 4f30aef45..b3794d954 100644 --- a/sdks/typescript/src/protoc/events/events.ts +++ b/sdks/typescript/src/protoc/events/events.ts @@ -24,6 +24,8 @@ export interface Event { eventTimestamp: Date | undefined; /** the payload for the event */ additionalMetadata?: string | undefined; + /** the scope associated with this filter. Used for subsetting candidate filters at evaluation time */ + scope?: string | undefined; } export interface Events { @@ -73,6 +75,9 @@ export interface PushEventRequest { eventTimestamp: Date | undefined; /** metadata for the event */ additionalMetadata?: string | undefined; + priority?: number | undefined; + /** the scope associated with this filter. Used for subsetting candidate filters at evaluation time */ + scope?: string | undefined; } export interface ReplayEventRequest { @@ -88,6 +93,7 @@ function createBaseEvent(): Event { payload: '', eventTimestamp: undefined, additionalMetadata: undefined, + scope: undefined, }; } @@ -111,6 +117,9 @@ export const Event: MessageFns<Event> = { if (message.additionalMetadata !== undefined) { writer.uint32(50).string(message.additionalMetadata); } + if (message.scope !== undefined) { + writer.uint32(58).string(message.scope); + } return writer; }, @@ -169,6 +178,14 @@ export const Event: MessageFns<Event> = { message.additionalMetadata = reader.string(); continue; } + case 7: { + if (tag !== 58) { + break; + } + + message.scope = reader.string(); + continue; + } } if ((tag & 7) === 4 || tag === 0) { break; @@ -190,6 +207,7 @@ export const Event: MessageFns<Event> = { additionalMetadata: isSet(object.additionalMetadata) ? globalThis.String(object.additionalMetadata) : undefined, + scope: isSet(object.scope) ?
diff --git a/sdks/typescript/src/protoc/events/events.ts b/sdks/typescript/src/protoc/events/events.ts
index 4f30aef45..b3794d954 100644
--- a/sdks/typescript/src/protoc/events/events.ts
+++ b/sdks/typescript/src/protoc/events/events.ts
@@ -24,6 +24,8 @@ export interface Event {
   eventTimestamp: Date | undefined;
   /** the payload for the event */
   additionalMetadata?: string | undefined;
+  /** the scope associated with this filter. Used for subsetting candidate filters at evaluation time */
+  scope?: string | undefined;
 }
 
 export interface Events {
@@ -73,6 +75,9 @@ export interface PushEventRequest {
   eventTimestamp: Date | undefined;
   /** metadata for the event */
   additionalMetadata?: string | undefined;
+  priority?: number | undefined;
+  /** the scope associated with this filter. Used for subsetting candidate filters at evaluation time */
+  scope?: string | undefined;
 }
 
 export interface ReplayEventRequest {
@@ -88,6 +93,7 @@ function createBaseEvent(): Event {
     payload: '',
     eventTimestamp: undefined,
     additionalMetadata: undefined,
+    scope: undefined,
   };
 }
 
@@ -111,6 +117,9 @@ export const Event: MessageFns<Event> = {
     if (message.additionalMetadata !== undefined) {
       writer.uint32(50).string(message.additionalMetadata);
     }
+    if (message.scope !== undefined) {
+      writer.uint32(58).string(message.scope);
+    }
     return writer;
   },
 
@@ -169,6 +178,14 @@ export const Event: MessageFns<Event> = {
           message.additionalMetadata = reader.string();
           continue;
         }
+        case 7: {
+          if (tag !== 58) {
+            break;
+          }
+
+          message.scope = reader.string();
+          continue;
+        }
       }
       if ((tag & 7) === 4 || tag === 0) {
         break;
@@ -190,6 +207,7 @@ export const Event: MessageFns<Event> = {
       additionalMetadata: isSet(object.additionalMetadata)
         ? globalThis.String(object.additionalMetadata)
         : undefined,
+      scope: isSet(object.scope) ? globalThis.String(object.scope) : undefined,
     };
   },
 
@@ -213,6 +231,9 @@ export const Event: MessageFns<Event> = {
     if (message.additionalMetadata !== undefined) {
      obj.additionalMetadata = message.additionalMetadata;
     }
+    if (message.scope !== undefined) {
+      obj.scope = message.scope;
+    }
     return obj;
   },
 
@@ -227,6 +248,7 @@ export const Event: MessageFns<Event> = {
     message.payload = object.payload ?? '';
     message.eventTimestamp = object.eventTimestamp ?? undefined;
     message.additionalMetadata = object.additionalMetadata ?? undefined;
+    message.scope = object.scope ?? undefined;
     return message;
   },
 };
@@ -699,7 +721,14 @@ export const BulkPushEventRequest: MessageFns<BulkPushEventRequest> = {
 };
 
 function createBasePushEventRequest(): PushEventRequest {
-  return { key: '', payload: '', eventTimestamp: undefined, additionalMetadata: undefined };
+  return {
+    key: '',
+    payload: '',
+    eventTimestamp: undefined,
+    additionalMetadata: undefined,
+    priority: undefined,
+    scope: undefined,
+  };
 }
 
 export const PushEventRequest: MessageFns<PushEventRequest> = {
@@ -716,6 +745,12 @@ export const PushEventRequest: MessageFns<PushEventRequest> = {
     if (message.additionalMetadata !== undefined) {
       writer.uint32(34).string(message.additionalMetadata);
     }
+    if (message.priority !== undefined) {
+      writer.uint32(40).int32(message.priority);
+    }
+    if (message.scope !== undefined) {
+      writer.uint32(50).string(message.scope);
+    }
     return writer;
   },
 
@@ -758,6 +793,22 @@ export const PushEventRequest: MessageFns<PushEventRequest> = {
           message.additionalMetadata = reader.string();
           continue;
         }
+        case 5: {
+          if (tag !== 40) {
+            break;
+          }
+
+          message.priority = reader.int32();
+          continue;
+        }
+        case 6: {
+          if (tag !== 50) {
+            break;
+          }
+
+          message.scope = reader.string();
+          continue;
+        }
       }
       if ((tag & 7) === 4 || tag === 0) {
         break;
@@ -777,6 +828,8 @@ export const PushEventRequest: MessageFns<PushEventRequest> = {
       additionalMetadata: isSet(object.additionalMetadata)
        ? globalThis.String(object.additionalMetadata)
        : undefined,
+      priority: isSet(object.priority) ? globalThis.Number(object.priority) : undefined,
+      scope: isSet(object.scope) ? globalThis.String(object.scope) : undefined,
     };
   },
 
@@ -794,6 +847,12 @@ export const PushEventRequest: MessageFns<PushEventRequest> = {
     if (message.additionalMetadata !== undefined) {
       obj.additionalMetadata = message.additionalMetadata;
     }
+    if (message.priority !== undefined) {
+      obj.priority = Math.round(message.priority);
+    }
+    if (message.scope !== undefined) {
+      obj.scope = message.scope;
+    }
     return obj;
   },
 
@@ -806,6 +865,8 @@ export const PushEventRequest: MessageFns<PushEventRequest> = {
     message.payload = object.payload ?? '';
    message.eventTimestamp = object.eventTimestamp ?? undefined;
     message.additionalMetadata = object.additionalMetadata ?? undefined;
+    message.priority = object.priority ?? undefined;
+    message.scope = object.scope ?? undefined;
     return message;
   },
 };
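The tag bytes written above follow the standard protobuf rule tag = (field_number << 3) | wire_type, which is where 40, 50, and 58 come from. A quick sanity check (plain TypeScript, separate from the generated code):

    // wire types: 0 = varint (int32), 2 = length-delimited (string)
    const tagOf = (field: number, wire: number) => (field << 3) | wire;

    console.log(tagOf(5, 0)); // 40 -> PushEventRequest.priority (varint)
    console.log(tagOf(6, 2)); // 50 -> PushEventRequest.scope (string)
    console.log(tagOf(7, 2)); // 58 -> Event.scope (string)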
diff --git a/sdks/typescript/src/v1/client/client.ts b/sdks/typescript/src/v1/client/client.ts
index bfe979764..358f05b55 100644
--- a/sdks/typescript/src/v1/client/client.ts
+++ b/sdks/typescript/src/v1/client/client.ts
@@ -35,6 +35,7 @@ import { RunsClient } from './features/runs';
 import { InputType, OutputType, UnknownInputType, StrictWorkflowOutputType } from '../types';
 import { RatelimitsClient } from './features';
 import { AdminClient } from './admin';
+import { FiltersClient } from './features/filters';
 
 /**
  * HatchetV1 implements the main client interface for interacting with the Hatchet workflow engine.
@@ -354,6 +355,19 @@ export class HatchetClient implements IHatchetClient {
     return this._metrics;
   }
 
+  private _filters: FiltersClient | undefined;
+
+  /**
+   * Get the filters client for creating and managing filters
+   * @returns A filters client instance
+   */
+  get filters() {
+    if (!this._filters) {
+      this._filters = new FiltersClient(this);
+    }
+    return this._filters;
+  }
+
   private _ratelimits: RatelimitsClient | undefined;
 
   /**
diff --git a/sdks/typescript/src/v1/client/features/filters.ts b/sdks/typescript/src/v1/client/features/filters.ts
new file mode 100644
index 000000000..c225a12af
--- /dev/null
+++ b/sdks/typescript/src/v1/client/features/filters.ts
@@ -0,0 +1,57 @@
+import { HatchetClient } from '../client';
+
+export type WorkflowIdScopePair = {
+  workflowId: string;
+  scope: string;
+};
+
+/**
+ * The filters client is a client for interacting with Hatchet's filters API.
+ */
+export class FiltersClient {
+  tenantId: string;
+  api: HatchetClient['api'];
+
+  constructor(client: HatchetClient) {
+    this.tenantId = client.tenantId;
+    this.api = client.api;
+  }
+
+  async list(opts?: {
+    limit?: number;
+    offset?: number;
+    workflowIdsAndScopes?: WorkflowIdScopePair[];
+  }) {
+    const hasWorkflowIdsAndScopes = opts?.workflowIdsAndScopes !== undefined;
+    const workflowIds = hasWorkflowIdsAndScopes
+      ? opts.workflowIdsAndScopes?.map((pair) => pair.workflowId)
+      : undefined;
+    const scopes = hasWorkflowIdsAndScopes
+      ? opts.workflowIdsAndScopes?.map((pair) => pair.scope)
+      : undefined;
+
+    const { data } = await this.api.v1FilterList(this.tenantId, {
+      limit: opts?.limit,
+      offset: opts?.offset,
+      workflowIds,
+      scopes,
+    });
+
+    return data;
+  }
+
+  async get(filterId: Parameters<HatchetClient['api']['v1FilterGet']>[1]) {
+    const { data } = await this.api.v1FilterGet(this.tenantId, filterId);
+    return data;
+  }
+
+  async create(opts: Parameters<HatchetClient['api']['v1FilterCreate']>[1]) {
+    const { data } = await this.api.v1FilterCreate(this.tenantId, opts);
+    return data;
+  }
+
+  async delete(filterId: Parameters<HatchetClient['api']['v1FilterDelete']>[1]) {
+    const { data } = await this.api.v1FilterDelete(this.tenantId, filterId);
+    return data;
+  }
+}
diff --git a/sdks/typescript/src/v1/client/features/runs.ts b/sdks/typescript/src/v1/client/features/runs.ts
index 0efabc7b4..ec68b22b7 100644
--- a/sdks/typescript/src/v1/client/features/runs.ts
+++ b/sdks/typescript/src/v1/client/features/runs.ts
@@ -50,6 +50,14 @@ export interface ListRunsOpts extends RunFilter {
    * @maxLength 36
    */
   parentTaskExternalId?: string;
+
+  /**
+   * The triggering event external id to filter by
+   * @format uuid
+   * @minLength 36
+   * @maxLength 36
+   */
+  triggeringEventExternalId?: string;
 }
 
 /**
@@ -138,6 +146,7 @@ export class RunsClient {
       ),
       additional_metadata: am,
       only_tasks: opts.onlyTasks || false,
+      triggering_event_external_id: opts.triggeringEventExternalId,
     };
   }
 }
diff --git a/sdks/typescript/src/v1/client/features/workflows.ts b/sdks/typescript/src/v1/client/features/workflows.ts
index 3ce97451a..45cf33043 100644
--- a/sdks/typescript/src/v1/client/features/workflows.ts
+++ b/sdks/typescript/src/v1/client/features/workflows.ts
@@ -53,7 +53,7 @@ export class WorkflowsClient {
       expiry: Date.now() + this.cacheTTL,
     });
 
-    return workflow;
+    return wf;
   }
 
   throw new Error(`Workflow with name ${name} not found`);
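Together with the lazily-constructed `filters` getter on HatchetClient, this yields a small CRUD surface. A usage sketch, assuming a configured client instance named `hatchet` and placeholder ids:

    const filter = await hatchet.filters.create({
      workflowId: '<workflow-uuid>', // placeholder
      expression: 'input.ShouldSkip == false',
      scope: 'customer-123',
    });

    // list() fans a WorkflowIdScopePair[] out into parallel workflowIds/scopes arrays
    const page = await hatchet.filters.list({
      workflowIdsAndScopes: [{ workflowId: '<workflow-uuid>', scope: 'customer-123' }],
    });

    await hatchet.filters.delete(filter.metadata.id);

The memoizing getter mirrors the pattern already used for the metrics and ratelimits clients, so each HatchetClient holds at most one FiltersClient.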
diff --git a/sdks/typescript/src/v1/examples/on_event/event.e2e.ts b/sdks/typescript/src/v1/examples/on_event/event.e2e.ts
new file mode 100644
index 000000000..6226f9755
--- /dev/null
+++ b/sdks/typescript/src/v1/examples/on_event/event.e2e.ts
@@ -0,0 +1,374 @@
+import sleep from '@hatchet-dev/typescript-sdk/util/sleep';
+import { randomUUID } from 'crypto';
+import { Event } from '@hatchet-dev/typescript-sdk/protoc/events';
+import { SIMPLE_EVENT, lower, Input } from './workflow';
+import { hatchet } from '../hatchet-client';
+import { Worker } from '../../client/worker/worker';
+
+xdescribe('events-e2e', () => {
+  let worker: Worker;
+  let testRunId: string;
+
+  beforeEach(async () => {
+    testRunId = randomUUID();
+
+    worker = await hatchet.worker('event-worker');
+    await worker.registerWorkflow(lower);
+
+    void worker.start();
+  });
+
+  afterAll(async () => {
+    await worker.stop();
+    await sleep(2000);
+  });
+
+  async function setupEventFilter(expression?: string, payload: Record<string, unknown> = {}) {
+    const finalExpression =
+      expression || `input.ShouldSkip == false && payload.testRunId == '${testRunId}'`;
+
+    const workflowId = (await hatchet.workflows.get(lower.name)).metadata.id;
+
+    const filter = await hatchet.filters.create({
+      workflowId,
+      expression: finalExpression,
+      scope: testRunId,
+      payload: { testRunId, ...payload },
+    });
+
+    return async () => {
+      await hatchet.filters.delete(filter.metadata.id);
+    };
+  }
+
+  // Helper function to wait for events to process and fetch runs
+  async function waitForEventsToProcess(events: Event[]): Promise<Record<string, any[]>> {
+    await sleep(3000);
+
+    const persisted = (await hatchet.events.list({ limit: 100 })).rows || [];
+
+    // Ensure all our events are persisted
+    const eventIds = new Set(events.map((e) => e.eventId));
+    const persistedIds = new Set(persisted.map((e) => e.metadata.id));
+    expect(Array.from(eventIds).every((id) => persistedIds.has(id))).toBeTruthy();
+
+    let attempts = 0;
+    const maxAttempts = 15;
+    const eventToRuns: Record<string, any[]> = {};
+
+    while (true) {
+      console.log('Waiting for event runs to complete...');
+      if (attempts > maxAttempts) {
+        console.log('Timed out waiting for event runs to complete.');
+        return {};
+      }
+
+      attempts += 1;
+
+      // For each event, fetch its runs
+      const runsPromises = events.map(async (event) => {
+        const runs = await hatchet.runs.list({
+          triggeringEventExternalId: event.eventId,
+        });
+
+        // Extract metadata from event
+        const meta = event.additionalMetadata ? JSON.parse(event.additionalMetadata) : {};
+        const payload = event.payload ? JSON.parse(event.payload) : {};
+
+        return {
+          event: {
+            id: event.eventId,
+            payload,
+            meta,
+            shouldHaveRuns: Boolean(meta.should_have_runs),
+            testRunId: meta.test_run_id,
+          },
+          runs: runs.rows || [],
+        };
+      });
+
+      const eventRuns = await Promise.all(runsPromises);
+
+      // If all events have no runs yet, wait and retry
+      if (eventRuns.every(({ runs }) => runs.length === 0)) {
+        await sleep(1000);
+
+        // eslint-disable-next-line no-continue
+        continue;
+      }
+
+      // Store runs by event ID
+      for (const { event, runs } of eventRuns) {
+        eventToRuns[event.id] = runs;
+      }
+
+      // Check if any runs are still in progress
+      const anyInProgress = Object.values(eventToRuns).some((runs) =>
+        runs.some((run) => run.status === 'QUEUED' || run.status === 'RUNNING')
+      );
+
+      if (anyInProgress) {
+        await sleep(1000);
+
+        // eslint-disable-next-line no-continue
+        continue;
+      }
+
+      break;
+    }
+
+    return eventToRuns;
+  }
+
+  // Helper to verify runs match expectations
+  function verifyEventRuns(eventData: any, runs: any[]) {
+    if (eventData.shouldHaveRuns) {
+      expect(runs.length).toBeGreaterThan(0);
+    } else {
+      expect(runs.length).toBe(0);
+    }
+  }
+
+  // Helper to create bulk push event objects
+  function createBulkPushEvent({
+    index = 1,
+    ShouldSkip = false,
+    shouldHaveRuns = true,
+    key = SIMPLE_EVENT,
+    payload = {},
+    scope = null,
+  }: {
+    index?: number;
+    ShouldSkip?: boolean;
+    shouldHaveRuns?: boolean;
+    key?: string;
+    payload?: Record<string, unknown>;
+    scope?: string | null;
+  }) {
+    return {
+      key,
+      payload: {
+        ShouldSkip,
+        Message: `This is event ${index}`,
+        ...payload,
+      },
+      additionalMetadata: {
+        should_have_runs: shouldHaveRuns,
+        test_run_id: testRunId,
+        key,
+        index,
+      },
+      scope: scope || undefined,
+    };
+  }
+
+  // Helper to create payload object
+  function createEventPayload(ShouldSkip: boolean): Input {
+    return { ShouldSkip, Message: 'This is event 1' };
+  }
+
+  it('should push an event', async () => {
+    const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));
+    expect(event.eventId).toBeTruthy();
+  }, 10000);
+
+  it('should push an event asynchronously', async () => {
+    const event = await hatchet.events.push(SIMPLE_EVENT, createEventPayload(false));
+    expect(event.eventId).toBeTruthy();
+  }, 10000);
+
+  it('should bulk push events', async () => {
+    const events = [
+      {
+        key: SIMPLE_EVENT,
+        payload: { Message: 'This is event 1', ShouldSkip: false },
+        additionalMetadata: { source: 'test', user_id: 'user123' },
+      },
+      {
+        key: SIMPLE_EVENT,
+        payload: { Message: 'This is event 2', ShouldSkip: false },
+        additionalMetadata: { source: 'test', user_id: 'user456' },
+      },
+      {
+        key: SIMPLE_EVENT,
+        payload: { Message: 'This is event 3', ShouldSkip: false },
+        additionalMetadata: { source: 'test', user_id: 'user789' },
+      },
+    ];
+
+    const result = await hatchet.events.bulkPush(SIMPLE_EVENT, events);
+
+    expect(result.events.length).toBe(3);
+
+    // Sort and verify namespacing
+    const sortedEvents = [...events].sort((a, b) => a.key.localeCompare(b.key));
+    const sortedResults = [...result.events].sort((a, b) => a.key.localeCompare(b.key));
+
+    sortedEvents.forEach((originalEvent, index) => {
+      const returnedEvent = sortedResults[index];
+      expect(returnedEvent.key).toBe(originalEvent.key);
+    });
+  }, 15000);
+
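+  // The scenarios below lean on matching behavior that is implied by the
+  // shouldHaveRuns flags rather than spelled out here: events pushed without a
+  // scope trigger plain event-key subscriptions and bypass filters entirely;
+  // events pushed with a scope only produce runs when a filter in that scope
+  // evaluates to true; unknown event keys never produce runs either way.
+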
+  it('should process events according to event engine behavior', async () => {
+    const eventPromises = [
+      createBulkPushEvent({}),
+      createBulkPushEvent({
+        key: 'thisisafakeeventfoobarbaz',
+        shouldHaveRuns: false,
+      }),
+    ].map((event) => convertBulkToSingle(event));
+    const events = await Promise.all(eventPromises);
+
+    const eventToRuns = await waitForEventsToProcess(events);
+
+    // Verify each event's runs
+    Object.keys(eventToRuns).forEach((eventId) => {
+      const runs = eventToRuns[eventId];
+      const eventInfo = events.find((e) => e.eventId === eventId);
+
+      if (eventInfo) {
+        const meta = JSON.parse(eventInfo.additionalMetadata || '{}');
+        verifyEventRuns(
+          {
+            shouldHaveRuns: Boolean(meta.should_have_runs),
+          },
+          runs
+        );
+      }
+    });
+  }, 30000);
+
+  function generateBulkEvents() {
+    return [
+      createBulkPushEvent({
+        index: 1,
+        ShouldSkip: false,
+        shouldHaveRuns: true,
+      }),
+      createBulkPushEvent({
+        index: 2,
+        ShouldSkip: true,
+        shouldHaveRuns: true,
+      }),
+      createBulkPushEvent({
+        index: 3,
+        ShouldSkip: false,
+        shouldHaveRuns: true,
+        scope: testRunId,
+      }),
+      createBulkPushEvent({
+        index: 4,
+        ShouldSkip: true,
+        shouldHaveRuns: false,
+        scope: testRunId,
+      }),
+      createBulkPushEvent({
+        index: 5,
+        ShouldSkip: true,
+        shouldHaveRuns: false,
+        scope: testRunId,
+        key: 'thisisafakeeventfoobarbaz',
+      }),
+      createBulkPushEvent({
+        index: 6,
+        ShouldSkip: false,
+        shouldHaveRuns: false,
+        scope: testRunId,
+        key: 'thisisafakeeventfoobarbaz',
+      }),
+    ];
+  }
+
+  async function convertBulkToSingle(event: any) {
+    return hatchet.events.push(event.key, event.payload, {
+      scope: event.scope,
+      additionalMetadata: event.additionalMetadata,
+      priority: event.priority,
+    });
+  }
+
+  it('should handle event skipping and filtering without bulk push', async () => {
+    const cleanup = await setupEventFilter();
+
+    try {
+      const rawEvents = generateBulkEvents();
+      const eventPromises = rawEvents.map((event) => convertBulkToSingle(event));
+      const events = await Promise.all(eventPromises);
+
+      const eventToRuns = await waitForEventsToProcess(events);
+
+      // Verify each event's runs
+      Object.keys(eventToRuns).forEach((eventId) => {
+        const runs = eventToRuns[eventId];
+        const eventInfo = events.find((e) => e.eventId === eventId);
+
+        if (eventInfo) {
+          const meta = JSON.parse(eventInfo.additionalMetadata || '{}');
+          verifyEventRuns(
+            {
+              shouldHaveRuns: Boolean(meta.should_have_runs),
+            },
+            runs
+          );
+        }
+      });
+    } finally {
+      await cleanup();
+    }
+  }, 30000);
+
+  it('should filter events by payload expression not matching', async () => {
+    const cleanup = await setupEventFilter("input.ShouldSkip == false && payload.foobar == 'baz'", {
+      foobar: 'qux',
+    });
+
+    try {
+      const event = await hatchet.events.push(
+        SIMPLE_EVENT,
+        { Message: 'This is event 1', ShouldSkip: false },
+        {
+          scope: testRunId,
+          additionalMetadata: {
+            should_have_runs: 'false',
+            test_run_id: testRunId,
+            key: '1',
+          },
+        }
+      );
+
+      const eventToRuns = await waitForEventsToProcess([event]);
+      expect(Object.keys(eventToRuns).length).toBe(0);
+    } finally {
+      await cleanup();
+    }
+  }, 20000);
+
+  it('should filter events by payload expression matching', async () => {
+    const cleanup = await setupEventFilter("input.ShouldSkip == false && payload.foobar == 'baz'", {
+      foobar: 'baz',
+    });
+
+    try {
+      const event = await hatchet.events.push(
+        SIMPLE_EVENT,
+        { Message: 'This is event 1', ShouldSkip: false },
+        {
+          scope: testRunId,
+          additionalMetadata: {
+            should_have_runs: 'true',
+            test_run_id: testRunId,
+            key: '1',
+          },
+        }
+      );
+
+      const eventToRuns = await waitForEventsToProcess([event]);
+      const runs = Object.values(eventToRuns)[0] || [];
+      expect(runs.length).toBeGreaterThan(0);
+    } finally {
+      await cleanup();
+    }
+  }, 20000);
+});
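For reference, the decision the assertions above encode can be mirrored locally. This is an assumption-laden sketch of the semantics only (the real evaluation happens server-side in CEL), with illustrative names:

    type FilterRow = { scope: string; payload: Record<string, unknown> };
    type PushedEvent = { scope?: string; input: { ShouldSkip: boolean } };

    // Stand-in for: input.ShouldSkip == false && payload.foobar == 'baz'
    function wouldTrigger(event: PushedEvent, filter: FilterRow): boolean {
      if (event.scope !== filter.scope) return false; // scope subsets candidate filters
      return event.input.ShouldSkip === false && filter.payload.foobar === 'baz';
    }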
diff --git a/sdks/typescript/src/v1/examples/on_event/event.ts b/sdks/typescript/src/v1/examples/on_event/event.ts
index e6159dcbe..fa428d78e 100644
--- a/sdks/typescript/src/v1/examples/on_event/event.ts
+++ b/sdks/typescript/src/v1/examples/on_event/event.ts
@@ -5,6 +5,7 @@ async function main() {
   // > Pushing an Event
   const res = await hatchet.events.push('simple-event:create', {
     Message: 'hello',
+    ShouldSkip: false,
   });
   // !!
 
diff --git a/sdks/typescript/src/v1/examples/on_event/workflow.ts b/sdks/typescript/src/v1/examples/on_event/workflow.ts
index 3db6c5ac4..ae21df33a 100644
--- a/sdks/typescript/src/v1/examples/on_event/workflow.ts
+++ b/sdks/typescript/src/v1/examples/on_event/workflow.ts
@@ -2,6 +2,7 @@ import { hatchet } from '../hatchet-client';
 
 export type Input = {
   Message: string;
+  ShouldSkip: boolean;
 };
 
 export const SIMPLE_EVENT = 'simple-event:create';
diff --git a/sql/schema/v1-core.sql b/sql/schema/v1-core.sql
index 4d76b98c4..5f9a455a8 100644
--- a/sql/schema/v1-core.sql
+++ b/sql/schema/v1-core.sql
@@ -471,6 +471,27 @@ CREATE TABLE v1_match_condition (
     CONSTRAINT v1_match_condition_pkey PRIMARY KEY (v1_match_id, id)
 );
 
+CREATE TABLE v1_filter (
+    id UUID NOT NULL DEFAULT gen_random_uuid(),
+    tenant_id UUID NOT NULL,
+    workflow_id UUID NOT NULL,
+    scope TEXT NOT NULL,
+    expression TEXT NOT NULL,
+    payload JSONB NOT NULL DEFAULT '{}'::JSONB,
+
+    inserted_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT CURRENT_TIMESTAMP,
+
+    PRIMARY KEY (tenant_id, id)
+);
+
+CREATE UNIQUE INDEX v1_filter_unique_idx ON v1_filter (
+    tenant_id ASC,
+    workflow_id ASC,
+    scope ASC,
+    expression ASC
+);
+
 CREATE INDEX v1_match_condition_filter_idx ON v1_match_condition (
     tenant_id ASC,
     event_type ASC,